/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
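
/* Note that both the hash and the equality function above deliberately
   look only at LOCATION_LINE, so every location on a given source line
   shares a single table entry and hence a single discriminator
   counter.  */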

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
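
/* As an illustrative (hypothetical) example, for the C fragment

     void *p = &&out;
     goto *p;          <- GIMPLE_GOTO whose destination is the pointer
   out:                   value p, not a LABEL_DECL, so this predicate
     return;              is true

   a plain "goto out;" keeps the LABEL_DECL destination and is not a
   computed goto.  */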

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
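
/* For instance, a call to a noreturn function such as abort () carries
   ECF_NORETURN, so the test above marks it control-altering and the
   CFG builder ends the basic block right after it.  */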

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
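
/* A sketch of the abnormal-goto LHS rewrite above, assuming a
   returns-twice call:

     x = setjmp (env);       <- original; ends its basic block

   becomes

     tmp = setjmp (env);     <- abnormal edges leave after this stmt
     x = tmp;                <- first stmt of the fallthru successor

   so the previous value of x stays available on the abnormal edge.  */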

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
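
/* Without this factoring, N computed gotos that could reach M forced
   labels would need on the order of N * M abnormal edges; routing
   everything through the single dispatcher block needs only N + M.
   The same argument applies to the abnormal edges between setjmp-like
   calls and their potential receivers.  */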

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and for the abnormal edges from calls to
     returns-twice calls or forced labels, factor the abnormal edges too,
     by having all abnormal edges from the calls go to a common artificial
     basic block with ABNORMAL_DISPATCHER internal call and abnormal edges
     from that basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
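
/* For example, if three distinct basic blocks all carry statements
   from source line 42, successive calls here for that line return 1,
   2 and 3, letting a sample-based profiler attribute counts to each
   block separately.  */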

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
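
/* Note that the table is filled lazily: the first query for any edge of
   a given switch walks the whole label vector once and chains every
   case sharing an edge, so subsequent queries are plain hash
   lookups.  */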

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
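
/* As a concrete example, assuming cases 1, 2 and 3 all branch to one
   block and case 4 branches to the default destination:

     case 1:            case 1 ... 3:
     case 2:      =>
     case 3:
     case 4:            <- dropped; falling into the default handles it

   the label vector is then compacted and its length adjusted.  */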

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1916 /* Merge block B into block A. */
1918 static void
1919 gimple_merge_blocks (basic_block a, basic_block b)
1921 gimple_stmt_iterator last, gsi;
1922 gphi_iterator psi;
1924 if (dump_file)
1925 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1927 /* Remove all single-valued PHI nodes from block B of the form
1928 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1929 gsi = gsi_last_bb (a);
1930 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1932 gimple *phi = gsi_stmt (psi);
1933 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1934 gimple *copy;
1935 bool may_replace_uses = (virtual_operand_p (def)
1936 || may_propagate_copy (def, use));
1938 /* In case we maintain loop closed ssa form, do not propagate arguments
1939 of loop exit phi nodes. */
1940 if (current_loops
1941 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1942 && !virtual_operand_p (def)
1943 && TREE_CODE (use) == SSA_NAME
1944 && a->loop_father != b->loop_father)
1945 may_replace_uses = false;
1947 if (!may_replace_uses)
1949 gcc_assert (!virtual_operand_p (def));
1951 /* Note that just emitting the copies is fine -- there is no problem
1952 with ordering of phi nodes. This is because A is the single
1953 predecessor of B, therefore results of the phi nodes cannot
1954 appear as arguments of the phi nodes. */
1955 copy = gimple_build_assign (def, use);
1956 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1957 remove_phi_node (&psi, false);
1959 else
1961 /* If we deal with a PHI for virtual operands, we can simply
1962 propagate these without fussing with folding or updating
1963 the stmt. */
1964 if (virtual_operand_p (def))
1966 imm_use_iterator iter;
1967 use_operand_p use_p;
1968 gimple *stmt;
1970 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1971 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1972 SET_USE (use_p, use);
1974 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1975 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1977 else
1978 replace_uses_by (def, use);
1980 remove_phi_node (&psi, true);
1984 /* Ensure that B follows A. */
1985 move_block_after (b, a);
1987 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1988 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1990 /* Remove labels from B and set gimple_bb to A for other statements. */
1991 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1993 gimple *stmt = gsi_stmt (gsi);
1994 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1996 tree label = gimple_label_label (label_stmt);
1997 int lp_nr;
1999 gsi_remove (&gsi, false);
2001 /* Now that we can thread computed gotos, we might have
2002 a situation where we have a forced label in block B.
2003 However, the label at the start of block B might still be
2004 used in other ways (think about the runtime checking for
2005 Fortran assigned gotos). So we cannot just delete the
2006 label. Instead we move the label to the start of block A. */
2007 if (FORCED_LABEL (label))
2009 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2010 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2012 /* Other user labels are kept around in the form of a debug stmt. */
2013 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2015 gimple *dbg = gimple_build_debug_bind (label,
2016 integer_zero_node,
2017 stmt);
2018 gimple_debug_bind_reset_value (dbg);
2019 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2022 lp_nr = EH_LANDING_PAD_NR (label);
2023 if (lp_nr)
2025 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2026 lp->post_landing_pad = NULL;
2029 else
2031 gimple_set_bb (stmt, a);
2032 gsi_next (&gsi);
2036 /* When merging two BBs, if their counts are different, the larger count
2037 is selected as the new bb count. This is to handle inconsistent
2038 profiles. */
2039 if (a->loop_father == b->loop_father)
2041 a->count = MAX (a->count, b->count);
2042 a->frequency = MAX (a->frequency, b->frequency);
2045 /* Merge the sequences. */
2046 last = gsi_last_bb (a);
2047 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2048 set_bb_seq (b, NULL);
2050 if (cfgcleanup_altered_bbs)
2051 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
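/* Illustrative sketch (not part of the original file): the single-valued
   PHI propagation above, in GIMPLE-dump terms.  With A the single
   predecessor of B,

     <bb A>: x_2 = ...; goto <bb B>;
     <bb B>: x_3 = PHI <x_2(A)>; use (x_3);

   merging replaces every use of x_3 by x_2 and deletes the PHI, so the
   combined block is simply

     <bb A>: x_2 = ...; use (x_2);  */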
2055 /* Of the two successors of BB, return the one that is not reached by a
2056 complex edge, if there is one. Otherwise, return BB. We use
2057 this in optimizations that use post-dominators for their heuristics,
2058 to catch the cases in C++ where function calls are involved. */
2060 basic_block
2061 single_noncomplex_succ (basic_block bb)
2063 edge e0, e1;
2064 if (EDGE_COUNT (bb->succs) != 2)
2065 return bb;
2067 e0 = EDGE_SUCC (bb, 0);
2068 e1 = EDGE_SUCC (bb, 1);
2069 if (e0->flags & EDGE_COMPLEX)
2070 return e1->dest;
2071 if (e1->flags & EDGE_COMPLEX)
2072 return e0->dest;
2074 return bb;
2077 /* CALL is a gimple call statement. Set the cfun->calls_* flags according to what it may do. */
2079 void
2080 notice_special_calls (gcall *call)
2082 int flags = gimple_call_flags (call);
2084 if (flags & ECF_MAY_BE_ALLOCA)
2085 cfun->calls_alloca = true;
2086 if (flags & ECF_RETURNS_TWICE)
2087 cfun->calls_setjmp = true;
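/* Illustrative sketch (not part of the original file): why
   ECF_RETURNS_TWICE calls such as setjmp get special CFG treatment.
   setjmp returns once normally and may return a second time,
   abnormally, via longjmp, so the block containing the call has an
   extra incoming abnormal edge.  Plain ISO C, compilable on its own:  */

#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

static void
might_jump (void)
{
  longjmp (env, 1);		/* transfers control back into setjmp */
}

int
main (void)
{
  if (setjmp (env) == 0)	/* first, normal return */
    might_jump ();
  else
    printf ("setjmp returned twice\n");	/* second, abnormal return */
  return 0;
}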
2091 /* Clear flags set by notice_special_calls. Used by dead code removal
2092 to update the flags. */
2094 void
2095 clear_special_calls (void)
2097 cfun->calls_alloca = false;
2098 cfun->calls_setjmp = false;
2101 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2103 static void
2104 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2106 /* Since this block is no longer reachable, we can just delete all
2107 of its PHI nodes. */
2108 remove_phi_nodes (bb);
2110 /* Remove edges to BB's successors. */
2111 while (EDGE_COUNT (bb->succs) > 0)
2112 remove_edge (EDGE_SUCC (bb, 0));
2116 /* Remove statements of basic block BB. */
2118 static void
2119 remove_bb (basic_block bb)
2121 gimple_stmt_iterator i;
2123 if (dump_file)
2125 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2126 if (dump_flags & TDF_DETAILS)
2128 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2129 fprintf (dump_file, "\n");
2133 if (current_loops)
2135 struct loop *loop = bb->loop_father;
2137 /* If a loop gets removed, clean up the information associated
2138 with it. */
2139 if (loop->latch == bb
2140 || loop->header == bb)
2141 free_numbers_of_iterations_estimates_loop (loop);
2144 /* Remove all the instructions in the block. */
2145 if (bb_seq (bb) != NULL)
2147 /* Walk backwards so as to get a chance to substitute all
2148 released DEFs into debug stmts. See
2149 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2150 details. */
2151 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2153 gimple *stmt = gsi_stmt (i);
2154 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2155 if (label_stmt
2156 && (FORCED_LABEL (gimple_label_label (label_stmt))
2157 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2159 basic_block new_bb;
2160 gimple_stmt_iterator new_gsi;
2162 /* A non-reachable non-local label may still be referenced.
2163 But it no longer needs to carry the extra semantics of
2164 non-locality. */
2165 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2167 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2168 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2171 new_bb = bb->prev_bb;
2172 new_gsi = gsi_start_bb (new_bb);
2173 gsi_remove (&i, false);
2174 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2176 else
2178 /* Release SSA definitions. */
2179 release_defs (stmt);
2180 gsi_remove (&i, true);
2183 if (gsi_end_p (i))
2184 i = gsi_last_bb (bb);
2185 else
2186 gsi_prev (&i);
2190 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2191 bb->il.gimple.seq = NULL;
2192 bb->il.gimple.phi_nodes = NULL;
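/* Illustrative sketch (not part of the original file): the backwards
   removal pattern used above, on a plain doubly linked list.  Deleting
   from the tail first means each removal can still inspect what came
   before it, mirroring how walking backwards gives released DEFs a
   chance to be substituted into debug stmts.  */

#include <stdlib.h>

struct stmt_node { struct stmt_node *prev, *next; };

static void
remove_all_backwards (struct stmt_node **last)
{
  struct stmt_node *s = *last;
  while (s)
    {
      struct stmt_node *prev = s->prev;	/* grab the link before freeing */
      free (s);				/* release_defs-like cleanup here */
      s = prev;
    }
  *last = NULL;
}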
2196 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2197 predicate VAL, return the edge that will be taken out of the block.
2198 If VAL does not match a unique edge, NULL is returned. */
2200 edge
2201 find_taken_edge (basic_block bb, tree val)
2203 gimple *stmt;
2205 stmt = last_stmt (bb);
2207 gcc_assert (stmt);
2208 gcc_assert (is_ctrl_stmt (stmt));
2210 if (val == NULL)
2211 return NULL;
2213 if (!is_gimple_min_invariant (val))
2214 return NULL;
2216 if (gimple_code (stmt) == GIMPLE_COND)
2217 return find_taken_edge_cond_expr (bb, val);
2219 if (gimple_code (stmt) == GIMPLE_SWITCH)
2220 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2222 if (computed_goto_p (stmt))
2224 /* Only optimize if the argument is a label; if the argument is
2225 not a label then we cannot construct a proper CFG.
2227 It may be the case that we only need to allow the LABEL_REF to
2228 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2229 appear inside a LABEL_EXPR just to be safe. */
2230 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2231 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2232 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2233 return NULL;
2236 gcc_unreachable ();
2239 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2240 statement, determine which of the outgoing edges will be taken out of the
2241 block. Return NULL if either edge may be taken. */
2243 static edge
2244 find_taken_edge_computed_goto (basic_block bb, tree val)
2246 basic_block dest;
2247 edge e = NULL;
2249 dest = label_to_block (val);
2250 if (dest)
2252 e = find_edge (bb, dest);
2253 gcc_assert (e != NULL);
2256 return e;
2259 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2260 statement, determine which of the two edges will be taken out of the
2261 block. Return NULL if either edge may be taken. */
2263 static edge
2264 find_taken_edge_cond_expr (basic_block bb, tree val)
2266 edge true_edge, false_edge;
2268 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2270 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2271 return (integer_zerop (val) ? false_edge : true_edge);
2274 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2275 statement, determine which edge will be taken out of the block. Return
2276 NULL if any edge may be taken. */
2278 static edge
2279 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2280 tree val)
2282 basic_block dest_bb;
2283 edge e;
2284 tree taken_case;
2286 taken_case = find_case_label_for_value (switch_stmt, val);
2287 dest_bb = label_to_block (CASE_LABEL (taken_case));
2289 e = find_edge (bb, dest_bb);
2290 gcc_assert (e);
2291 return e;
2295 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2296 We can make optimal use here of the fact that the case labels are
2297 sorted: We can do a binary search for a case matching VAL. */
2299 static tree
2300 find_case_label_for_value (gswitch *switch_stmt, tree val)
2302 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2303 tree default_case = gimple_switch_default_label (switch_stmt);
2305 for (low = 0, high = n; high - low > 1; )
2307 size_t i = (high + low) / 2;
2308 tree t = gimple_switch_label (switch_stmt, i);
2309 int cmp;
2311 /* Cache the result of comparing CASE_LOW and val. */
2312 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2314 if (cmp > 0)
2315 high = i;
2316 else
2317 low = i;
2319 if (CASE_HIGH (t) == NULL)
2321 /* A single-valued case label. */
2322 if (cmp == 0)
2323 return t;
2325 else
2327 /* A case range. We can only handle integer ranges. */
2328 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2329 return t;
2333 return default_case;
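/* Illustrative sketch (not part of the original file): the same binary
   search over sorted, non-overlapping [low, high] ranges, shown with
   plain integers.  The invariant matches the loop above: everything
   below index LOW compares less than VAL, everything at or beyond
   index HIGH compares greater.  */

struct case_range { int low, high; };	/* high == low for a single value */

static int
find_case_index (const struct case_range *cases, int n, int val)
{
  int low = -1, high = n;
  while (high - low > 1)
    {
      int i = (high + low) / 2;
      if (val < cases[i].low)
	high = i;
      else if (val > cases[i].high)
	low = i;
      else
	return i;	/* cases[i].low <= val <= cases[i].high */
    }
  return -1;		/* no match: caller falls back to the default case */
}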
2337 /* Dump a basic block on stderr. */
2339 void
2340 gimple_debug_bb (basic_block bb)
2342 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2346 /* Dump basic block with index N on stderr. */
2348 basic_block
2349 gimple_debug_bb_n (int n)
2351 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2352 return BASIC_BLOCK_FOR_FN (cfun, n);
2356 /* Dump the CFG on stderr.
2358 FLAGS are the same used by the tree dumping functions
2359 (see TDF_* in dumpfile.h). */
2361 void
2362 gimple_debug_cfg (int flags)
2364 gimple_dump_cfg (stderr, flags);
2368 /* Dump the program showing basic block boundaries on the given FILE.
2370 FLAGS are the same used by the tree dumping functions (see TDF_* in
2371 tree.h). */
2373 void
2374 gimple_dump_cfg (FILE *file, int flags)
2376 if (flags & TDF_DETAILS)
2378 dump_function_header (file, current_function_decl, flags);
2379 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2380 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2381 last_basic_block_for_fn (cfun));
2383 brief_dump_cfg (file, flags | TDF_COMMENT);
2384 fprintf (file, "\n");
2387 if (flags & TDF_STATS)
2388 dump_cfg_stats (file);
2390 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2394 /* Dump CFG statistics on FILE. */
2396 void
2397 dump_cfg_stats (FILE *file)
2399 static long max_num_merged_labels = 0;
2400 unsigned long size, total = 0;
2401 long num_edges;
2402 basic_block bb;
2403 const char * const fmt_str = "%-30s%-13s%12s\n";
2404 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2405 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2406 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2407 const char *funcname = current_function_name ();
2409 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2411 fprintf (file, "---------------------------------------------------------\n");
2412 fprintf (file, fmt_str, "", " Number of ", "Memory");
2413 fprintf (file, fmt_str, "", " instances ", "used ");
2414 fprintf (file, "---------------------------------------------------------\n");
2416 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2417 total += size;
2418 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2419 SCALE (size), LABEL (size));
2421 num_edges = 0;
2422 FOR_EACH_BB_FN (bb, cfun)
2423 num_edges += EDGE_COUNT (bb->succs);
2424 size = num_edges * sizeof (struct edge_def);
2425 total += size;
2426 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2428 fprintf (file, "---------------------------------------------------------\n");
2429 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2430 LABEL (total));
2431 fprintf (file, "---------------------------------------------------------\n");
2432 fprintf (file, "\n");
2434 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2435 max_num_merged_labels = cfg_stats.num_merged_labels;
2437 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2438 cfg_stats.num_merged_labels, max_num_merged_labels);
2440 fprintf (file, "\n");
2444 /* Dump CFG statistics on stderr. Keep extern so that it's always
2445 linked in the final executable. */
2447 DEBUG_FUNCTION void
2448 debug_cfg_stats (void)
2450 dump_cfg_stats (stderr);
2453 /*---------------------------------------------------------------------------
2454 Miscellaneous helpers
2455 ---------------------------------------------------------------------------*/
2457 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2458 flow. Transfers of control flow associated with EH are excluded. */
2460 static bool
2461 call_can_make_abnormal_goto (gimple *t)
2463 /* If the function has no non-local labels, then a call cannot make an
2464 abnormal transfer of control. */
2465 if (!cfun->has_nonlocal_label
2466 && !cfun->calls_setjmp)
2467 return false;
2469 /* Likewise if the call has no side effects. */
2470 if (!gimple_has_side_effects (t))
2471 return false;
2473 /* Likewise if the called function is leaf. */
2474 if (gimple_call_flags (t) & ECF_LEAF)
2475 return false;
2477 return true;
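/* Illustrative sketch (not part of the original file): a call that can
   make an abnormal transfer of control through a nonlocal label.  GNU C
   nested functions may jump to a label of the containing function, so
   the call to helper() below carries an abnormal outgoing edge.  */

int
example_nonlocal (int x)
{
  __label__ out;		/* GNU C: label visible to nested fns */
  void helper (void)
  {
    if (x < 0)
      goto out;			/* nonlocal goto out of the call */
  }
  helper ();
  return x;
 out:
  return -1;
}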
2481 /* Return true if T can make an abnormal transfer of control flow.
2482 Transfers of control flow associated with EH are excluded. */
2484 bool
2485 stmt_can_make_abnormal_goto (gimple *t)
2487 if (computed_goto_p (t))
2488 return true;
2489 if (is_gimple_call (t))
2490 return call_can_make_abnormal_goto (t);
2491 return false;
2495 /* Return true if T represents a stmt that always transfers control. */
2497 bool
2498 is_ctrl_stmt (gimple *t)
2500 switch (gimple_code (t))
2502 case GIMPLE_COND:
2503 case GIMPLE_SWITCH:
2504 case GIMPLE_GOTO:
2505 case GIMPLE_RETURN:
2506 case GIMPLE_RESX:
2507 return true;
2508 default:
2509 return false;
2514 /* Return true if T is a statement that may alter the flow of control
2515 (e.g., a call to a non-returning function). */
2517 bool
2518 is_ctrl_altering_stmt (gimple *t)
2520 gcc_assert (t);
2522 switch (gimple_code (t))
2524 case GIMPLE_CALL:
2525 /* The per-stmt call flag indicates whether the call could alter
2526 control flow. */
2527 if (gimple_call_ctrl_altering_p (t))
2528 return true;
2529 break;
2531 case GIMPLE_EH_DISPATCH:
2532 /* EH_DISPATCH branches to the individual catch handlers at
2533 this level of a try or allowed-exceptions region. It can
2534 fallthru to the next statement as well. */
2535 return true;
2537 case GIMPLE_ASM:
2538 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2539 return true;
2540 break;
2542 CASE_GIMPLE_OMP:
2543 /* OpenMP directives alter control flow. */
2544 return true;
2546 case GIMPLE_TRANSACTION:
2547 /* A transaction start alters control flow. */
2548 return true;
2550 default:
2551 break;
2554 /* If a statement can throw, it alters control flow. */
2555 return stmt_can_throw_internal (t);
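/* Illustrative sketch (not part of the original file): an asm with a
   non-empty label list (gimple_asm_nlabels > 0) alters control flow.
   GNU C asm goto, assuming an x86 target for the template:  */

int
example_asm_goto (int x)
{
  asm goto ("testl %0, %0; jz %l1"
	    : /* no outputs */ : "r" (x) : "cc" : zero);
  return 1;
 zero:
  return 0;
}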
2559 /* Return true if T is a simple local goto. */
2561 bool
2562 simple_goto_p (gimple *t)
2564 return (gimple_code (t) == GIMPLE_GOTO
2565 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2569 /* Return true if STMT should start a new basic block. PREV_STMT is
2570 the statement preceding STMT. It is used when STMT is a label or a
2571 case label. Labels should only start a new basic block if their
2572 previous statement wasn't a label. Otherwise, a sequence of labels
2573 would generate unnecessary basic blocks, each containing only a single
2574 label. */
2576 static inline bool
2577 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2579 if (stmt == NULL)
2580 return false;
2582 /* Labels start a new basic block only if the preceding statement
2583 wasn't a label of the same type. This prevents the creation of
2584 consecutive blocks that have nothing but a single label. */
2585 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2587 /* Nonlocal and computed GOTO targets always start a new block. */
2588 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2589 || FORCED_LABEL (gimple_label_label (label_stmt)))
2590 return true;
2592 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2594 if (DECL_NONLOCAL (gimple_label_label (
2595 as_a <glabel *> (prev_stmt))))
2596 return true;
2598 cfg_stats.num_merged_labels++;
2599 return false;
2601 else
2602 return true;
2604 else if (gimple_code (stmt) == GIMPLE_CALL)
2606 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2607 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2608 start a new block. */
2609 return true;
2610 if (gimple_call_internal_p (stmt, IFN_PHI)
2611 && prev_stmt
2612 && gimple_code (prev_stmt) != GIMPLE_LABEL
2613 && (gimple_code (prev_stmt) != GIMPLE_CALL
2614 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2615 /* PHI nodes start a new block unless preceded by a label
2616 or another PHI. */
2617 return true;
2620 return false;
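/* Illustrative sketch (not part of the original file): the label rules
   above at the source level, using GNU C computed gotos.  Consecutive
   plain labels share one basic block; an address-taken (FORCED_LABEL)
   target must start its own.  */

int
example_labels (int x)
{
  void *tgt = &&forced;		/* GNU C: &&label sets FORCED_LABEL */
  if (x < 0)
    goto *tgt;			/* computed goto */
 merged1:
 merged2:			/* merged with merged1 into one block */
  x++;
 forced:			/* always starts a new basic block */
  return x;
}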
2624 /* Return true if T should end a basic block. */
2626 bool
2627 stmt_ends_bb_p (gimple *t)
2629 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2632 /* Remove block annotations and other data structures. */
2634 void
2635 delete_tree_cfg_annotations (struct function *fn)
2637 vec_free (label_to_block_map_for_fn (fn));
2640 /* Return the virtual phi in BB. */
2642 gphi *
2643 get_virtual_phi (basic_block bb)
2645 for (gphi_iterator gsi = gsi_start_phis (bb);
2646 !gsi_end_p (gsi);
2647 gsi_next (&gsi))
2649 gphi *phi = gsi.phi ();
2651 if (virtual_operand_p (PHI_RESULT (phi)))
2652 return phi;
2655 return NULL;
2658 /* Return the first statement in basic block BB. */
2660 gimple *
2661 first_stmt (basic_block bb)
2663 gimple_stmt_iterator i = gsi_start_bb (bb);
2664 gimple *stmt = NULL;
2666 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2668 gsi_next (&i);
2669 stmt = NULL;
2671 return stmt;
2674 /* Return the first non-label statement in basic block BB. */
2676 static gimple *
2677 first_non_label_stmt (basic_block bb)
2679 gimple_stmt_iterator i = gsi_start_bb (bb);
2680 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2681 gsi_next (&i);
2682 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2685 /* Return the last statement in basic block BB. */
2687 gimple *
2688 last_stmt (basic_block bb)
2690 gimple_stmt_iterator i = gsi_last_bb (bb);
2691 gimple *stmt = NULL;
2693 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2695 gsi_prev (&i);
2696 stmt = NULL;
2698 return stmt;
2701 /* Return the last statement of an otherwise empty block. Return NULL
2702 if the block is totally empty, or if it contains more than one
2703 statement. */
2705 gimple *
2706 last_and_only_stmt (basic_block bb)
2708 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2709 gimple *last, *prev;
2711 if (gsi_end_p (i))
2712 return NULL;
2714 last = gsi_stmt (i);
2715 gsi_prev_nondebug (&i);
2716 if (gsi_end_p (i))
2717 return last;
2719 /* Empty statements should no longer appear in the instruction stream.
2720 Everything that might have appeared before should be deleted by
2721 remove_useless_stmts, and the optimizers should just gsi_remove
2722 instead of smashing with build_empty_stmt.
2724 Thus the only thing that should appear here in a block containing
2725 one executable statement is a label. */
2726 prev = gsi_stmt (i);
2727 if (gimple_code (prev) == GIMPLE_LABEL)
2728 return last;
2729 else
2730 return NULL;
2733 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2735 static void
2736 reinstall_phi_args (edge new_edge, edge old_edge)
2738 edge_var_map *vm;
2739 int i;
2740 gphi_iterator phis;
2742 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2743 if (!v)
2744 return;
2746 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2747 v->iterate (i, &vm) && !gsi_end_p (phis);
2748 i++, gsi_next (&phis))
2750 gphi *phi = phis.phi ();
2751 tree result = redirect_edge_var_map_result (vm);
2752 tree arg = redirect_edge_var_map_def (vm);
2754 gcc_assert (result == gimple_phi_result (phi));
2756 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2759 redirect_edge_var_map_clear (old_edge);
2762 /* Returns the basic block after which the new basic block created
2763 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2764 near its "logical" location. This is of most help to humans looking
2765 at debugging dumps. */
2767 basic_block
2768 split_edge_bb_loc (edge edge_in)
2770 basic_block dest = edge_in->dest;
2771 basic_block dest_prev = dest->prev_bb;
2773 if (dest_prev)
2775 edge e = find_edge (dest_prev, dest);
2776 if (e && !(e->flags & EDGE_COMPLEX))
2777 return edge_in->src;
2779 return dest_prev;
2782 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2783 Abort on abnormal edges. */
2785 static basic_block
2786 gimple_split_edge (edge edge_in)
2788 basic_block new_bb, after_bb, dest;
2789 edge new_edge, e;
2791 /* Abnormal edges cannot be split. */
2792 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2794 dest = edge_in->dest;
2796 after_bb = split_edge_bb_loc (edge_in);
2798 new_bb = create_empty_bb (after_bb);
2799 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2800 new_bb->count = edge_in->count;
2801 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2802 new_edge->probability = REG_BR_PROB_BASE;
2803 new_edge->count = edge_in->count;
2805 e = redirect_edge_and_branch (edge_in, new_bb);
2806 gcc_assert (e == edge_in);
2807 reinstall_phi_args (new_edge, e);
2809 return new_bb;
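/* Illustrative sketch (not part of the original file): splitting an
   edge u -> v in a bare adjacency-list digraph.  A fresh node w is
   spliced in so the path becomes u -> w -> v, which is the core of
   what gimple_split_edge does (minus PHI and profile bookkeeping).  */

struct toy_node { int succ[4]; int n_succ; };	/* toy fixed-degree node */

/* Redirect U's edge to V so it targets the fresh node W, and give W a
   single fall-through edge to V.  */
static void
split_edge_toy (struct toy_node *g, int u, int v, int w)
{
  g[w].succ[0] = v;
  g[w].n_succ = 1;
  for (int i = 0; i < g[u].n_succ; i++)
    if (g[u].succ[i] == v)
      g[u].succ[i] = w;		/* u -> w replaces u -> v */
}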
2813 /* Verify properties of the address expression T with base object BASE. */
2815 static tree
2816 verify_address (tree t, tree base)
2818 bool old_constant;
2819 bool old_side_effects;
2820 bool new_constant;
2821 bool new_side_effects;
2823 old_constant = TREE_CONSTANT (t);
2824 old_side_effects = TREE_SIDE_EFFECTS (t);
2826 recompute_tree_invariant_for_addr_expr (t);
2827 new_side_effects = TREE_SIDE_EFFECTS (t);
2828 new_constant = TREE_CONSTANT (t);
2830 if (old_constant != new_constant)
2832 error ("constant not recomputed when ADDR_EXPR changed");
2833 return t;
2835 if (old_side_effects != new_side_effects)
2837 error ("side effects not recomputed when ADDR_EXPR changed");
2838 return t;
2841 if (!(VAR_P (base)
2842 || TREE_CODE (base) == PARM_DECL
2843 || TREE_CODE (base) == RESULT_DECL))
2844 return NULL_TREE;
2846 if (DECL_GIMPLE_REG_P (base))
2848 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2849 return base;
2852 return NULL_TREE;
2855 /* Callback for walk_tree, check that all elements with address taken are
2856 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2857 inside a PHI node. */
2859 static tree
2860 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2862 tree t = *tp, x;
2864 if (TYPE_P (t))
2865 *walk_subtrees = 0;
2867 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2868 #define CHECK_OP(N, MSG) \
2869 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2870 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2872 switch (TREE_CODE (t))
2874 case SSA_NAME:
2875 if (SSA_NAME_IN_FREE_LIST (t))
2877 error ("SSA name in freelist but still referenced");
2878 return *tp;
2880 break;
2882 case PARM_DECL:
2883 case VAR_DECL:
2884 case RESULT_DECL:
2886 tree context = decl_function_context (t);
2887 if (context != cfun->decl
2888 && !SCOPE_FILE_SCOPE_P (context)
2889 && !TREE_STATIC (t)
2890 && !DECL_EXTERNAL (t))
2892 error ("Local declaration from a different function");
2893 return t;
2896 break;
2898 case INDIRECT_REF:
2899 error ("INDIRECT_REF in gimple IL");
2900 return t;
2902 case MEM_REF:
2903 x = TREE_OPERAND (t, 0);
2904 if (!POINTER_TYPE_P (TREE_TYPE (x))
2905 || !is_gimple_mem_ref_addr (x))
2907 error ("invalid first operand of MEM_REF");
2908 return x;
2910 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2911 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2913 error ("invalid offset operand of MEM_REF");
2914 return TREE_OPERAND (t, 1);
2916 if (TREE_CODE (x) == ADDR_EXPR)
2918 tree va = verify_address (x, TREE_OPERAND (x, 0));
2919 if (va)
2920 return va;
2921 x = TREE_OPERAND (x, 0);
2923 walk_tree (&x, verify_expr, data, NULL);
2924 *walk_subtrees = 0;
2925 break;
2927 case ASSERT_EXPR:
2928 x = fold (ASSERT_EXPR_COND (t));
2929 if (x == boolean_false_node)
2931 error ("ASSERT_EXPR with an always-false condition");
2932 return *tp;
2934 break;
2936 case MODIFY_EXPR:
2937 error ("MODIFY_EXPR not expected while having tuples");
2938 return *tp;
2940 case ADDR_EXPR:
2942 tree tem;
2944 gcc_assert (is_gimple_address (t));
2946 /* Skip any references (they will be checked when we recurse down the
2947 tree) and ensure that any variable used as a prefix is marked
2948 addressable. */
2949 for (x = TREE_OPERAND (t, 0);
2950 handled_component_p (x);
2951 x = TREE_OPERAND (x, 0))
2954 if ((tem = verify_address (t, x)))
2955 return tem;
2957 if (!(VAR_P (x)
2958 || TREE_CODE (x) == PARM_DECL
2959 || TREE_CODE (x) == RESULT_DECL))
2960 return NULL;
2962 if (!TREE_ADDRESSABLE (x))
2964 error ("address taken, but ADDRESSABLE bit not set");
2965 return x;
2968 break;
2971 case COND_EXPR:
2972 x = COND_EXPR_COND (t);
2973 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2975 error ("non-integral used in condition");
2976 return x;
2978 if (!is_gimple_condexpr (x))
2980 error ("invalid conditional operand");
2981 return x;
2983 break;
2985 case NON_LVALUE_EXPR:
2986 case TRUTH_NOT_EXPR:
2987 gcc_unreachable ();
2989 CASE_CONVERT:
2990 case FIX_TRUNC_EXPR:
2991 case FLOAT_EXPR:
2992 case NEGATE_EXPR:
2993 case ABS_EXPR:
2994 case BIT_NOT_EXPR:
2995 CHECK_OP (0, "invalid operand to unary operator");
2996 break;
2998 case REALPART_EXPR:
2999 case IMAGPART_EXPR:
3000 case BIT_FIELD_REF:
3001 if (!is_gimple_reg_type (TREE_TYPE (t)))
3003 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3004 return t;
3007 if (TREE_CODE (t) == BIT_FIELD_REF)
3009 tree t0 = TREE_OPERAND (t, 0);
3010 tree t1 = TREE_OPERAND (t, 1);
3011 tree t2 = TREE_OPERAND (t, 2);
3012 if (!tree_fits_uhwi_p (t1)
3013 || !tree_fits_uhwi_p (t2))
3015 error ("invalid position or size operand to BIT_FIELD_REF");
3016 return t;
3018 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3019 && (TYPE_PRECISION (TREE_TYPE (t))
3020 != tree_to_uhwi (t1)))
3022 error ("integral result type precision does not match "
3023 "field size of BIT_FIELD_REF");
3024 return t;
3026 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3027 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3028 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3029 != tree_to_uhwi (t1)))
3031 error ("mode size of non-integral result does not "
3032 "match field size of BIT_FIELD_REF");
3033 return t;
3035 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3036 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3037 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3039 error ("position plus size exceeds size of referenced object in "
3040 "BIT_FIELD_REF");
3041 return t;
3044 t = TREE_OPERAND (t, 0);
3046 /* Fall-through. */
3047 case COMPONENT_REF:
3048 case ARRAY_REF:
3049 case ARRAY_RANGE_REF:
3050 case VIEW_CONVERT_EXPR:
3051 /* We have a nest of references. Verify that each of the operands
3052 that determine where to reference is either a constant or a variable,
3053 verify that the base is valid, and then show we've already checked
3054 the subtrees. */
3055 while (handled_component_p (t))
3057 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3058 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3059 else if (TREE_CODE (t) == ARRAY_REF
3060 || TREE_CODE (t) == ARRAY_RANGE_REF)
3062 CHECK_OP (1, "invalid array index");
3063 if (TREE_OPERAND (t, 2))
3064 CHECK_OP (2, "invalid array lower bound");
3065 if (TREE_OPERAND (t, 3))
3066 CHECK_OP (3, "invalid array stride");
3068 else if (TREE_CODE (t) == BIT_FIELD_REF
3069 || TREE_CODE (t) == REALPART_EXPR
3070 || TREE_CODE (t) == IMAGPART_EXPR)
3072 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3073 "REALPART_EXPR");
3074 return t;
3077 t = TREE_OPERAND (t, 0);
3080 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3082 error ("invalid reference prefix");
3083 return t;
3085 walk_tree (&t, verify_expr, data, NULL);
3086 *walk_subtrees = 0;
3087 break;
3088 case PLUS_EXPR:
3089 case MINUS_EXPR:
3090 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3091 should be done using POINTER_PLUS_EXPR. */
3092 if (POINTER_TYPE_P (TREE_TYPE (t)))
3094 error ("invalid operand to plus/minus, type is a pointer");
3095 return t;
3097 CHECK_OP (0, "invalid operand to binary operator");
3098 CHECK_OP (1, "invalid operand to binary operator");
3099 break;
3101 case POINTER_PLUS_EXPR:
3102 /* Check to make sure the first operand is a pointer or reference type. */
3103 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3105 error ("invalid operand to pointer plus, first operand is not a pointer");
3106 return t;
3108 /* Check to make sure the second operand is a ptrofftype. */
3109 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3111 error ("invalid operand to pointer plus, second operand is not an "
3112 "integer type of appropriate width");
3113 return t;
3115 /* FALLTHROUGH */
3116 case LT_EXPR:
3117 case LE_EXPR:
3118 case GT_EXPR:
3119 case GE_EXPR:
3120 case EQ_EXPR:
3121 case NE_EXPR:
3122 case UNORDERED_EXPR:
3123 case ORDERED_EXPR:
3124 case UNLT_EXPR:
3125 case UNLE_EXPR:
3126 case UNGT_EXPR:
3127 case UNGE_EXPR:
3128 case UNEQ_EXPR:
3129 case LTGT_EXPR:
3130 case MULT_EXPR:
3131 case TRUNC_DIV_EXPR:
3132 case CEIL_DIV_EXPR:
3133 case FLOOR_DIV_EXPR:
3134 case ROUND_DIV_EXPR:
3135 case TRUNC_MOD_EXPR:
3136 case CEIL_MOD_EXPR:
3137 case FLOOR_MOD_EXPR:
3138 case ROUND_MOD_EXPR:
3139 case RDIV_EXPR:
3140 case EXACT_DIV_EXPR:
3141 case MIN_EXPR:
3142 case MAX_EXPR:
3143 case LSHIFT_EXPR:
3144 case RSHIFT_EXPR:
3145 case LROTATE_EXPR:
3146 case RROTATE_EXPR:
3147 case BIT_IOR_EXPR:
3148 case BIT_XOR_EXPR:
3149 case BIT_AND_EXPR:
3150 CHECK_OP (0, "invalid operand to binary operator");
3151 CHECK_OP (1, "invalid operand to binary operator");
3152 break;
3154 case CONSTRUCTOR:
3155 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3156 *walk_subtrees = 0;
3157 break;
3159 case CASE_LABEL_EXPR:
3160 if (CASE_CHAIN (t))
3162 error ("invalid CASE_CHAIN");
3163 return t;
3165 break;
3167 default:
3168 break;
3170 return NULL;
3172 #undef CHECK_OP
3176 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3177 Returns true if there is an error, otherwise false. */
3179 static bool
3180 verify_types_in_gimple_min_lval (tree expr)
3182 tree op;
3184 if (is_gimple_id (expr))
3185 return false;
3187 if (TREE_CODE (expr) != TARGET_MEM_REF
3188 && TREE_CODE (expr) != MEM_REF)
3190 error ("invalid expression for min lvalue");
3191 return true;
3194 /* TARGET_MEM_REFs are strange beasts. */
3195 if (TREE_CODE (expr) == TARGET_MEM_REF)
3196 return false;
3198 op = TREE_OPERAND (expr, 0);
3199 if (!is_gimple_val (op))
3201 error ("invalid operand in indirect reference");
3202 debug_generic_stmt (op);
3203 return true;
3205 /* Memory references now generally can involve a value conversion. */
3207 return false;
3210 /* Verify if EXPR is a valid GIMPLE reference expression. If
3211 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3212 if there is an error, otherwise false. */
3214 static bool
3215 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3217 while (handled_component_p (expr))
3219 tree op = TREE_OPERAND (expr, 0);
3221 if (TREE_CODE (expr) == ARRAY_REF
3222 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3224 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3225 || (TREE_OPERAND (expr, 2)
3226 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3227 || (TREE_OPERAND (expr, 3)
3228 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3230 error ("invalid operands to array reference");
3231 debug_generic_stmt (expr);
3232 return true;
3236 /* Verify if the reference array element types are compatible. */
3237 if (TREE_CODE (expr) == ARRAY_REF
3238 && !useless_type_conversion_p (TREE_TYPE (expr),
3239 TREE_TYPE (TREE_TYPE (op))))
3241 error ("type mismatch in array reference");
3242 debug_generic_stmt (TREE_TYPE (expr));
3243 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3244 return true;
3246 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3247 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3248 TREE_TYPE (TREE_TYPE (op))))
3250 error ("type mismatch in array range reference");
3251 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3252 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3253 return true;
3256 if ((TREE_CODE (expr) == REALPART_EXPR
3257 || TREE_CODE (expr) == IMAGPART_EXPR)
3258 && !useless_type_conversion_p (TREE_TYPE (expr),
3259 TREE_TYPE (TREE_TYPE (op))))
3261 error ("type mismatch in real/imagpart reference");
3262 debug_generic_stmt (TREE_TYPE (expr));
3263 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3264 return true;
3267 if (TREE_CODE (expr) == COMPONENT_REF
3268 && !useless_type_conversion_p (TREE_TYPE (expr),
3269 TREE_TYPE (TREE_OPERAND (expr, 1))))
3271 error ("type mismatch in component reference");
3272 debug_generic_stmt (TREE_TYPE (expr));
3273 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3274 return true;
3277 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3279 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3280 that their operand is not an SSA name or an invariant when
3281 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3282 bug). Otherwise there is nothing to verify, gross mismatches at
3283 most invoke undefined behavior. */
3284 if (require_lvalue
3285 && (TREE_CODE (op) == SSA_NAME
3286 || is_gimple_min_invariant (op)))
3288 error ("conversion of an SSA_NAME on the left hand side");
3289 debug_generic_stmt (expr);
3290 return true;
3292 else if (TREE_CODE (op) == SSA_NAME
3293 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3295 error ("conversion of register to a different size");
3296 debug_generic_stmt (expr);
3297 return true;
3299 else if (!handled_component_p (op))
3300 return false;
3303 expr = op;
3306 if (TREE_CODE (expr) == MEM_REF)
3308 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3310 error ("invalid address operand in MEM_REF");
3311 debug_generic_stmt (expr);
3312 return true;
3314 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3315 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3317 error ("invalid offset operand in MEM_REF");
3318 debug_generic_stmt (expr);
3319 return true;
3322 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3324 if (!TMR_BASE (expr)
3325 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3327 error ("invalid address operand in TARGET_MEM_REF");
3328 return true;
3330 if (!TMR_OFFSET (expr)
3331 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3332 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3334 error ("invalid offset operand in TARGET_MEM_REF");
3335 debug_generic_stmt (expr);
3336 return true;
3340 return ((require_lvalue || !is_gimple_min_invariant (expr))
3341 && verify_types_in_gimple_min_lval (expr));
3344 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3345 list of pointer-to types is trivially convertible to DEST. */
3347 static bool
3348 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3350 tree src;
3352 if (!TYPE_POINTER_TO (src_obj))
3353 return true;
3355 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3356 if (useless_type_conversion_p (dest, src))
3357 return true;
3359 return false;
3362 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3363 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3365 static bool
3366 valid_fixed_convert_types_p (tree type1, tree type2)
3368 return (FIXED_POINT_TYPE_P (type1)
3369 && (INTEGRAL_TYPE_P (type2)
3370 || SCALAR_FLOAT_TYPE_P (type2)
3371 || FIXED_POINT_TYPE_P (type2)));
3374 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3375 is a problem, otherwise false. */
3377 static bool
3378 verify_gimple_call (gcall *stmt)
3380 tree fn = gimple_call_fn (stmt);
3381 tree fntype, fndecl;
3382 unsigned i;
3384 if (gimple_call_internal_p (stmt))
3386 if (fn)
3388 error ("gimple call has two targets");
3389 debug_generic_stmt (fn);
3390 return true;
3392 /* FIXME: for passing a label as an arg to the internal fn PHI from the GIMPLE FE. */
3393 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3395 return false;
3398 else
3400 if (!fn)
3402 error ("gimple call has no target");
3403 return true;
3407 if (fn && !is_gimple_call_addr (fn))
3409 error ("invalid function in gimple call");
3410 debug_generic_stmt (fn);
3411 return true;
3414 if (fn
3415 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3416 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3417 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3419 error ("non-function in gimple call");
3420 return true;
3423 fndecl = gimple_call_fndecl (stmt);
3424 if (fndecl
3425 && TREE_CODE (fndecl) == FUNCTION_DECL
3426 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3427 && !DECL_PURE_P (fndecl)
3428 && !TREE_READONLY (fndecl))
3430 error ("invalid pure const state for function");
3431 return true;
3434 tree lhs = gimple_call_lhs (stmt);
3435 if (lhs
3436 && (!is_gimple_lvalue (lhs)
3437 || verify_types_in_gimple_reference (lhs, true)))
3439 error ("invalid LHS in gimple call");
3440 return true;
3443 if (gimple_call_ctrl_altering_p (stmt)
3444 && gimple_call_noreturn_p (stmt)
3445 && should_remove_lhs_p (lhs))
3447 error ("LHS in noreturn call");
3448 return true;
3451 fntype = gimple_call_fntype (stmt);
3452 if (fntype
3453 && lhs
3454 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3455 /* ??? At least C++ misses conversions at assignments from
3456 void * call results.
3457 ??? Java is completely off. Especially with functions
3458 returning java.lang.Object.
3459 For now simply allow arbitrary pointer type conversions. */
3460 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3461 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3463 error ("invalid conversion in gimple call");
3464 debug_generic_stmt (TREE_TYPE (lhs));
3465 debug_generic_stmt (TREE_TYPE (fntype));
3466 return true;
3469 if (gimple_call_chain (stmt)
3470 && !is_gimple_val (gimple_call_chain (stmt)))
3472 error ("invalid static chain in gimple call");
3473 debug_generic_stmt (gimple_call_chain (stmt));
3474 return true;
3477 /* If there is a static chain argument, the call should either be
3478 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3479 if (gimple_call_chain (stmt)
3480 && fndecl
3481 && !DECL_STATIC_CHAIN (fndecl))
3483 error ("static chain with function that doesn%'t use one");
3484 return true;
3487 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3489 switch (DECL_FUNCTION_CODE (fndecl))
3491 case BUILT_IN_UNREACHABLE:
3492 case BUILT_IN_TRAP:
3493 if (gimple_call_num_args (stmt) > 0)
3495 /* Built-in unreachable with parameters might not be caught by
3496 the undefined behavior sanitizer. Front ends do check that users do not
3497 call them that way, but we also produce calls to
3498 __builtin_unreachable internally, for example when IPA figures
3499 out a call cannot happen in a legal program. In such cases,
3500 we must make sure arguments are stripped off. */
3501 error ("__builtin_unreachable or __builtin_trap call with "
3502 "arguments");
3503 return true;
3505 break;
3506 default:
3507 break;
3511 /* ??? The C frontend passes unpromoted arguments in case it
3512 didn't see a function declaration before the call. So for now
3513 leave the call arguments mostly unverified. Once we gimplify
3514 unit-at-a-time we have a chance to fix this. */
3516 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3518 tree arg = gimple_call_arg (stmt, i);
3519 if ((is_gimple_reg_type (TREE_TYPE (arg))
3520 && !is_gimple_val (arg))
3521 || (!is_gimple_reg_type (TREE_TYPE (arg))
3522 && !is_gimple_lvalue (arg)))
3524 error ("invalid argument to gimple call");
3525 debug_generic_expr (arg);
3526 return true;
3530 return false;
3533 /* Verifies the gimple comparison with the result type TYPE and
3534 the operands OP0 and OP1, comparison code is CODE. */
3536 static bool
3537 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3539 tree op0_type = TREE_TYPE (op0);
3540 tree op1_type = TREE_TYPE (op1);
3542 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3544 error ("invalid operands in gimple comparison");
3545 return true;
3548 /* For comparisons we do not have the operation's type as the
3549 effective type the comparison is carried out in. Instead
3550 we require that either the first operand is trivially
3551 convertible into the second, or the other way around.
3552 Because we special-case pointers to void we allow
3553 comparisons of pointers with the same mode as well. */
3554 if (!useless_type_conversion_p (op0_type, op1_type)
3555 && !useless_type_conversion_p (op1_type, op0_type)
3556 && (!POINTER_TYPE_P (op0_type)
3557 || !POINTER_TYPE_P (op1_type)
3558 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3560 error ("mismatching comparison operand types");
3561 debug_generic_expr (op0_type);
3562 debug_generic_expr (op1_type);
3563 return true;
3566 /* The resulting type of a comparison may be an effective boolean type. */
3567 if (INTEGRAL_TYPE_P (type)
3568 && (TREE_CODE (type) == BOOLEAN_TYPE
3569 || TYPE_PRECISION (type) == 1))
3571 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3572 || TREE_CODE (op1_type) == VECTOR_TYPE)
3573 && code != EQ_EXPR && code != NE_EXPR
3574 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3575 && !VECTOR_INTEGER_TYPE_P (op0_type))
3577 error ("unsupported operation or type for vector comparison"
3578 " returning a boolean");
3579 debug_generic_expr (op0_type);
3580 debug_generic_expr (op1_type);
3581 return true;
3584 /* Or a boolean vector type with the same element count
3585 as the comparison operand types. */
3586 else if (TREE_CODE (type) == VECTOR_TYPE
3587 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3589 if (TREE_CODE (op0_type) != VECTOR_TYPE
3590 || TREE_CODE (op1_type) != VECTOR_TYPE)
3592 error ("non-vector operands in vector comparison");
3593 debug_generic_expr (op0_type);
3594 debug_generic_expr (op1_type);
3595 return true;
3598 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3600 error ("invalid vector comparison resulting type");
3601 debug_generic_expr (type);
3602 return true;
3605 else
3607 error ("bogus comparison result type");
3608 debug_generic_expr (type);
3609 return true;
3612 return false;
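/* Illustrative sketch (not part of the original file): the two result
   shapes accepted above, in GIMPLE-dump terms.  For scalar operands the
   result is an effective boolean:

     _1 = a_2 > b_3;		// _1 has boolean (or 1-bit integral) type

   while for vector operands the result is a boolean vector with the
   same element count as the operands:

     mask_4 = v1_5 == v2_6;	// e.g. V4SI operands -> 4-element bool vector  */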
3615 /* Verify a gimple assignment statement STMT with an unary rhs.
3616 Returns true if anything is wrong. */
3618 static bool
3619 verify_gimple_assign_unary (gassign *stmt)
3621 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3622 tree lhs = gimple_assign_lhs (stmt);
3623 tree lhs_type = TREE_TYPE (lhs);
3624 tree rhs1 = gimple_assign_rhs1 (stmt);
3625 tree rhs1_type = TREE_TYPE (rhs1);
3627 if (!is_gimple_reg (lhs))
3629 error ("non-register as LHS of unary operation");
3630 return true;
3633 if (!is_gimple_val (rhs1))
3635 error ("invalid operand in unary operation");
3636 return true;
3639 /* First handle conversions. */
3640 switch (rhs_code)
3642 CASE_CONVERT:
3644 /* Allow conversions from pointer type to integral type only if
3645 there is no sign or zero extension involved.
3646 For targets where the precision of ptrofftype doesn't match that
3647 of pointers we need to allow arbitrary conversions to ptrofftype. */
3648 if ((POINTER_TYPE_P (lhs_type)
3649 && INTEGRAL_TYPE_P (rhs1_type))
3650 || (POINTER_TYPE_P (rhs1_type)
3651 && INTEGRAL_TYPE_P (lhs_type)
3652 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3653 || ptrofftype_p (sizetype))))
3654 return false;
3656 /* Allow conversion from integral to offset type and vice versa. */
3657 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3658 && INTEGRAL_TYPE_P (rhs1_type))
3659 || (INTEGRAL_TYPE_P (lhs_type)
3660 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3661 return false;
3663 /* Otherwise assert we are converting between types of the
3664 same kind. */
3665 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3667 error ("invalid types in nop conversion");
3668 debug_generic_expr (lhs_type);
3669 debug_generic_expr (rhs1_type);
3670 return true;
3673 return false;
3676 case ADDR_SPACE_CONVERT_EXPR:
3678 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3679 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3680 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3682 error ("invalid types in address space conversion");
3683 debug_generic_expr (lhs_type);
3684 debug_generic_expr (rhs1_type);
3685 return true;
3688 return false;
3691 case FIXED_CONVERT_EXPR:
3693 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3694 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3696 error ("invalid types in fixed-point conversion");
3697 debug_generic_expr (lhs_type);
3698 debug_generic_expr (rhs1_type);
3699 return true;
3702 return false;
3705 case FLOAT_EXPR:
3707 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3708 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3709 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3711 error ("invalid types in conversion to floating point");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 return true;
3717 return false;
3720 case FIX_TRUNC_EXPR:
3722 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3723 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3724 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3726 error ("invalid types in conversion to integer");
3727 debug_generic_expr (lhs_type);
3728 debug_generic_expr (rhs1_type);
3729 return true;
3732 return false;
3734 case REDUC_MAX_EXPR:
3735 case REDUC_MIN_EXPR:
3736 case REDUC_PLUS_EXPR:
3737 if (!VECTOR_TYPE_P (rhs1_type)
3738 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3740 error ("reduction should convert from vector to element type");
3741 debug_generic_expr (lhs_type);
3742 debug_generic_expr (rhs1_type);
3743 return true;
3745 return false;
3747 case VEC_UNPACK_HI_EXPR:
3748 case VEC_UNPACK_LO_EXPR:
3749 case VEC_UNPACK_FLOAT_HI_EXPR:
3750 case VEC_UNPACK_FLOAT_LO_EXPR:
3751 /* FIXME. */
3752 return false;
3754 case NEGATE_EXPR:
3755 case ABS_EXPR:
3756 case BIT_NOT_EXPR:
3757 case PAREN_EXPR:
3758 case CONJ_EXPR:
3759 break;
3761 default:
3762 gcc_unreachable ();
3765 /* For the remaining codes assert there is no conversion involved. */
3766 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3768 error ("non-trivial conversion in unary operation");
3769 debug_generic_expr (lhs_type);
3770 debug_generic_expr (rhs1_type);
3771 return true;
3774 return false;
3777 /* Verify a gimple assignment statement STMT with a binary rhs.
3778 Returns true if anything is wrong. */
3780 static bool
3781 verify_gimple_assign_binary (gassign *stmt)
3783 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3784 tree lhs = gimple_assign_lhs (stmt);
3785 tree lhs_type = TREE_TYPE (lhs);
3786 tree rhs1 = gimple_assign_rhs1 (stmt);
3787 tree rhs1_type = TREE_TYPE (rhs1);
3788 tree rhs2 = gimple_assign_rhs2 (stmt);
3789 tree rhs2_type = TREE_TYPE (rhs2);
3791 if (!is_gimple_reg (lhs))
3793 error ("non-register as LHS of binary operation");
3794 return true;
3797 if (!is_gimple_val (rhs1)
3798 || !is_gimple_val (rhs2))
3800 error ("invalid operands in binary operation");
3801 return true;
3804 /* First handle operations that involve different types. */
3805 switch (rhs_code)
3807 case COMPLEX_EXPR:
3809 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3810 || !(INTEGRAL_TYPE_P (rhs1_type)
3811 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3812 || !(INTEGRAL_TYPE_P (rhs2_type)
3813 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3815 error ("type mismatch in complex expression");
3816 debug_generic_expr (lhs_type);
3817 debug_generic_expr (rhs1_type);
3818 debug_generic_expr (rhs2_type);
3819 return true;
3822 return false;
3825 case LSHIFT_EXPR:
3826 case RSHIFT_EXPR:
3827 case LROTATE_EXPR:
3828 case RROTATE_EXPR:
3830 /* Shifts and rotates are ok on integral types, fixed-point
3831 types, and integer vector types. */
3832 if ((!INTEGRAL_TYPE_P (rhs1_type)
3833 && !FIXED_POINT_TYPE_P (rhs1_type)
3834 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3835 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3836 || (!INTEGRAL_TYPE_P (rhs2_type)
3837 /* Vector shifts of vectors are also ok. */
3838 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3839 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3840 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3841 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3842 || !useless_type_conversion_p (lhs_type, rhs1_type))
3844 error ("type mismatch in shift expression");
3845 debug_generic_expr (lhs_type);
3846 debug_generic_expr (rhs1_type);
3847 debug_generic_expr (rhs2_type);
3848 return true;
3851 return false;
3854 case WIDEN_LSHIFT_EXPR:
3856 if (!INTEGRAL_TYPE_P (lhs_type)
3857 || !INTEGRAL_TYPE_P (rhs1_type)
3858 || TREE_CODE (rhs2) != INTEGER_CST
3859 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3861 error ("type mismatch in widening vector shift expression");
3862 debug_generic_expr (lhs_type);
3863 debug_generic_expr (rhs1_type);
3864 debug_generic_expr (rhs2_type);
3865 return true;
3868 return false;
3871 case VEC_WIDEN_LSHIFT_HI_EXPR:
3872 case VEC_WIDEN_LSHIFT_LO_EXPR:
3874 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3875 || TREE_CODE (lhs_type) != VECTOR_TYPE
3876 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3877 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3878 || TREE_CODE (rhs2) != INTEGER_CST
3879 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3880 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3882 error ("type mismatch in widening vector shift expression");
3883 debug_generic_expr (lhs_type);
3884 debug_generic_expr (rhs1_type);
3885 debug_generic_expr (rhs2_type);
3886 return true;
3889 return false;
3892 case PLUS_EXPR:
3893 case MINUS_EXPR:
3895 tree lhs_etype = lhs_type;
3896 tree rhs1_etype = rhs1_type;
3897 tree rhs2_etype = rhs2_type;
3898 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3900 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3901 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3903 error ("invalid non-vector operands to vector valued plus");
3904 return true;
3906 lhs_etype = TREE_TYPE (lhs_type);
3907 rhs1_etype = TREE_TYPE (rhs1_type);
3908 rhs2_etype = TREE_TYPE (rhs2_type);
3910 if (POINTER_TYPE_P (lhs_etype)
3911 || POINTER_TYPE_P (rhs1_etype)
3912 || POINTER_TYPE_P (rhs2_etype))
3914 error ("invalid (pointer) operands to plus/minus");
3915 return true;
3918 /* Continue with generic binary expression handling. */
3919 break;
3922 case POINTER_PLUS_EXPR:
3924 if (!POINTER_TYPE_P (rhs1_type)
3925 || !useless_type_conversion_p (lhs_type, rhs1_type)
3926 || !ptrofftype_p (rhs2_type))
3928 error ("type mismatch in pointer plus expression");
3929 debug_generic_stmt (lhs_type);
3930 debug_generic_stmt (rhs1_type);
3931 debug_generic_stmt (rhs2_type);
3932 return true;
3935 return false;
3938 case TRUTH_ANDIF_EXPR:
3939 case TRUTH_ORIF_EXPR:
3940 case TRUTH_AND_EXPR:
3941 case TRUTH_OR_EXPR:
3942 case TRUTH_XOR_EXPR:
3944 gcc_unreachable ();
3946 case LT_EXPR:
3947 case LE_EXPR:
3948 case GT_EXPR:
3949 case GE_EXPR:
3950 case EQ_EXPR:
3951 case NE_EXPR:
3952 case UNORDERED_EXPR:
3953 case ORDERED_EXPR:
3954 case UNLT_EXPR:
3955 case UNLE_EXPR:
3956 case UNGT_EXPR:
3957 case UNGE_EXPR:
3958 case UNEQ_EXPR:
3959 case LTGT_EXPR:
3960 /* Comparisons are also binary, but the result type is not
3961 connected to the operand types. */
3962 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3964 case WIDEN_MULT_EXPR:
3965 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3966 return true;
3967 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3968 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3970 case WIDEN_SUM_EXPR:
3971 case VEC_WIDEN_MULT_HI_EXPR:
3972 case VEC_WIDEN_MULT_LO_EXPR:
3973 case VEC_WIDEN_MULT_EVEN_EXPR:
3974 case VEC_WIDEN_MULT_ODD_EXPR:
3975 case VEC_PACK_TRUNC_EXPR:
3976 case VEC_PACK_SAT_EXPR:
3977 case VEC_PACK_FIX_TRUNC_EXPR:
3978 /* FIXME. */
3979 return false;
3981 case MULT_EXPR:
3982 case MULT_HIGHPART_EXPR:
3983 case TRUNC_DIV_EXPR:
3984 case CEIL_DIV_EXPR:
3985 case FLOOR_DIV_EXPR:
3986 case ROUND_DIV_EXPR:
3987 case TRUNC_MOD_EXPR:
3988 case CEIL_MOD_EXPR:
3989 case FLOOR_MOD_EXPR:
3990 case ROUND_MOD_EXPR:
3991 case RDIV_EXPR:
3992 case EXACT_DIV_EXPR:
3993 case MIN_EXPR:
3994 case MAX_EXPR:
3995 case BIT_IOR_EXPR:
3996 case BIT_XOR_EXPR:
3997 case BIT_AND_EXPR:
3998 /* Continue with generic binary expression handling. */
3999 break;
4001 default:
4002 gcc_unreachable ();
4005 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4006 || !useless_type_conversion_p (lhs_type, rhs2_type))
4008 error ("type mismatch in binary expression");
4009 debug_generic_stmt (lhs_type);
4010 debug_generic_stmt (rhs1_type);
4011 debug_generic_stmt (rhs2_type);
4012 return true;
4015 return false;
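/* Illustrative sketch (not part of the original file): the
   POINTER_PLUS_EXPR rule above at the source level.  C pointer
   arithmetic such as

     int *q = p + i;

   is gimplified so the first operand stays a pointer and the second is
   a ptrofftype (sizetype) byte offset, roughly:

     _1 = (sizetype) i_2;
     _3 = _1 * 4;		// element size in bytes
     q_4 = p_5 + _3;		// POINTER_PLUS_EXPR  */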
4018 /* Verify a gimple assignment statement STMT with a ternary rhs.
4019 Returns true if anything is wrong. */
4021 static bool
4022 verify_gimple_assign_ternary (gassign *stmt)
4024 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4025 tree lhs = gimple_assign_lhs (stmt);
4026 tree lhs_type = TREE_TYPE (lhs);
4027 tree rhs1 = gimple_assign_rhs1 (stmt);
4028 tree rhs1_type = TREE_TYPE (rhs1);
4029 tree rhs2 = gimple_assign_rhs2 (stmt);
4030 tree rhs2_type = TREE_TYPE (rhs2);
4031 tree rhs3 = gimple_assign_rhs3 (stmt);
4032 tree rhs3_type = TREE_TYPE (rhs3);
4034 if (!is_gimple_reg (lhs))
4036 error ("non-register as LHS of ternary operation");
4037 return true;
4040 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4041 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4042 || !is_gimple_val (rhs2)
4043 || !is_gimple_val (rhs3))
4045 error ("invalid operands in ternary operation");
4046 return true;
4049 /* First handle operations that involve different types. */
4050 switch (rhs_code)
4052 case WIDEN_MULT_PLUS_EXPR:
4053 case WIDEN_MULT_MINUS_EXPR:
4054 if ((!INTEGRAL_TYPE_P (rhs1_type)
4055 && !FIXED_POINT_TYPE_P (rhs1_type))
4056 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4057 || !useless_type_conversion_p (lhs_type, rhs3_type)
4058 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4059 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4061 error ("type mismatch in widening multiply-accumulate expression");
4062 debug_generic_expr (lhs_type);
4063 debug_generic_expr (rhs1_type);
4064 debug_generic_expr (rhs2_type);
4065 debug_generic_expr (rhs3_type);
4066 return true;
4068 break;
4070 case FMA_EXPR:
4071 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4072 || !useless_type_conversion_p (lhs_type, rhs2_type)
4073 || !useless_type_conversion_p (lhs_type, rhs3_type))
4075 error ("type mismatch in fused multiply-add expression");
4076 debug_generic_expr (lhs_type);
4077 debug_generic_expr (rhs1_type);
4078 debug_generic_expr (rhs2_type);
4079 debug_generic_expr (rhs3_type);
4080 return true;
4082 break;
4084 case VEC_COND_EXPR:
4085 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4086 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4087 != TYPE_VECTOR_SUBPARTS (lhs_type))
4089 error ("the first argument of a VEC_COND_EXPR must be of a "
4090 "boolean vector type of the same number of elements "
4091 "as the result");
4092 debug_generic_expr (lhs_type);
4093 debug_generic_expr (rhs1_type);
4094 return true;
4096 /* Fallthrough. */
4097 case COND_EXPR:
4098 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4099 || !useless_type_conversion_p (lhs_type, rhs3_type))
4101 error ("type mismatch in conditional expression");
4102 debug_generic_expr (lhs_type);
4103 debug_generic_expr (rhs2_type);
4104 debug_generic_expr (rhs3_type);
4105 return true;
4107 break;
4109 case VEC_PERM_EXPR:
4110 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4111 || !useless_type_conversion_p (lhs_type, rhs2_type))
4113 error ("type mismatch in vector permute expression");
4114 debug_generic_expr (lhs_type);
4115 debug_generic_expr (rhs1_type);
4116 debug_generic_expr (rhs2_type);
4117 debug_generic_expr (rhs3_type);
4118 return true;
4121 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4122 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4123 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4125 error ("vector types expected in vector permute expression");
4126 debug_generic_expr (lhs_type);
4127 debug_generic_expr (rhs1_type);
4128 debug_generic_expr (rhs2_type);
4129 debug_generic_expr (rhs3_type);
4130 return true;
4133 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4134 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4135 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4136 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4137 != TYPE_VECTOR_SUBPARTS (lhs_type))
4139 error ("vectors with different element number found "
4140 "in vector permute expression");
4141 debug_generic_expr (lhs_type);
4142 debug_generic_expr (rhs1_type);
4143 debug_generic_expr (rhs2_type);
4144 debug_generic_expr (rhs3_type);
4145 return true;
4148 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4149 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4150 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4152 error ("invalid mask type in vector permute expression");
4153 debug_generic_expr (lhs_type);
4154 debug_generic_expr (rhs1_type);
4155 debug_generic_expr (rhs2_type);
4156 debug_generic_expr (rhs3_type);
4157 return true;
4160 return false;
4162 case SAD_EXPR:
4163 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4164 || !useless_type_conversion_p (lhs_type, rhs3_type)
4165 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4166 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4168 error ("type mismatch in sad expression");
4169 debug_generic_expr (lhs_type);
4170 debug_generic_expr (rhs1_type);
4171 debug_generic_expr (rhs2_type);
4172 debug_generic_expr (rhs3_type);
4173 return true;
4176 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4177 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4178 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4180 error ("vector types expected in sad expression");
4181 debug_generic_expr (lhs_type);
4182 debug_generic_expr (rhs1_type);
4183 debug_generic_expr (rhs2_type);
4184 debug_generic_expr (rhs3_type);
4185 return true;
4188 return false;
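/* An illustrative sketch of the BIT_INSERT_EXPR checks that follow
   (the SSA names are invented, not taken from any real dump):

     _3 = BIT_INSERT_EXPR <_1, _2, 32>;

   with _1 of type V4SI and _2 of type SI replaces vector element 1,
   because bit position 32 is a multiple of the 32-bit element size;
   the same statement with position 40 would hit the
   "vector insertion not at element boundary" error below.  */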
4190 case BIT_INSERT_EXPR:
4191 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4193 error ("type mismatch in BIT_INSERT_EXPR");
4194 debug_generic_expr (lhs_type);
4195 debug_generic_expr (rhs1_type);
4196 return true;
4198 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4199 && INTEGRAL_TYPE_P (rhs2_type))
4200 || (VECTOR_TYPE_P (rhs1_type)
4201 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4203 error ("not allowed type combination in BIT_INSERT_EXPR");
4204 debug_generic_expr (rhs1_type);
4205 debug_generic_expr (rhs2_type);
4206 return true;
4208 if (! tree_fits_uhwi_p (rhs3)
4209 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4211 error ("invalid position or size in BIT_INSERT_EXPR");
4212 return true;
4214 if (INTEGRAL_TYPE_P (rhs1_type))
4216 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4217 if (bitpos >= TYPE_PRECISION (rhs1_type)
4218 || (bitpos + TYPE_PRECISION (rhs2_type)
4219 > TYPE_PRECISION (rhs1_type)))
4221 error ("insertion out of range in BIT_INSERT_EXPR");
4222 return true;
4225 else if (VECTOR_TYPE_P (rhs1_type))
4227 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4228 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4229 if (bitpos % bitsize != 0)
4231 error ("vector insertion not at element boundary");
4232 return true;
4235 return false;
4237 case DOT_PROD_EXPR:
4238 case REALIGN_LOAD_EXPR:
4239 /* FIXME. */
4240 return false;
4242 default:
4243 gcc_unreachable ();
4245 return false;
4248 /* Verify a gimple assignment statement STMT with a single rhs.
4249 Returns true if anything is wrong. */
4251 static bool
4252 verify_gimple_assign_single (gassign *stmt)
4254 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4255 tree lhs = gimple_assign_lhs (stmt);
4256 tree lhs_type = TREE_TYPE (lhs);
4257 tree rhs1 = gimple_assign_rhs1 (stmt);
4258 tree rhs1_type = TREE_TYPE (rhs1);
4259 bool res = false;
4261 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4263 error ("non-trivial conversion at assignment");
4264 debug_generic_expr (lhs_type);
4265 debug_generic_expr (rhs1_type);
4266 return true;
4269 if (gimple_clobber_p (stmt)
4270 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4272 error ("non-decl/MEM_REF LHS in clobber statement");
4273 debug_generic_expr (lhs);
4274 return true;
4277 if (handled_component_p (lhs)
4278 || TREE_CODE (lhs) == MEM_REF
4279 || TREE_CODE (lhs) == TARGET_MEM_REF)
4280 res |= verify_types_in_gimple_reference (lhs, true);
4282 /* Special codes we cannot handle via their class. */
4283 switch (rhs_code)
4285 case ADDR_EXPR:
4287 tree op = TREE_OPERAND (rhs1, 0);
4288 if (!is_gimple_addressable (op))
4290 error ("invalid operand in unary expression");
4291 return true;
4294 /* Technically there is no longer a need for matching types, but
4295 gimple hygiene asks for this check.  In LTO we can combine
4296 incompatible units and thus end up with addresses
4297 of globals that change their type to a common one. */
4298 if (!in_lto_p
4299 && !types_compatible_p (TREE_TYPE (op),
4300 TREE_TYPE (TREE_TYPE (rhs1)))
4301 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4302 TREE_TYPE (op)))
4304 error ("type mismatch in address expression");
4305 debug_generic_stmt (TREE_TYPE (rhs1));
4306 debug_generic_stmt (TREE_TYPE (op));
4307 return true;
4310 return verify_types_in_gimple_reference (op, true);
4313 /* tcc_reference */
4314 case INDIRECT_REF:
4315 error ("INDIRECT_REF in gimple IL");
4316 return true;
4318 case COMPONENT_REF:
4319 case BIT_FIELD_REF:
4320 case ARRAY_REF:
4321 case ARRAY_RANGE_REF:
4322 case VIEW_CONVERT_EXPR:
4323 case REALPART_EXPR:
4324 case IMAGPART_EXPR:
4325 case TARGET_MEM_REF:
4326 case MEM_REF:
4327 if (!is_gimple_reg (lhs)
4328 && is_gimple_reg_type (TREE_TYPE (lhs)))
4330 error ("invalid rhs for gimple memory store");
4331 debug_generic_stmt (lhs);
4332 debug_generic_stmt (rhs1);
4333 return true;
4335 return res || verify_types_in_gimple_reference (rhs1, false);
4337 /* tcc_constant */
4338 case SSA_NAME:
4339 case INTEGER_CST:
4340 case REAL_CST:
4341 case FIXED_CST:
4342 case COMPLEX_CST:
4343 case VECTOR_CST:
4344 case STRING_CST:
4345 return res;
4347 /* tcc_declaration */
4348 case CONST_DECL:
4349 return res;
4350 case VAR_DECL:
4351 case PARM_DECL:
4352 if (!is_gimple_reg (lhs)
4353 && !is_gimple_reg (rhs1)
4354 && is_gimple_reg_type (TREE_TYPE (lhs)))
4356 error ("invalid rhs for gimple memory store");
4357 debug_generic_stmt (lhs);
4358 debug_generic_stmt (rhs1);
4359 return true;
4361 return res;
4363 case CONSTRUCTOR:
4364 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4366 unsigned int i;
4367 tree elt_i, elt_v, elt_t = NULL_TREE;
4369 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4370 return res;
4371 /* For vector CONSTRUCTORs we require that either it is an empty
4372 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4373 (then the element count must be correct to cover the whole
4374 outer vector and the index must be NULL on all elements), or it
4375 is a CONSTRUCTOR of scalar elements, where as an exception we
4376 allow a smaller number of elements (assuming zero filling) and
4377 consecutive indexes as compared to NULL indexes (such
4378 CONSTRUCTORs can appear in the IL from FEs).  */
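/* Illustrative instances of these rules (invented SSA names): with
   rhs1_type V4SI,

     _5 = {_1, _2, _3, _4};   four SI scalars, NULL indexes
     _5 = {_1, _2};           two SI scalars, tail assumed zero-filled
     _5 = {_6, _7};           two V2SI halves covering the V4SI

   all pass the checks below, while a DImode element or an explicit
   non-consecutive element index would be diagnosed.  */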
4379 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4381 if (elt_t == NULL_TREE)
4383 elt_t = TREE_TYPE (elt_v);
4384 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4386 tree elt_t = TREE_TYPE (elt_v);
4387 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4388 TREE_TYPE (elt_t)))
4390 error ("incorrect type of vector CONSTRUCTOR"
4391 " elements");
4392 debug_generic_stmt (rhs1);
4393 return true;
4395 else if (CONSTRUCTOR_NELTS (rhs1)
4396 * TYPE_VECTOR_SUBPARTS (elt_t)
4397 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4399 error ("incorrect number of vector CONSTRUCTOR"
4400 " elements");
4401 debug_generic_stmt (rhs1);
4402 return true;
4405 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4406 elt_t))
4408 error ("incorrect type of vector CONSTRUCTOR elements");
4409 debug_generic_stmt (rhs1);
4410 return true;
4412 else if (CONSTRUCTOR_NELTS (rhs1)
4413 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4415 error ("incorrect number of vector CONSTRUCTOR elements");
4416 debug_generic_stmt (rhs1);
4417 return true;
4420 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4422 error ("incorrect type of vector CONSTRUCTOR elements");
4423 debug_generic_stmt (rhs1);
4424 return true;
4426 if (elt_i != NULL_TREE
4427 && (TREE_CODE (elt_t) == VECTOR_TYPE
4428 || TREE_CODE (elt_i) != INTEGER_CST
4429 || compare_tree_int (elt_i, i) != 0))
4431 error ("vector CONSTRUCTOR with non-NULL element index");
4432 debug_generic_stmt (rhs1);
4433 return true;
4435 if (!is_gimple_val (elt_v))
4437 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4438 debug_generic_stmt (rhs1);
4439 return true;
4443 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4445 error ("non-vector CONSTRUCTOR with elements");
4446 debug_generic_stmt (rhs1);
4447 return true;
4449 return res;
4450 case OBJ_TYPE_REF:
4451 case ASSERT_EXPR:
4452 case WITH_SIZE_EXPR:
4453 /* FIXME. */
4454 return res;
4456 default:;
4459 return res;
4462 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4463 is a problem, otherwise false. */
4465 static bool
4466 verify_gimple_assign (gassign *stmt)
4468 switch (gimple_assign_rhs_class (stmt))
4470 case GIMPLE_SINGLE_RHS:
4471 return verify_gimple_assign_single (stmt);
4473 case GIMPLE_UNARY_RHS:
4474 return verify_gimple_assign_unary (stmt);
4476 case GIMPLE_BINARY_RHS:
4477 return verify_gimple_assign_binary (stmt);
4479 case GIMPLE_TERNARY_RHS:
4480 return verify_gimple_assign_ternary (stmt);
4482 default:
4483 gcc_unreachable ();
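/* A hedged sketch of the four rhs classes dispatched above, with
   invented SSA names rather than output of a real dump:

     _1 = _2;                  GIMPLE_SINGLE_RHS
     _1 = -_2;                 GIMPLE_UNARY_RHS (NEGATE_EXPR)
     _1 = _2 + _3;             GIMPLE_BINARY_RHS (PLUS_EXPR)
     _1 = _2 < _3 ? _4 : _5;   GIMPLE_TERNARY_RHS (COND_EXPR)  */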
4487 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4488 is a problem, otherwise false. */
4490 static bool
4491 verify_gimple_return (greturn *stmt)
4493 tree op = gimple_return_retval (stmt);
4494 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4496 /* We cannot test for present return values as we do not fix up missing
4497 return values from the original source. */
4498 if (op == NULL)
4499 return false;
4501 if (!is_gimple_val (op)
4502 && TREE_CODE (op) != RESULT_DECL)
4504 error ("invalid operand in return statement");
4505 debug_generic_stmt (op);
4506 return true;
4509 if ((TREE_CODE (op) == RESULT_DECL
4510 && DECL_BY_REFERENCE (op))
4511 || (TREE_CODE (op) == SSA_NAME
4512 && SSA_NAME_VAR (op)
4513 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4514 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4515 op = TREE_TYPE (op);
4517 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4519 error ("invalid conversion in return statement");
4520 debug_generic_stmt (restype);
4521 debug_generic_stmt (TREE_TYPE (op));
4522 return true;
4525 return false;
4529 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4530 is a problem, otherwise false. */
4532 static bool
4533 verify_gimple_goto (ggoto *stmt)
4535 tree dest = gimple_goto_dest (stmt);
4537 /* ??? We have two canonical forms of direct goto destinations, a
4538 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4539 if (TREE_CODE (dest) != LABEL_DECL
4540 && (!is_gimple_val (dest)
4541 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4543 error ("goto destination is neither a label nor a pointer");
4544 return true;
4547 return false;
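/* For illustration (not from the sources): a source-level computed
   goto such as "goto *ptr;" reaches this check with DEST being a
   pointer-valued GIMPLE value, whereas simple "goto lab;" jumps are
   normally represented implicitly as CFG edges, so a bare
   LABEL_DECL destination is only seen transiently.  */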
4550 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4551 is a problem, otherwise false. */
4553 static bool
4554 verify_gimple_switch (gswitch *stmt)
4556 unsigned int i, n;
4557 tree elt, prev_upper_bound = NULL_TREE;
4558 tree index_type, elt_type = NULL_TREE;
4560 if (!is_gimple_val (gimple_switch_index (stmt)))
4562 error ("invalid operand to switch statement");
4563 debug_generic_stmt (gimple_switch_index (stmt));
4564 return true;
4567 index_type = TREE_TYPE (gimple_switch_index (stmt));
4568 if (! INTEGRAL_TYPE_P (index_type))
4570 error ("non-integral type switch statement");
4571 debug_generic_expr (index_type);
4572 return true;
4575 elt = gimple_switch_label (stmt, 0);
4576 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4578 error ("invalid default case label in switch statement");
4579 debug_generic_expr (elt);
4580 return true;
4583 n = gimple_switch_num_labels (stmt);
4584 for (i = 1; i < n; i++)
4586 elt = gimple_switch_label (stmt, i);
4588 if (! CASE_LOW (elt))
4590 error ("invalid case label in switch statement");
4591 debug_generic_expr (elt);
4592 return true;
4594 if (CASE_HIGH (elt)
4595 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4597 error ("invalid case range in switch statement");
4598 debug_generic_expr (elt);
4599 return true;
4602 if (elt_type)
4604 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4605 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4607 error ("type mismatch for case label in switch statement");
4608 debug_generic_expr (elt);
4609 return true;
4612 else
4614 elt_type = TREE_TYPE (CASE_LOW (elt));
4615 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4617 error ("type precision mismatch in switch statement");
4618 return true;
4622 if (prev_upper_bound)
4624 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4626 error ("case labels not sorted in switch statement");
4627 return true;
4631 prev_upper_bound = CASE_HIGH (elt);
4632 if (! prev_upper_bound)
4633 prev_upper_bound = CASE_LOW (elt);
4636 return false;
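/* An invented case vector that satisfies the checks above:

     switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2, case 9: L1>

   The default label comes first and has no CASE_LOW, the remaining
   labels are sorted and non-overlapping, and they share one type
   whose precision does not exceed that of i_1's type.  */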
4639 /* Verify a gimple debug statement STMT.
4640 Returns true if anything is wrong. */
4642 static bool
4643 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4645 /* There isn't much that could be wrong in a gimple debug stmt.  A
4646 gimple debug bind stmt, for example, maps a tree that is usually
4647 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4648 component or member of an aggregate type, to another tree that
4649 can be an arbitrary expression.  These stmts expand into debug
4650 insns, and are converted to debug notes by var-tracking.c.  */
4651 return false;
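/* For illustration (invented names): a debug bind stmt shows up in
   dumps as "# DEBUG x => _3", and binding a variable to an
   arbitrary expression, or to nothing at all ("# DEBUG x => NULL"),
   is equally acceptable here.  */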
4654 /* Verify a gimple label statement STMT.
4655 Returns true if anything is wrong. */
4657 static bool
4658 verify_gimple_label (glabel *stmt)
4660 tree decl = gimple_label_label (stmt);
4661 int uid;
4662 bool err = false;
4664 if (TREE_CODE (decl) != LABEL_DECL)
4665 return true;
4666 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4667 && DECL_CONTEXT (decl) != current_function_decl)
4669 error ("label's context is not the current function decl");
4670 err |= true;
4673 uid = LABEL_DECL_UID (decl);
4674 if (cfun->cfg
4675 && (uid == -1
4676 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4678 error ("incorrect entry in label_to_block_map");
4679 err |= true;
4682 uid = EH_LANDING_PAD_NR (decl);
4683 if (uid)
4685 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4686 if (decl != lp->post_landing_pad)
4688 error ("incorrect setting of landing pad number");
4689 err |= true;
4693 return err;
4696 /* Verify a gimple cond statement STMT.
4697 Returns true if anything is wrong. */
4699 static bool
4700 verify_gimple_cond (gcond *stmt)
4702 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4704 error ("invalid comparison code in gimple cond");
4705 return true;
4707 if (!(!gimple_cond_true_label (stmt)
4708 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4709 || !(!gimple_cond_false_label (stmt)
4710 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4712 error ("invalid labels in gimple cond");
4713 return true;
4716 return verify_gimple_comparison (boolean_type_node,
4717 gimple_cond_lhs (stmt),
4718 gimple_cond_rhs (stmt),
4719 gimple_cond_code (stmt));
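/* A minimal well-formed gcond, with invented names:

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   carries the tcc_comparison code LT_EXPR required above; the
   operand types are then checked against boolean_type_node by
   verify_gimple_comparison.  */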
4722 /* Verify the GIMPLE statement STMT. Returns true if there is an
4723 error, otherwise false. */
4725 static bool
4726 verify_gimple_stmt (gimple *stmt)
4728 switch (gimple_code (stmt))
4730 case GIMPLE_ASSIGN:
4731 return verify_gimple_assign (as_a <gassign *> (stmt));
4733 case GIMPLE_LABEL:
4734 return verify_gimple_label (as_a <glabel *> (stmt));
4736 case GIMPLE_CALL:
4737 return verify_gimple_call (as_a <gcall *> (stmt));
4739 case GIMPLE_COND:
4740 return verify_gimple_cond (as_a <gcond *> (stmt));
4742 case GIMPLE_GOTO:
4743 return verify_gimple_goto (as_a <ggoto *> (stmt));
4745 case GIMPLE_SWITCH:
4746 return verify_gimple_switch (as_a <gswitch *> (stmt));
4748 case GIMPLE_RETURN:
4749 return verify_gimple_return (as_a <greturn *> (stmt));
4751 case GIMPLE_ASM:
4752 return false;
4754 case GIMPLE_TRANSACTION:
4755 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4757 /* Tuples that do not have tree operands. */
4758 case GIMPLE_NOP:
4759 case GIMPLE_PREDICT:
4760 case GIMPLE_RESX:
4761 case GIMPLE_EH_DISPATCH:
4762 case GIMPLE_EH_MUST_NOT_THROW:
4763 return false;
4765 CASE_GIMPLE_OMP:
4766 /* OpenMP directives are validated by the FE and never operated
4767 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4768 non-gimple expressions when the main index variable has had
4769 its address taken. This does not affect the loop itself
4770 because the header of a GIMPLE_OMP_FOR is merely used to determine
4771 how to set up the parallel iteration.  */
4772 return false;
4774 case GIMPLE_DEBUG:
4775 return verify_gimple_debug (stmt);
4777 default:
4778 gcc_unreachable ();
4782 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4783 and false otherwise. */
4785 static bool
4786 verify_gimple_phi (gimple *phi)
4788 bool err = false;
4789 unsigned i;
4790 tree phi_result = gimple_phi_result (phi);
4791 bool virtual_p;
4793 if (!phi_result)
4795 error ("invalid PHI result");
4796 return true;
4799 virtual_p = virtual_operand_p (phi_result);
4800 if (TREE_CODE (phi_result) != SSA_NAME
4801 || (virtual_p
4802 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4804 error ("invalid PHI result");
4805 err = true;
4808 for (i = 0; i < gimple_phi_num_args (phi); i++)
4810 tree t = gimple_phi_arg_def (phi, i);
4812 if (!t)
4814 error ("missing PHI def");
4815 err |= true;
4816 continue;
4818 /* Addressable variables do have SSA_NAMEs but they
4819 are not considered gimple values. */
4820 else if ((TREE_CODE (t) == SSA_NAME
4821 && virtual_p != virtual_operand_p (t))
4822 || (virtual_p
4823 && (TREE_CODE (t) != SSA_NAME
4824 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4825 || (!virtual_p
4826 && !is_gimple_val (t)))
4828 error ("invalid PHI argument");
4829 debug_generic_expr (t);
4830 err |= true;
4832 #ifdef ENABLE_TYPES_CHECKING
4833 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4835 error ("incompatible types in PHI argument %u", i);
4836 debug_generic_stmt (TREE_TYPE (phi_result));
4837 debug_generic_stmt (TREE_TYPE (t));
4838 err |= true;
4840 #endif
4843 return err;
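/* Two well-formed PHIs for illustration (invented names):

     x_4 = PHI <x_2(3), 0(4)>             ordinary: every arg a GIMPLE value
     .MEM_5 = PHI <.MEM_2(3), .MEM_4(4)>  virtual: result and args use the vop

   Mixing a virtual argument into an ordinary PHI, or vice versa,
   trips the "invalid PHI argument" error above.  */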
4846 /* Verify the GIMPLE statements inside the sequence STMTS. */
4848 static bool
4849 verify_gimple_in_seq_2 (gimple_seq stmts)
4851 gimple_stmt_iterator ittr;
4852 bool err = false;
4854 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4856 gimple *stmt = gsi_stmt (ittr);
4858 switch (gimple_code (stmt))
4860 case GIMPLE_BIND:
4861 err |= verify_gimple_in_seq_2 (
4862 gimple_bind_body (as_a <gbind *> (stmt)));
4863 break;
4865 case GIMPLE_TRY:
4866 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4867 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4868 break;
4870 case GIMPLE_EH_FILTER:
4871 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4872 break;
4874 case GIMPLE_EH_ELSE:
4876 geh_else *eh_else = as_a <geh_else *> (stmt);
4877 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4878 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4880 break;
4882 case GIMPLE_CATCH:
4883 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4884 as_a <gcatch *> (stmt)));
4885 break;
4887 case GIMPLE_TRANSACTION:
4888 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4889 break;
4891 default:
4893 bool err2 = verify_gimple_stmt (stmt);
4894 if (err2)
4895 debug_gimple_stmt (stmt);
4896 err |= err2;
4901 return err;
4904 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4905 is a problem, otherwise false. */
4907 static bool
4908 verify_gimple_transaction (gtransaction *stmt)
4910 tree lab;
4912 lab = gimple_transaction_label_norm (stmt);
4913 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4914 return true;
4915 lab = gimple_transaction_label_uninst (stmt);
4916 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4917 return true;
4918 lab = gimple_transaction_label_over (stmt);
4919 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4920 return true;
4922 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4926 /* Verify the GIMPLE statements inside the statement list STMTS. */
4928 DEBUG_FUNCTION void
4929 verify_gimple_in_seq (gimple_seq stmts)
4931 timevar_push (TV_TREE_STMT_VERIFY);
4932 if (verify_gimple_in_seq_2 (stmts))
4933 internal_error ("verify_gimple failed");
4934 timevar_pop (TV_TREE_STMT_VERIFY);
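/* As a DEBUG_FUNCTION this is intended to be callable by hand from
   the debugger as well; a hypothetical session:

     (gdb) call verify_gimple_in_seq (gimple_body (current_function_decl))

   aborts via internal_error on the first malformed stmt.  */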
4937 /* Return true when T can be shared.  */
4939 static bool
4940 tree_node_can_be_shared (tree t)
4942 if (IS_TYPE_OR_DECL_P (t)
4943 || is_gimple_min_invariant (t)
4944 || TREE_CODE (t) == SSA_NAME
4945 || t == error_mark_node
4946 || TREE_CODE (t) == IDENTIFIER_NODE)
4947 return true;
4949 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4950 return true;
4952 if (DECL_P (t))
4953 return true;
4955 return false;
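/* For example (illustrative): the expression tree for "a + b" must
   be referenced by only one statement, whereas the decls, types,
   SSA names and constants accepted above may be referenced from any
   number of statements.  */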
4958 /* Called via walk_tree. Verify tree sharing. */
4960 static tree
4961 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4963 hash_set<void *> *visited = (hash_set<void *> *) data;
4965 if (tree_node_can_be_shared (*tp))
4967 *walk_subtrees = false;
4968 return NULL;
4971 if (visited->add (*tp))
4972 return *tp;
4974 return NULL;
4977 /* Called via walk_gimple_stmt. Verify tree sharing. */
4979 static tree
4980 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4982 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4983 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4986 static bool eh_error_found;
4987 bool
4988 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4989 hash_set<gimple *> *visited)
4991 if (!visited->contains (stmt))
4993 error ("dead STMT in EH table");
4994 debug_gimple_stmt (stmt);
4995 eh_error_found = true;
4997 return true;
5000 /* Verify that location LOC's block is in BLOCKS.  */
5002 static bool
5003 verify_location (hash_set<tree> *blocks, location_t loc)
5005 tree block = LOCATION_BLOCK (loc);
5006 if (block != NULL_TREE
5007 && !blocks->contains (block))
5009 error ("location references block not in block tree");
5010 return true;
5012 if (block != NULL_TREE)
5013 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5014 return false;
5017 /* Called via walk_tree. Verify that expressions have no blocks. */
5019 static tree
5020 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5022 if (!EXPR_P (*tp))
5024 *walk_subtrees = false;
5025 return NULL;
5028 location_t loc = EXPR_LOCATION (*tp);
5029 if (LOCATION_BLOCK (loc) != NULL)
5030 return *tp;
5032 return NULL;
5035 /* Called via walk_tree. Verify locations of expressions. */
5037 static tree
5038 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5040 hash_set<tree> *blocks = (hash_set<tree> *) data;
5042 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5044 tree t = DECL_DEBUG_EXPR (*tp);
5045 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5046 if (addr)
5047 return addr;
5049 if ((VAR_P (*tp)
5050 || TREE_CODE (*tp) == PARM_DECL
5051 || TREE_CODE (*tp) == RESULT_DECL)
5052 && DECL_HAS_VALUE_EXPR_P (*tp))
5054 tree t = DECL_VALUE_EXPR (*tp);
5055 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5056 if (addr)
5057 return addr;
5060 if (!EXPR_P (*tp))
5062 *walk_subtrees = false;
5063 return NULL;
5066 location_t loc = EXPR_LOCATION (*tp);
5067 if (verify_location (blocks, loc))
5068 return *tp;
5070 return NULL;
5073 /* Called via walk_gimple_op. Verify locations of expressions. */
5075 static tree
5076 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5078 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5079 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5082 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5084 static void
5085 collect_subblocks (hash_set<tree> *blocks, tree block)
5087 tree t;
5088 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5090 blocks->add (t);
5091 collect_subblocks (blocks, t);
5095 /* Verify the GIMPLE statements in the CFG of FN. */
5097 DEBUG_FUNCTION void
5098 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5100 basic_block bb;
5101 bool err = false;
5103 timevar_push (TV_TREE_STMT_VERIFY);
5104 hash_set<void *> visited;
5105 hash_set<gimple *> visited_stmts;
5107 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5108 hash_set<tree> blocks;
5109 if (DECL_INITIAL (fn->decl))
5111 blocks.add (DECL_INITIAL (fn->decl));
5112 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5115 FOR_EACH_BB_FN (bb, fn)
5117 gimple_stmt_iterator gsi;
5119 for (gphi_iterator gpi = gsi_start_phis (bb);
5120 !gsi_end_p (gpi);
5121 gsi_next (&gpi))
5123 gphi *phi = gpi.phi ();
5124 bool err2 = false;
5125 unsigned i;
5127 visited_stmts.add (phi);
5129 if (gimple_bb (phi) != bb)
5131 error ("gimple_bb (phi) is set to a wrong basic block");
5132 err2 = true;
5135 err2 |= verify_gimple_phi (phi);
5137 /* Only PHI arguments have locations. */
5138 if (gimple_location (phi) != UNKNOWN_LOCATION)
5140 error ("PHI node with location");
5141 err2 = true;
5144 for (i = 0; i < gimple_phi_num_args (phi); i++)
5146 tree arg = gimple_phi_arg_def (phi, i);
5147 tree addr = walk_tree (&arg, verify_node_sharing_1,
5148 &visited, NULL);
5149 if (addr)
5151 error ("incorrect sharing of tree nodes");
5152 debug_generic_expr (addr);
5153 err2 |= true;
5155 location_t loc = gimple_phi_arg_location (phi, i);
5156 if (virtual_operand_p (gimple_phi_result (phi))
5157 && loc != UNKNOWN_LOCATION)
5159 error ("virtual PHI with argument locations");
5160 err2 = true;
5162 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5163 if (addr)
5165 debug_generic_expr (addr);
5166 err2 = true;
5168 err2 |= verify_location (&blocks, loc);
5171 if (err2)
5172 debug_gimple_stmt (phi);
5173 err |= err2;
5176 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5178 gimple *stmt = gsi_stmt (gsi);
5179 bool err2 = false;
5180 struct walk_stmt_info wi;
5181 tree addr;
5182 int lp_nr;
5184 visited_stmts.add (stmt);
5186 if (gimple_bb (stmt) != bb)
5188 error ("gimple_bb (stmt) is set to a wrong basic block");
5189 err2 = true;
5192 err2 |= verify_gimple_stmt (stmt);
5193 err2 |= verify_location (&blocks, gimple_location (stmt));
5195 memset (&wi, 0, sizeof (wi));
5196 wi.info = (void *) &visited;
5197 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5198 if (addr)
5200 error ("incorrect sharing of tree nodes");
5201 debug_generic_expr (addr);
5202 err2 |= true;
5205 memset (&wi, 0, sizeof (wi));
5206 wi.info = (void *) &blocks;
5207 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5208 if (addr)
5210 debug_generic_expr (addr);
5211 err2 |= true;
5214 /* ??? Instead of not checking these stmts at all the walker
5215 should know its context via wi. */
5216 if (!is_gimple_debug (stmt)
5217 && !is_gimple_omp (stmt))
5219 memset (&wi, 0, sizeof (wi));
5220 addr = walk_gimple_op (stmt, verify_expr, &wi);
5221 if (addr)
5223 debug_generic_expr (addr);
5224 inform (gimple_location (stmt), "in statement");
5225 err2 |= true;
5229 /* If the statement is marked as part of an EH region, then it is
5230 expected that the statement could throw. Verify that when we
5231 have optimizations that simplify statements such that we prove
5232 that they cannot throw, that we update other data structures
5233 to match. */
5234 lp_nr = lookup_stmt_eh_lp (stmt);
5235 if (lp_nr > 0)
5237 if (!stmt_could_throw_p (stmt))
5239 if (verify_nothrow)
5241 error ("statement marked for throw, but doesn%'t");
5242 err2 |= true;
5245 else if (!gsi_one_before_end_p (gsi))
5247 error ("statement marked for throw in middle of block");
5248 err2 |= true;
5252 if (err2)
5253 debug_gimple_stmt (stmt);
5254 err |= err2;
5258 eh_error_found = false;
5259 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5260 if (eh_table)
5261 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5262 (&visited_stmts);
5264 if (err || eh_error_found)
5265 internal_error ("verify_gimple failed");
5267 verify_histograms ();
5268 timevar_pop (TV_TREE_STMT_VERIFY);
5272 /* Verifies that the flow information is OK. */
5274 static int
5275 gimple_verify_flow_info (void)
5277 int err = 0;
5278 basic_block bb;
5279 gimple_stmt_iterator gsi;
5280 gimple *stmt;
5281 edge e;
5282 edge_iterator ei;
5284 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5285 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5287 error ("ENTRY_BLOCK has IL associated with it");
5288 err = 1;
5291 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5292 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5294 error ("EXIT_BLOCK has IL associated with it");
5295 err = 1;
5298 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5299 if (e->flags & EDGE_FALLTHRU)
5301 error ("fallthru to exit from bb %d", e->src->index);
5302 err = 1;
5305 FOR_EACH_BB_FN (bb, cfun)
5307 bool found_ctrl_stmt = false;
5309 stmt = NULL;
5311 /* Skip labels on the start of basic block. */
5312 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5314 tree label;
5315 gimple *prev_stmt = stmt;
5317 stmt = gsi_stmt (gsi);
5319 if (gimple_code (stmt) != GIMPLE_LABEL)
5320 break;
5322 label = gimple_label_label (as_a <glabel *> (stmt));
5323 if (prev_stmt && DECL_NONLOCAL (label))
5325 error ("nonlocal label ");
5326 print_generic_expr (stderr, label, 0);
5327 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5328 bb->index);
5329 err = 1;
5332 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5334 error ("EH landing pad label ");
5335 print_generic_expr (stderr, label, 0);
5336 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5337 bb->index);
5338 err = 1;
5341 if (label_to_block (label) != bb)
5343 error ("label ");
5344 print_generic_expr (stderr, label, 0);
5345 fprintf (stderr, " to block does not match in bb %d",
5346 bb->index);
5347 err = 1;
5350 if (decl_function_context (label) != current_function_decl)
5352 error ("label ");
5353 print_generic_expr (stderr, label, 0);
5354 fprintf (stderr, " has incorrect context in bb %d",
5355 bb->index);
5356 err = 1;
5360 /* Verify that body of basic block BB is free of control flow. */
5361 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5363 gimple *stmt = gsi_stmt (gsi);
5365 if (found_ctrl_stmt)
5367 error ("control flow in the middle of basic block %d",
5368 bb->index);
5369 err = 1;
5372 if (stmt_ends_bb_p (stmt))
5373 found_ctrl_stmt = true;
5375 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5377 error ("label ");
5378 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5379 fprintf (stderr, " in the middle of basic block %d", bb->index);
5380 err = 1;
5384 gsi = gsi_last_bb (bb);
5385 if (gsi_end_p (gsi))
5386 continue;
5388 stmt = gsi_stmt (gsi);
5390 if (gimple_code (stmt) == GIMPLE_LABEL)
5391 continue;
5393 err |= verify_eh_edges (stmt);
5395 if (is_ctrl_stmt (stmt))
5397 FOR_EACH_EDGE (e, ei, bb->succs)
5398 if (e->flags & EDGE_FALLTHRU)
5400 error ("fallthru edge after a control statement in bb %d",
5401 bb->index);
5402 err = 1;
5406 if (gimple_code (stmt) != GIMPLE_COND)
5408 /* Verify that no edge has EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5409 after anything other than a GIMPLE_COND statement. */
5410 FOR_EACH_EDGE (e, ei, bb->succs)
5411 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5413 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5414 bb->index);
5415 err = 1;
5419 switch (gimple_code (stmt))
5421 case GIMPLE_COND:
5423 edge true_edge;
5424 edge false_edge;
5426 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5428 if (!true_edge
5429 || !false_edge
5430 || !(true_edge->flags & EDGE_TRUE_VALUE)
5431 || !(false_edge->flags & EDGE_FALSE_VALUE)
5432 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5433 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5434 || EDGE_COUNT (bb->succs) >= 3)
5436 error ("wrong outgoing edge flags at end of bb %d",
5437 bb->index);
5438 err = 1;
5441 break;
5443 case GIMPLE_GOTO:
5444 if (simple_goto_p (stmt))
5446 error ("explicit goto at end of bb %d", bb->index);
5447 err = 1;
5449 else
5451 /* FIXME. We should double check that the labels in the
5452 destination blocks have their address taken. */
5453 FOR_EACH_EDGE (e, ei, bb->succs)
5454 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5455 | EDGE_FALSE_VALUE))
5456 || !(e->flags & EDGE_ABNORMAL))
5458 error ("wrong outgoing edge flags at end of bb %d",
5459 bb->index);
5460 err = 1;
5463 break;
5465 case GIMPLE_CALL:
5466 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5467 break;
5468 /* fallthru */
5469 case GIMPLE_RETURN:
5470 if (!single_succ_p (bb)
5471 || (single_succ_edge (bb)->flags
5472 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5473 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5475 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5476 err = 1;
5478 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5480 error ("return edge does not point to exit in bb %d",
5481 bb->index);
5482 err = 1;
5484 break;
5486 case GIMPLE_SWITCH:
5488 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5489 tree prev;
5490 edge e;
5491 size_t i, n;
5493 n = gimple_switch_num_labels (switch_stmt);
5495 /* Mark all the destination basic blocks. */
5496 for (i = 0; i < n; ++i)
5498 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5499 basic_block label_bb = label_to_block (lab);
5500 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5501 label_bb->aux = (void *)1;
5504 /* Verify that the case labels are sorted. */
5505 prev = gimple_switch_label (switch_stmt, 0);
5506 for (i = 1; i < n; ++i)
5508 tree c = gimple_switch_label (switch_stmt, i);
5509 if (!CASE_LOW (c))
5511 error ("found default case not at the start of "
5512 "case vector");
5513 err = 1;
5514 continue;
5516 if (CASE_LOW (prev)
5517 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5519 error ("case labels not sorted: ");
5520 print_generic_expr (stderr, prev, 0);
5521 fprintf (stderr," is greater than ");
5522 print_generic_expr (stderr, c, 0);
5523 fprintf (stderr," but comes before it.\n");
5524 err = 1;
5526 prev = c;
5528 /* VRP will remove the default case if it can prove it will
5529 never be executed. So do not verify there always exists
5530 a default case here. */
5532 FOR_EACH_EDGE (e, ei, bb->succs)
5534 if (!e->dest->aux)
5536 error ("extra outgoing edge %d->%d",
5537 bb->index, e->dest->index);
5538 err = 1;
5541 e->dest->aux = (void *)2;
5542 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5543 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5545 error ("wrong outgoing edge flags at end of bb %d",
5546 bb->index);
5547 err = 1;
5551 /* Check that we have all of them. */
5552 for (i = 0; i < n; ++i)
5554 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5555 basic_block label_bb = label_to_block (lab);
5557 if (label_bb->aux != (void *)2)
5559 error ("missing edge %i->%i", bb->index, label_bb->index);
5560 err = 1;
5564 FOR_EACH_EDGE (e, ei, bb->succs)
5565 e->dest->aux = (void *)0;
5567 break;
5569 case GIMPLE_EH_DISPATCH:
5570 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5571 break;
5573 default:
5574 break;
5578 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5579 verify_dominators (CDI_DOMINATORS);
5581 return err;
5585 /* Updates phi nodes after creating a forwarder block joined
5586 by edge FALLTHRU. */
5588 static void
5589 gimple_make_forwarder_block (edge fallthru)
5591 edge e;
5592 edge_iterator ei;
5593 basic_block dummy, bb;
5594 tree var;
5595 gphi_iterator gsi;
5597 dummy = fallthru->src;
5598 bb = fallthru->dest;
5600 if (single_pred_p (bb))
5601 return;
5603 /* If we redirected a branch we must create new PHI nodes at the
5604 start of BB. */
5605 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5607 gphi *phi, *new_phi;
5609 phi = gsi.phi ();
5610 var = gimple_phi_result (phi);
5611 new_phi = create_phi_node (var, bb);
5612 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5613 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5614 UNKNOWN_LOCATION);
5617 /* Add the arguments we have stored on edges. */
5618 FOR_EACH_EDGE (e, ei, bb->preds)
5620 if (e == fallthru)
5621 continue;
5623 flush_pending_stmts (e);
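/* Sketch with invented names: if the forwarder DUMMY held
   "x_1 = PHI <...>", afterwards DUMMY computes a fresh name
   "x_2 = PHI <...>" and BB gains "x_1 = PHI <x_2(fallthru), ...>"
   with the remaining arguments flushed from the redirected edges,
   so existing uses of x_1 below BB stay valid.  */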
5628 /* Return a non-special label in the head of basic block BB.
5629 Create one if it doesn't exist. */
5631 tree
5632 gimple_block_label (basic_block bb)
5634 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5635 bool first = true;
5636 tree label;
5637 glabel *stmt;
5639 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5641 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5642 if (!stmt)
5643 break;
5644 label = gimple_label_label (stmt);
5645 if (!DECL_NONLOCAL (label))
5647 if (!first)
5648 gsi_move_before (&i, &s);
5649 return label;
5653 label = create_artificial_label (UNKNOWN_LOCATION);
5654 stmt = gimple_build_label (label);
5655 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5656 return label;
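/* Typical use (see the GIMPLE_SWITCH and GIMPLE_ASM handling in
   gimple_redirect_edge_and_branch below): whenever a branch must
   name its new destination explicitly, gimple_block_label (dest)
   supplies the label to store into the case or asm-goto operand.  */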
5660 /* Attempt to perform edge redirection by replacing a possibly complex
5661 jump instruction by a goto or by removing the jump completely.
5662 This can apply only if all edges now point to the same block. The
5663 parameters and return values are equivalent to
5664 redirect_edge_and_branch. */
5666 static edge
5667 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5669 basic_block src = e->src;
5670 gimple_stmt_iterator i;
5671 gimple *stmt;
5673 /* We can replace or remove a complex jump only when we have exactly
5674 two edges. */
5675 if (EDGE_COUNT (src->succs) != 2
5676 /* Verify that all targets will be TARGET. Specifically, the
5677 edge that is not E must also go to TARGET. */
5678 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5679 return NULL;
5681 i = gsi_last_bb (src);
5682 if (gsi_end_p (i))
5683 return NULL;
5685 stmt = gsi_stmt (i);
5687 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5689 gsi_remove (&i, true);
5690 e = ssa_redirect_edge (e, target);
5691 e->flags = EDGE_FALLTHRU;
5692 return e;
5695 return NULL;
5699 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5700 edge representing the redirected branch. */
5702 static edge
5703 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5705 basic_block bb = e->src;
5706 gimple_stmt_iterator gsi;
5707 edge ret;
5708 gimple *stmt;
5710 if (e->flags & EDGE_ABNORMAL)
5711 return NULL;
5713 if (e->dest == dest)
5714 return NULL;
5716 if (e->flags & EDGE_EH)
5717 return redirect_eh_edge (e, dest);
5719 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5721 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5722 if (ret)
5723 return ret;
5726 gsi = gsi_last_bb (bb);
5727 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5729 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5731 case GIMPLE_COND:
5732 /* For COND_EXPR, we only need to redirect the edge. */
5733 break;
5735 case GIMPLE_GOTO:
5736 /* No non-abnormal edges should lead from a non-simple goto, and
5737 simple ones should be represented implicitly. */
5738 gcc_unreachable ();
5740 case GIMPLE_SWITCH:
5742 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5743 tree label = gimple_block_label (dest);
5744 tree cases = get_cases_for_edge (e, switch_stmt);
5746 /* If we have a list of cases associated with E, then use it
5747 as it's a lot faster than walking the entire case vector. */
5748 if (cases)
5750 edge e2 = find_edge (e->src, dest);
5751 tree last, first;
5753 first = cases;
5754 while (cases)
5756 last = cases;
5757 CASE_LABEL (cases) = label;
5758 cases = CASE_CHAIN (cases);
5761 /* If there was already an edge in the CFG, then we need
5762 to move all the cases associated with E to E2. */
5763 if (e2)
5765 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5767 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5768 CASE_CHAIN (cases2) = first;
5770 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5772 else
5774 size_t i, n = gimple_switch_num_labels (switch_stmt);
5776 for (i = 0; i < n; i++)
5778 tree elt = gimple_switch_label (switch_stmt, i);
5779 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5780 CASE_LABEL (elt) = label;
5784 break;
5786 case GIMPLE_ASM:
5788 gasm *asm_stmt = as_a <gasm *> (stmt);
5789 int i, n = gimple_asm_nlabels (asm_stmt);
5790 tree label = NULL;
5792 for (i = 0; i < n; ++i)
5794 tree cons = gimple_asm_label_op (asm_stmt, i);
5795 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5797 if (!label)
5798 label = gimple_block_label (dest);
5799 TREE_VALUE (cons) = label;
5803 /* If we didn't find any label matching the former edge in the
5804 asm labels, we must be redirecting the fallthrough
5805 edge. */
5806 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5808 break;
5810 case GIMPLE_RETURN:
5811 gsi_remove (&gsi, true);
5812 e->flags |= EDGE_FALLTHRU;
5813 break;
5815 case GIMPLE_OMP_RETURN:
5816 case GIMPLE_OMP_CONTINUE:
5817 case GIMPLE_OMP_SECTIONS_SWITCH:
5818 case GIMPLE_OMP_FOR:
5819 /* The edges from OMP constructs can be simply redirected. */
5820 break;
5822 case GIMPLE_EH_DISPATCH:
5823 if (!(e->flags & EDGE_FALLTHRU))
5824 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5825 break;
5827 case GIMPLE_TRANSACTION:
5828 if (e->flags & EDGE_TM_ABORT)
5829 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5830 gimple_block_label (dest));
5831 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5832 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5833 gimple_block_label (dest));
5834 else
5835 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5836 gimple_block_label (dest));
5837 break;
5839 default:
5840 /* Otherwise it must be a fallthru edge, and we don't need to
5841 do anything besides redirecting it. */
5842 gcc_assert (e->flags & EDGE_FALLTHRU);
5843 break;
5846 /* Update/insert PHI nodes as necessary. */
5848 /* Now update the edges in the CFG. */
5849 e = ssa_redirect_edge (e, dest);
5851 return e;
5854 /* Returns true if it is possible to remove edge E by redirecting
5855 it to the destination of the other edge from E->src. */
5857 static bool
5858 gimple_can_remove_branch_p (const_edge e)
5860 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5861 return false;
5863 return true;
5866 /* Simple wrapper, as we can always redirect fallthru edges. */
5868 static basic_block
5869 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5871 e = gimple_redirect_edge_and_branch (e, dest);
5872 gcc_assert (e);
5874 return NULL;
5878 /* Splits basic block BB after statement STMT (but at least after the
5879 labels). If STMT is NULL, BB is split just after the labels. */
5881 static basic_block
5882 gimple_split_block (basic_block bb, void *stmt)
5884 gimple_stmt_iterator gsi;
5885 gimple_stmt_iterator gsi_tgt;
5886 gimple_seq list;
5887 basic_block new_bb;
5888 edge e;
5889 edge_iterator ei;
5891 new_bb = create_empty_bb (bb);
5893 /* Redirect the outgoing edges. */
5894 new_bb->succs = bb->succs;
5895 bb->succs = NULL;
5896 FOR_EACH_EDGE (e, ei, new_bb->succs)
5897 e->src = new_bb;
5899 /* Get a stmt iterator pointing to the first stmt to move. */
5900 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5901 gsi = gsi_after_labels (bb);
5902 else
5904 gsi = gsi_for_stmt ((gimple *) stmt);
5905 gsi_next (&gsi);
5908 /* Move everything from GSI to the new basic block. */
5909 if (gsi_end_p (gsi))
5910 return new_bb;
5912 /* Split the statement list - avoid re-creating new containers as this
5913 brings ugly quadratic memory consumption in the inliner.
5914 (We are still quadratic since we need to update stmt BB pointers,
5915 sadly.) */
5916 gsi_split_seq_before (&gsi, &list);
5917 set_bb_seq (new_bb, list);
5918 for (gsi_tgt = gsi_start (list);
5919 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5920 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5922 return new_bb;
5926 /* Moves basic block BB after block AFTER. */
5928 static bool
5929 gimple_move_block_after (basic_block bb, basic_block after)
5931 if (bb->prev_bb == after)
5932 return true;
5934 unlink_block (bb);
5935 link_block (bb, after);
5937 return true;
5941 /* Return TRUE if block BB has no executable statements, otherwise return
5942 FALSE. */
5944 static bool
5945 gimple_empty_block_p (basic_block bb)
5947 /* BB must have no executable statements. */
5948 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5949 if (phi_nodes (bb))
5950 return false;
5951 if (gsi_end_p (gsi))
5952 return true;
5953 if (is_gimple_debug (gsi_stmt (gsi)))
5954 gsi_next_nondebug (&gsi);
5955 return gsi_end_p (gsi);
5959 /* Split a basic block if it ends with a conditional branch and if the
5960 other part of the block is not empty. */
5962 static basic_block
5963 gimple_split_block_before_cond_jump (basic_block bb)
5965 gimple *last, *split_point;
5966 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5967 if (gsi_end_p (gsi))
5968 return NULL;
5969 last = gsi_stmt (gsi);
5970 if (gimple_code (last) != GIMPLE_COND
5971 && gimple_code (last) != GIMPLE_SWITCH)
5972 return NULL;
5973 gsi_prev (&gsi);
5974 split_point = gsi_stmt (gsi);
5975 return split_block (bb, split_point)->dest;
5979 /* Return true if basic block BB can be duplicated.  */
5981 static bool
5982 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5984 return true;
5987 /* Create a duplicate of the basic block BB. NOTE: This does not
5988 preserve SSA form. */
5990 static basic_block
5991 gimple_duplicate_bb (basic_block bb)
5993 basic_block new_bb;
5994 gimple_stmt_iterator gsi_tgt;
5996 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5998 /* Copy the PHI nodes. We ignore PHI node arguments here because
5999 the incoming edges have not been set up yet. */
6000 for (gphi_iterator gpi = gsi_start_phis (bb);
6001 !gsi_end_p (gpi);
6002 gsi_next (&gpi))
6004 gphi *phi, *copy;
6005 phi = gpi.phi ();
6006 copy = create_phi_node (NULL_TREE, new_bb);
6007 create_new_def_for (gimple_phi_result (phi), copy,
6008 gimple_phi_result_ptr (copy));
6009 gimple_set_uid (copy, gimple_uid (phi));
6012 gsi_tgt = gsi_start_bb (new_bb);
6013 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6014 !gsi_end_p (gsi);
6015 gsi_next (&gsi))
6017 def_operand_p def_p;
6018 ssa_op_iter op_iter;
6019 tree lhs;
6020 gimple *stmt, *copy;
6022 stmt = gsi_stmt (gsi);
6023 if (gimple_code (stmt) == GIMPLE_LABEL)
6024 continue;
6026 /* Don't duplicate label debug stmts. */
6027 if (gimple_debug_bind_p (stmt)
6028 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6029 == LABEL_DECL)
6030 continue;
6032 /* Create a new copy of STMT and duplicate STMT's virtual
6033 operands. */
6034 copy = gimple_copy (stmt);
6035 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6037 maybe_duplicate_eh_stmt (copy, stmt);
6038 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6040 /* When copying around a stmt writing into a local non-user
6041 aggregate, make sure it won't share stack slot with other
6042 vars. */
6043 lhs = gimple_get_lhs (stmt);
6044 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6046 tree base = get_base_address (lhs);
6047 if (base
6048 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6049 && DECL_IGNORED_P (base)
6050 && !TREE_STATIC (base)
6051 && !DECL_EXTERNAL (base)
6052 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6053 DECL_NONSHAREABLE (base) = 1;
6056 /* Create new names for all the definitions created by COPY and
6057 add replacement mappings for each new name. */
6058 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6059 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6062 return new_bb;
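/* Caller's note (a summary of the above, not new behavior): the
   duplicate's PHI nodes have results but no arguments, and no edges
   are created here; once the caller has wired up the edges it is
   expected to call add_phi_args_after_copy_bb or
   add_phi_args_after_copy below to complete the PHIs.  */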
6065 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6067 static void
6068 add_phi_args_after_copy_edge (edge e_copy)
6070 basic_block bb, bb_copy = e_copy->src, dest;
6071 edge e;
6072 edge_iterator ei;
6073 gphi *phi, *phi_copy;
6074 tree def;
6075 gphi_iterator psi, psi_copy;
6077 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6078 return;
6080 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6082 if (e_copy->dest->flags & BB_DUPLICATED)
6083 dest = get_bb_original (e_copy->dest);
6084 else
6085 dest = e_copy->dest;
6087 e = find_edge (bb, dest);
6088 if (!e)
6090 /* During loop unrolling the target of the latch edge is copied.
6091 In this case we are not looking for the edge to DEST, but for
6092 the edge to the duplicated block whose original was DEST. */
6093 FOR_EACH_EDGE (e, ei, bb->succs)
6095 if ((e->dest->flags & BB_DUPLICATED)
6096 && get_bb_original (e->dest) == dest)
6097 break;
6100 gcc_assert (e != NULL);
6103 for (psi = gsi_start_phis (e->dest),
6104 psi_copy = gsi_start_phis (e_copy->dest);
6105 !gsi_end_p (psi);
6106 gsi_next (&psi), gsi_next (&psi_copy))
6108 phi = psi.phi ();
6109 phi_copy = psi_copy.phi ();
6110 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6111 add_phi_arg (phi_copy, def, e_copy,
6112 gimple_phi_arg_location_from_edge (phi, e));
6117 /* Basic block BB_COPY was created by code duplication. Add phi node
6118 arguments for edges going out of BB_COPY. The blocks that were
6119 duplicated have BB_DUPLICATED set. */
6121 void
6122 add_phi_args_after_copy_bb (basic_block bb_copy)
6124 edge e_copy;
6125 edge_iterator ei;
6127 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6129 add_phi_args_after_copy_edge (e_copy);
6133 /* Blocks in REGION_COPY array of length N_REGION were created by
6134 duplication of basic blocks. Add phi node arguments for edges
6135 going from these blocks. If E_COPY is not NULL, also add
6136 phi node arguments for its destination.  */
6138 void
6139 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6140 edge e_copy)
6142 unsigned i;
6144 for (i = 0; i < n_region; i++)
6145 region_copy[i]->flags |= BB_DUPLICATED;
6147 for (i = 0; i < n_region; i++)
6148 add_phi_args_after_copy_bb (region_copy[i]);
6149 if (e_copy)
6150 add_phi_args_after_copy_edge (e_copy);
6152 for (i = 0; i < n_region; i++)
6153 region_copy[i]->flags &= ~BB_DUPLICATED;
6156 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6157 important exit edge EXIT. By important we mean that no SSA name defined
6158 inside region is live over the other exit edges of the region. All entry
6159 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6160 to the duplicate of the region. Dominance and loop information is
6161 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6162 UPDATE_DOMINANCE is false then we assume that the caller will update the
6163 dominance information after calling this function. The new basic
6164 blocks are stored to REGION_COPY in the same order as they had in REGION,
6165 provided that REGION_COPY is not NULL.
6166 The function returns false if it is unable to copy the region,
6167 true otherwise. */
6169 bool
6170 gimple_duplicate_sese_region (edge entry, edge exit,
6171 basic_block *region, unsigned n_region,
6172 basic_block *region_copy,
6173 bool update_dominance)
6175 unsigned i;
6176 bool free_region_copy = false, copying_header = false;
6177 struct loop *loop = entry->dest->loop_father;
6178 edge exit_copy;
6179 vec<basic_block> doms;
6180 edge redirected;
6181 int total_freq = 0, entry_freq = 0;
6182 gcov_type total_count = 0, entry_count = 0;
6184 if (!can_copy_bbs_p (region, n_region))
6185 return false;
6187 /* Some sanity checking. Note that we do not check for all possible
6188 misuses of this function. I.e. if you ask to copy something weird,
6189 it will work, but the state of structures probably will not be
6190 correct. */
6191 for (i = 0; i < n_region; i++)
6193 /* We do not handle subloops, i.e. all the blocks must belong to the
6194 same loop. */
6195 if (region[i]->loop_father != loop)
6196 return false;
6198 if (region[i] != entry->dest
6199 && region[i] == loop->header)
6200 return false;
6203 /* In case the function is used for loop header copying (which is the primary
6204 use), ensure that EXIT and its copy will be new latch and entry edges. */
6205 if (loop->header == entry->dest)
6207 copying_header = true;
6209 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6210 return false;
6212 for (i = 0; i < n_region; i++)
6213 if (region[i] != exit->src
6214 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6215 return false;
6218 initialize_original_copy_tables ();
6220 if (copying_header)
6221 set_loop_copy (loop, loop_outer (loop));
6222 else
6223 set_loop_copy (loop, loop);
6225 if (!region_copy)
6227 region_copy = XNEWVEC (basic_block, n_region);
6228 free_region_copy = true;
6231 /* Record blocks outside the region that are dominated by something
6232 inside. */
6233 if (update_dominance)
6235 doms.create (0);
6236 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6239 if (entry->dest->count)
6241 total_count = entry->dest->count;
6242 entry_count = entry->count;
6243 /* Fix up corner cases, to avoid division by zero or creation of negative
6244 frequencies. */
6245 if (entry_count > total_count)
6246 entry_count = total_count;
6248 else
6250 total_freq = entry->dest->frequency;
6251 entry_freq = EDGE_FREQUENCY (entry);
6252 /* Fix up corner cases, to avoid division by zero or creation of negative
6253 frequencies. */
6254 if (total_freq == 0)
6255 total_freq = 1;
6256 else if (entry_freq > total_freq)
6257 entry_freq = total_freq;
6260 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6261 split_edge_bb_loc (entry), update_dominance);
6262 if (total_count)
6264 scale_bbs_frequencies_gcov_type (region, n_region,
6265 total_count - entry_count,
6266 total_count);
6267 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6268 total_count);
6270 else
6272 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6273 total_freq);
6274 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6277 if (copying_header)
6279 loop->header = exit->dest;
6280 loop->latch = exit->src;
6283 /* Redirect the entry and add the phi node arguments. */
6284 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6285 gcc_assert (redirected != NULL);
6286 flush_pending_stmts (entry);
6288 /* Concerning updating of dominators: We must recount dominators
6289 for entry block and its copy. Anything that is outside of the
6290 region, but was dominated by something inside needs recounting as
6291 well. */
6292 if (update_dominance)
6294 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6295 doms.safe_push (get_bb_original (entry->dest));
6296 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6297 doms.release ();
6300 /* Add the other PHI node arguments. */
6301 add_phi_args_after_copy (region_copy, n_region, NULL);
6303 if (free_region_copy)
6304 free (region_copy);
6306 free_original_copy_tables ();
6307 return true;
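/* A sketch of the primary use, loop header copying (the pass in
   tree-ssa-loop-ch.c): with ENTRY the preheader edge into the
   header and EXIT the edge from the header region into the loop
   body, duplicating the region turns

     while (cond) body;    into    if (cond) do body; while (cond);

   i.e. the copied header becomes the guard and the original header
   becomes the exit test at the bottom of the rotated loop.  */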
6310 /* Checks if BB is part of the region defined by N_REGION BBS. */
6311 static bool
6312 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6314 unsigned int n;
6316 for (n = 0; n < n_region; n++)
6318 if (bb == bbs[n])
6319 return true;
6321 return false;
6324 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6325 are stored to REGION_COPY in the same order as they appear
6326 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6327 the region, EXIT an exit from it. The condition guarding EXIT
6328 is moved to ENTRY. Returns true if duplication succeeds, false
6329 otherwise.
   For example,

     some_code;
     if (cond)
       ;
     else
       ;

   is transformed to

     if (cond)
       {
         some_code;
       }
     else
       {
         some_code;
       }
6353 bool
6354 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6355 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6356 basic_block *region_copy ATTRIBUTE_UNUSED)
6358 unsigned i;
6359 bool free_region_copy = false;
6360 struct loop *loop = exit->dest->loop_father;
6361 struct loop *orig_loop = entry->dest->loop_father;
6362 basic_block switch_bb, entry_bb, nentry_bb;
6363 vec<basic_block> doms;
6364 int total_freq = 0, exit_freq = 0;
6365 gcov_type total_count = 0, exit_count = 0;
6366 edge exits[2], nexits[2], e;
6367 gimple_stmt_iterator gsi;
6368 gimple *cond_stmt;
6369 edge sorig, snew;
6370 basic_block exit_bb;
6371 gphi_iterator psi;
6372 gphi *phi;
6373 tree def;
6374 struct loop *target, *aloop, *cloop;
6376 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6377 exits[0] = exit;
6378 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6380 if (!can_copy_bbs_p (region, n_region))
6381 return false;
6383 initialize_original_copy_tables ();
6384 set_loop_copy (orig_loop, loop);
6386 target = loop;
6387 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6389 if (bb_part_of_region_p (aloop->header, region, n_region))
6391 cloop = duplicate_loop (aloop, target);
6392 duplicate_subloops (aloop, cloop);
6396 if (!region_copy)
6398 region_copy = XNEWVEC (basic_block, n_region);
6399 free_region_copy = true;
6402 gcc_assert (!need_ssa_update_p (cfun));
6404 /* Record blocks outside the region that are dominated by something
6405 inside. */
6406 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6408 if (exit->src->count)
6410 total_count = exit->src->count;
6411 exit_count = exit->count;
6412 /* Fix up corner cases, to avoid division by zero or creation of negative
6413 frequencies. */
6414 if (exit_count > total_count)
6415 exit_count = total_count;
6417 else
6419 total_freq = exit->src->frequency;
6420 exit_freq = EDGE_FREQUENCY (exit);
6421 /* Fix up corner cases, to avoid division by zero or creation of negative
6422 frequencies. */
6423 if (total_freq == 0)
6424 total_freq = 1;
6425 if (exit_freq > total_freq)
6426 exit_freq = total_freq;
6429 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6430 split_edge_bb_loc (exit), true);
6431 if (total_count)
6433 scale_bbs_frequencies_gcov_type (region, n_region,
6434 total_count - exit_count,
6435 total_count);
6436 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6437 total_count);
6439 else
6441 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6442 total_freq);
6443 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6446 /* Create the switch block, and put the exit condition to it. */
6447 entry_bb = entry->dest;
6448 nentry_bb = get_bb_copy (entry_bb);
6449 if (!last_stmt (entry->src)
6450 || !stmt_ends_bb_p (last_stmt (entry->src)))
6451 switch_bb = entry->src;
6452 else
6453 switch_bb = split_edge (entry);
6454 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6456 gsi = gsi_last_bb (switch_bb);
6457 cond_stmt = last_stmt (exit->src);
6458 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6459 cond_stmt = gimple_copy (cond_stmt);
6461 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6463 sorig = single_succ_edge (switch_bb);
6464 sorig->flags = exits[1]->flags;
6465 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6467 /* Register the new edge from SWITCH_BB in loop exit lists. */
6468 rescan_loop_exit (snew, true, false);
6470 /* Add the PHI node arguments. */
6471 add_phi_args_after_copy (region_copy, n_region, snew);
6473 /* Get rid of now superfluous conditions and associated edges (and phi node
6474 arguments). */
6475 exit_bb = exit->dest;
6477 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6478 PENDING_STMT (e) = NULL;
6480 /* The latch of ORIG_LOOP was copied, and so was the backedge
6481 to the original header. We redirect this backedge to EXIT_BB. */
6482 for (i = 0; i < n_region; i++)
6483 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6485 gcc_assert (single_succ_edge (region_copy[i]));
6486 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6487 PENDING_STMT (e) = NULL;
6488 for (psi = gsi_start_phis (exit_bb);
6489 !gsi_end_p (psi);
6490 gsi_next (&psi))
6492 phi = psi.phi ();
6493 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6494 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6497 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6498 PENDING_STMT (e) = NULL;
6500 /* Anything that is outside of the region but was dominated by something
6501 inside needs its dominance info updated. */
6502 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6503 doms.release ();
6504 /* Update the SSA web. */
6505 update_ssa (TODO_update_ssa);
6507 if (free_region_copy)
6508 free (region_copy);
6510 free_original_copy_tables ();
6511 return true;
6514 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6515 adding blocks when the dominator traversal reaches EXIT. This
6516 function silently assumes that ENTRY strictly dominates EXIT. */
6518 void
6519 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6520 vec<basic_block> *bbs_p)
6522 basic_block son;
6524 for (son = first_dom_son (CDI_DOMINATORS, entry);
6525 son;
6526 son = next_dom_son (CDI_DOMINATORS, son))
6528 bbs_p->safe_push (son);
6529 if (son != exit)
6530 gather_blocks_in_sese_region (son, exit, bbs_p);
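/* Illustrative usage sketch (not part of GCC): callers must seed the
   vector with ENTRY themselves, because the dominator walk above only
   pushes the dominator-tree sons of ENTRY.  Assuming ENTRY strictly
   dominates EXIT:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();

   move_sese_region_to_fn below uses exactly this pattern.  */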
6534 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6535 The duplicates are recorded in VARS_MAP. */
6537 static void
6538 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6539 tree to_context)
6541 tree t = *tp, new_t;
6542 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6544 if (DECL_CONTEXT (t) == to_context)
6545 return;
6547 bool existed;
6548 tree &loc = vars_map->get_or_insert (t, &existed);
6550 if (!existed)
6552 if (SSA_VAR_P (t))
6554 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6555 add_local_decl (f, new_t);
6557 else
6559 gcc_assert (TREE_CODE (t) == CONST_DECL);
6560 new_t = copy_node (t);
6562 DECL_CONTEXT (new_t) = to_context;
6564 loc = new_t;
6566 else
6567 new_t = loc;
6569 *tp = new_t;
6573 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6574 VARS_MAP maps old ssa names and var_decls to the new ones. */
6576 static tree
6577 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6578 tree to_context)
6580 tree new_name;
6582 gcc_assert (!virtual_operand_p (name));
6584 tree *loc = vars_map->get (name);
6586 if (!loc)
6588 tree decl = SSA_NAME_VAR (name);
6589 if (decl)
6591 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6592 replace_by_duplicate_decl (&decl, vars_map, to_context);
6593 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6594 decl, SSA_NAME_DEF_STMT (name));
6596 else
6597 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6598 name, SSA_NAME_DEF_STMT (name));
6600 /* Now that we've used the def stmt to define new_name, make sure it
6601 doesn't define name anymore. */
6602 SSA_NAME_DEF_STMT (name) = NULL;
6604 vars_map->put (name, new_name);
6606 else
6607 new_name = *loc;
6609 return new_name;
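/* Effect in a nutshell (illustrative, names hypothetical): the first
   call for an SSA name duplicates its SSA_NAME_VAR into TO_CONTEXT,
   builds a fresh name over the duplicate in the destination function
   and caches the pair; later calls just return the cached name:

     tree n1 = replace_ssa_name (name, &vars_map, dest_fn_decl);
     tree n2 = replace_ssa_name (name, &vars_map, dest_fn_decl);
     gcc_assert (n1 == n2);   // second lookup hits VARS_MAP  */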
6612 struct move_stmt_d
6614 tree orig_block;
6615 tree new_block;
6616 tree from_context;
6617 tree to_context;
6618 hash_map<tree, tree> *vars_map;
6619 htab_t new_label_map;
6620 hash_map<void *, void *> *eh_map;
6621 bool remap_decls_p;
6624 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6625 contained in *TP if it was previously ORIG_BLOCK, and change the
6626 DECL_CONTEXT of every local variable referenced in *TP. */
6628 static tree
6629 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6631 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6632 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6633 tree t = *tp;
6635 if (EXPR_P (t))
6637 tree block = TREE_BLOCK (t);
6638 if (block == p->orig_block
6639 || (p->orig_block == NULL_TREE
6640 && block != NULL_TREE))
6641 TREE_SET_BLOCK (t, p->new_block);
6642 else if (flag_checking && block != NULL_TREE)
6644 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6645 block = BLOCK_SUPERCONTEXT (block);
6646 gcc_assert (block == p->orig_block);
6649 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6651 if (TREE_CODE (t) == SSA_NAME)
6652 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6653 else if (TREE_CODE (t) == PARM_DECL
6654 && gimple_in_ssa_p (cfun))
6655 *tp = *(p->vars_map->get (t));
6656 else if (TREE_CODE (t) == LABEL_DECL)
6658 if (p->new_label_map)
6660 struct tree_map in, *out;
6661 in.base.from = t;
6662 out = (struct tree_map *)
6663 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6664 if (out)
6665 *tp = t = out->to;
6668 DECL_CONTEXT (t) = p->to_context;
6670 else if (p->remap_decls_p)
6672 /* Replace T with its duplicate. T should no longer appear in the
6673 parent function, so this looks wasteful; however, it may appear
6674 in referenced_vars, and more importantly, as virtual operands of
6675 statements, and in alias lists of other variables. It would be
6676 quite difficult to expunge it from all those places. ??? It might
6677 suffice to do this for addressable variables. */
6678 if ((VAR_P (t) && !is_global_var (t))
6679 || TREE_CODE (t) == CONST_DECL)
6680 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6682 *walk_subtrees = 0;
6684 else if (TYPE_P (t))
6685 *walk_subtrees = 0;
6687 return NULL_TREE;
6690 /* Helper for move_stmt_r. Given an EH region number for the source
6691 function, map that to the duplicate EH region number in the dest.
6693 static int
6694 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6696 eh_region old_r, new_r;
6698 old_r = get_eh_region_from_number (old_nr);
6699 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6701 return new_r->index;
6704 /* Similar, but operate on INTEGER_CSTs. */
6706 static tree
6707 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6709 int old_nr, new_nr;
6711 old_nr = tree_to_shwi (old_t_nr);
6712 new_nr = move_stmt_eh_region_nr (old_nr, p);
6714 return build_int_cst (integer_type_node, new_nr);
6717 /* Like move_stmt_op, but for gimple statements.
6719 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6720 contained in the current statement in *GSI_P and change the
6721 DECL_CONTEXT of every local variable referenced in the current
6722 statement. */
6724 static tree
6725 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6726 struct walk_stmt_info *wi)
6728 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6729 gimple *stmt = gsi_stmt (*gsi_p);
6730 tree block = gimple_block (stmt);
6732 if (block == p->orig_block
6733 || (p->orig_block == NULL_TREE
6734 && block != NULL_TREE))
6735 gimple_set_block (stmt, p->new_block);
6737 switch (gimple_code (stmt))
6739 case GIMPLE_CALL:
6740 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6742 tree r, fndecl = gimple_call_fndecl (stmt);
6743 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6744 switch (DECL_FUNCTION_CODE (fndecl))
6746 case BUILT_IN_EH_COPY_VALUES:
6747 r = gimple_call_arg (stmt, 1);
6748 r = move_stmt_eh_region_tree_nr (r, p);
6749 gimple_call_set_arg (stmt, 1, r);
6750 /* FALLTHRU */
6752 case BUILT_IN_EH_POINTER:
6753 case BUILT_IN_EH_FILTER:
6754 r = gimple_call_arg (stmt, 0);
6755 r = move_stmt_eh_region_tree_nr (r, p);
6756 gimple_call_set_arg (stmt, 0, r);
6757 break;
6759 default:
6760 break;
6763 break;
6765 case GIMPLE_RESX:
6767 gresx *resx_stmt = as_a <gresx *> (stmt);
6768 int r = gimple_resx_region (resx_stmt);
6769 r = move_stmt_eh_region_nr (r, p);
6770 gimple_resx_set_region (resx_stmt, r);
6772 break;
6774 case GIMPLE_EH_DISPATCH:
6776 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6777 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6778 r = move_stmt_eh_region_nr (r, p);
6779 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6781 break;
6783 case GIMPLE_OMP_RETURN:
6784 case GIMPLE_OMP_CONTINUE:
6785 break;
6786 default:
6787 if (is_gimple_omp (stmt))
6789 /* Do not remap variables inside OMP directives. Variables
6790 referenced in clauses and directive header belong to the
6791 parent function and should not be moved into the child
6792 function. */
6793 bool save_remap_decls_p = p->remap_decls_p;
6794 p->remap_decls_p = false;
6795 *handled_ops_p = true;
6797 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6798 move_stmt_op, wi);
6800 p->remap_decls_p = save_remap_decls_p;
6802 break;
6805 return NULL_TREE;
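/* Driver sketch (this mirrors the actual use in move_block_to_fn
   below, where D points to a filled-in struct move_stmt_d):

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

   move_stmt_r handles statement-level state (blocks, EH regions, OMP
   bodies) while move_stmt_op rewrites the individual operands.  */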
6808 /* Move basic block BB from function CFUN to function DEST_FN. The
6809 block is moved out of the original linked list and placed after
6810 block AFTER in the new list. Also, the block is removed from the
6811 original array of blocks and placed in DEST_FN's array of blocks.
6812 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6813 updated to reflect the moved edges.
6815 The local variables are remapped to new instances; VARS_MAP is used
6816 to record the mapping. */
6818 static void
6819 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6820 basic_block after, bool update_edge_count_p,
6821 struct move_stmt_d *d)
6823 struct control_flow_graph *cfg;
6824 edge_iterator ei;
6825 edge e;
6826 gimple_stmt_iterator si;
6827 unsigned old_len, new_len;
6829 /* Remove BB from dominance structures. */
6830 delete_from_dominance_info (CDI_DOMINATORS, bb);
6832 /* Move BB from its current loop to the copy in the new function. */
6833 if (current_loops)
6835 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6836 if (new_loop)
6837 bb->loop_father = new_loop;
6840 /* Link BB to the new linked list. */
6841 move_block_after (bb, after);
6843 /* Update the edge count in the corresponding flowgraphs. */
6844 if (update_edge_count_p)
6845 FOR_EACH_EDGE (e, ei, bb->succs)
6847 cfun->cfg->x_n_edges--;
6848 dest_cfun->cfg->x_n_edges++;
6851 /* Remove BB from the original basic block array. */
6852 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6853 cfun->cfg->x_n_basic_blocks--;
6855 /* Grow DEST_CFUN's basic block array if needed. */
6856 cfg = dest_cfun->cfg;
6857 cfg->x_n_basic_blocks++;
6858 if (bb->index >= cfg->x_last_basic_block)
6859 cfg->x_last_basic_block = bb->index + 1;
6861 old_len = vec_safe_length (cfg->x_basic_block_info);
6862 if ((unsigned) cfg->x_last_basic_block >= old_len)
6864 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6865 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6868 (*cfg->x_basic_block_info)[bb->index] = bb;
6870 /* Remap the variables in phi nodes. */
6871 for (gphi_iterator psi = gsi_start_phis (bb);
6872 !gsi_end_p (psi); )
6874 gphi *phi = psi.phi ();
6875 use_operand_p use;
6876 tree op = PHI_RESULT (phi);
6877 ssa_op_iter oi;
6878 unsigned i;
6880 if (virtual_operand_p (op))
6882 /* Remove the phi nodes for virtual operands (alias analysis will be
6883 run for the new function, anyway). */
6884 remove_phi_node (&psi, true);
6885 continue;
6888 SET_PHI_RESULT (phi,
6889 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6890 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6892 op = USE_FROM_PTR (use);
6893 if (TREE_CODE (op) == SSA_NAME)
6894 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6897 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6899 location_t locus = gimple_phi_arg_location (phi, i);
6900 tree block = LOCATION_BLOCK (locus);
6902 if (locus == UNKNOWN_LOCATION)
6903 continue;
6904 if (d->orig_block == NULL_TREE || block == d->orig_block)
6906 locus = set_block (locus, d->new_block);
6907 gimple_phi_arg_set_location (phi, i, locus);
6911 gsi_next (&psi);
6914 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6916 gimple *stmt = gsi_stmt (si);
6917 struct walk_stmt_info wi;
6919 memset (&wi, 0, sizeof (wi));
6920 wi.info = d;
6921 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6923 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6925 tree label = gimple_label_label (label_stmt);
6926 int uid = LABEL_DECL_UID (label);
6928 gcc_assert (uid > -1);
6930 old_len = vec_safe_length (cfg->x_label_to_block_map);
6931 if (old_len <= (unsigned) uid)
6933 new_len = 3 * uid / 2 + 1;
6934 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6937 (*cfg->x_label_to_block_map)[uid] = bb;
6938 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6940 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6942 if (uid >= dest_cfun->cfg->last_label_uid)
6943 dest_cfun->cfg->last_label_uid = uid + 1;
6946 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6947 remove_stmt_from_eh_lp_fn (cfun, stmt);
6949 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6950 gimple_remove_stmt_histograms (cfun, stmt);
6952 /* We cannot leave any operands allocated from the operand caches of
6953 the current function. */
6954 free_stmt_operands (cfun, stmt);
6955 push_cfun (dest_cfun);
6956 update_stmt (stmt);
6957 pop_cfun ();
6960 FOR_EACH_EDGE (e, ei, bb->succs)
6961 if (e->goto_locus != UNKNOWN_LOCATION)
6963 tree block = LOCATION_BLOCK (e->goto_locus);
6964 if (d->orig_block == NULL_TREE
6965 || block == d->orig_block)
6966 e->goto_locus = set_block (e->goto_locus, d->new_block);
6970 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6971 the outermost EH region. Use REGION as the incoming base EH region. */
6973 static eh_region
6974 find_outermost_region_in_block (struct function *src_cfun,
6975 basic_block bb, eh_region region)
6977 gimple_stmt_iterator si;
6979 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6981 gimple *stmt = gsi_stmt (si);
6982 eh_region stmt_region;
6983 int lp_nr;
6985 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6986 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6987 if (stmt_region)
6989 if (region == NULL)
6990 region = stmt_region;
6991 else if (stmt_region != region)
6993 region = eh_region_outermost (src_cfun, stmt_region, region);
6994 gcc_assert (region != NULL);
6999 return region;
7002 static tree
7003 new_label_mapper (tree decl, void *data)
7005 htab_t hash = (htab_t) data;
7006 struct tree_map *m;
7007 void **slot;
7009 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7011 m = XNEW (struct tree_map);
7012 m->hash = DECL_UID (decl);
7013 m->base.from = decl;
7014 m->to = create_artificial_label (UNKNOWN_LOCATION);
7015 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7016 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7017 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7019 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7020 gcc_assert (*slot == NULL);
7022 *slot = m;
7024 return m->to;
7027 /* Tree walker to replace the decls used inside value expressions by
7028 duplicates. */
7030 static tree
7031 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7033 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7035 switch (TREE_CODE (*tp))
7037 case VAR_DECL:
7038 case PARM_DECL:
7039 case RESULT_DECL:
7040 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7041 break;
7042 default:
7043 break;
7046 if (IS_TYPE_OR_DECL_P (*tp))
7047 *walk_subtrees = false;
7049 return NULL;
7052 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7053 subblocks. */
7055 static void
7056 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7057 tree to_context)
7059 tree *tp, t;
7061 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7063 t = *tp;
7064 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7065 continue;
7066 replace_by_duplicate_decl (&t, vars_map, to_context);
7067 if (t != *tp)
7069 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7071 tree x = DECL_VALUE_EXPR (*tp);
7072 struct replace_decls_d rd = { vars_map, to_context };
7073 unshare_expr (x);
7074 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7075 SET_DECL_VALUE_EXPR (t, x);
7076 DECL_HAS_VALUE_EXPR_P (t) = 1;
7078 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7079 *tp = t;
7083 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7084 replace_block_vars_by_duplicates (block, vars_map, to_context);
7087 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7088 from FN1 to FN2. */
7090 static void
7091 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7092 struct loop *loop)
7094 /* Discard it from the old loop array. */
7095 (*get_loops (fn1))[loop->num] = NULL;
7097 /* Place it in the new loop array, assigning it a new number. */
7098 loop->num = number_of_loops (fn2);
7099 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7101 /* Recurse to children. */
7102 for (loop = loop->inner; loop; loop = loop->next)
7103 fixup_loop_arrays_after_move (fn1, fn2, loop);
7106 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7107 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7109 DEBUG_FUNCTION void
7110 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7112 basic_block bb;
7113 edge_iterator ei;
7114 edge e;
7115 bitmap bbs = BITMAP_ALLOC (NULL);
7116 int i;
7118 gcc_assert (entry != NULL);
7119 gcc_assert (entry != exit);
7120 gcc_assert (bbs_p != NULL);
7122 gcc_assert (bbs_p->length () > 0);
7124 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7125 bitmap_set_bit (bbs, bb->index);
7127 gcc_assert (bitmap_bit_p (bbs, entry->index));
7128 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7130 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7132 if (bb == entry)
7134 gcc_assert (single_pred_p (entry));
7135 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7137 else
7138 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7140 e = ei_edge (ei);
7141 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7144 if (bb == exit)
7146 gcc_assert (single_succ_p (exit));
7147 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7149 else
7150 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7152 e = ei_edge (ei);
7153 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7157 BITMAP_FREE (bbs);
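/* For intuition, the shape being verified is roughly (illustrative):

       pred                 exactly one predecessor outside the region
        |
      ENTRY                 the single entry point
       / \
     ... ...                internal blocks: every pred and succ edge
       \ /                  stays inside the region
      EXIT                  the single exit point
        |
       succ                 exactly one successor outside the region

   Noreturn blocks may sit inside the region without reaching EXIT.  */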
7160 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7162 bool
7163 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7165 bitmap release_names = (bitmap)data;
7167 if (TREE_CODE (from) != SSA_NAME)
7168 return true;
7170 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7171 return true;
7174 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7175 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7176 single basic block in the original CFG and the new basic block is
7177 returned. DEST_CFUN must not have a CFG yet.
7179 Note that the region need not be a pure SESE region. Blocks inside
7180 the region may contain calls to abort/exit. The only restriction
7181 is that ENTRY_BB should be the only entry point and it must
7182 dominate EXIT_BB.
7184 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7185 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7186 to the new function.
7188 All local variables referenced in the region are assumed to be in
7189 the corresponding BLOCK_VARS and unexpanded variable lists
7190 associated with DEST_CFUN.
7192 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7193 reimplement move_sese_region_to_fn by duplicating the region rather than
7194 moving it. */
7196 basic_block
7197 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7198 basic_block exit_bb, tree orig_block)
7200 vec<basic_block> bbs, dom_bbs;
7201 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7202 basic_block after, bb, *entry_pred, *exit_succ, abb;
7203 struct function *saved_cfun = cfun;
7204 int *entry_flag, *exit_flag;
7205 unsigned *entry_prob, *exit_prob;
7206 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7207 edge e;
7208 edge_iterator ei;
7209 htab_t new_label_map;
7210 hash_map<void *, void *> *eh_map;
7211 struct loop *loop = entry_bb->loop_father;
7212 struct loop *loop0 = get_loop (saved_cfun, 0);
7213 struct move_stmt_d d;
7215 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7216 region. */
7217 gcc_assert (entry_bb != exit_bb
7218 && (!exit_bb
7219 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7221 /* Collect all the blocks in the region. Manually add ENTRY_BB
7222 because it won't be added by dfs_enumerate_from. */
7223 bbs.create (0);
7224 bbs.safe_push (entry_bb);
7225 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7227 if (flag_checking)
7228 verify_sese (entry_bb, exit_bb, &bbs);
7230 /* The blocks that used to be dominated by something in BBS will now be
7231 dominated by the new block. */
7232 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7233 bbs.address (),
7234 bbs.length ());
7236 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7237 the predecessor edges to ENTRY_BB and the successor edges to
7238 EXIT_BB so that we can re-attach them to the new basic block that
7239 will replace the region. */
7240 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7241 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7242 entry_flag = XNEWVEC (int, num_entry_edges);
7243 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7244 i = 0;
7245 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7247 entry_prob[i] = e->probability;
7248 entry_flag[i] = e->flags;
7249 entry_pred[i++] = e->src;
7250 remove_edge (e);
7253 if (exit_bb)
7255 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7256 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7257 exit_flag = XNEWVEC (int, num_exit_edges);
7258 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7259 i = 0;
7260 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7262 exit_prob[i] = e->probability;
7263 exit_flag[i] = e->flags;
7264 exit_succ[i++] = e->dest;
7265 remove_edge (e);
7268 else
7270 num_exit_edges = 0;
7271 exit_succ = NULL;
7272 exit_flag = NULL;
7273 exit_prob = NULL;
7276 /* Switch context to the child function to initialize DEST_FN's CFG. */
7277 gcc_assert (dest_cfun->cfg == NULL);
7278 push_cfun (dest_cfun);
7280 init_empty_tree_cfg ();
7282 /* Initialize EH information for the new function. */
7283 eh_map = NULL;
7284 new_label_map = NULL;
7285 if (saved_cfun->eh)
7287 eh_region region = NULL;
7289 FOR_EACH_VEC_ELT (bbs, i, bb)
7290 region = find_outermost_region_in_block (saved_cfun, bb, region);
7292 init_eh_for_function ();
7293 if (region != NULL)
7295 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7296 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7297 new_label_mapper, new_label_map);
7301 /* Initialize an empty loop tree. */
7302 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7303 init_loops_structure (dest_cfun, loops, 1);
7304 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7305 set_loops_for_fn (dest_cfun, loops);
7307 /* Move the outlined loop tree part. */
7308 num_nodes = bbs.length ();
7309 FOR_EACH_VEC_ELT (bbs, i, bb)
7311 if (bb->loop_father->header == bb)
7313 struct loop *this_loop = bb->loop_father;
7314 struct loop *outer = loop_outer (this_loop);
7315 if (outer == loop
7316 /* If the SESE region contains some bbs ending with
7317 a noreturn call, those are considered to belong
7318 to the outermost loop in saved_cfun, rather than
7319 the entry_bb's loop_father. */
7320 || outer == loop0)
7322 if (outer != loop)
7323 num_nodes -= this_loop->num_nodes;
7324 flow_loop_tree_node_remove (bb->loop_father);
7325 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7326 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7329 else if (bb->loop_father == loop0 && loop0 != loop)
7330 num_nodes--;
7332 /* Remove loop exits from the outlined region. */
7333 if (loops_for_fn (saved_cfun)->exits)
7334 FOR_EACH_EDGE (e, ei, bb->succs)
7336 struct loops *l = loops_for_fn (saved_cfun);
7337 loop_exit **slot
7338 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7339 NO_INSERT);
7340 if (slot)
7341 l->exits->clear_slot (slot);
7346 /* Adjust the number of blocks in the tree root of the outlined part. */
7347 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7349 /* Setup a mapping to be used by move_block_to_fn. */
7350 loop->aux = current_loops->tree_root;
7351 loop0->aux = current_loops->tree_root;
7353 pop_cfun ();
7355 /* Move blocks from BBS into DEST_CFUN. */
7356 gcc_assert (bbs.length () >= 2);
7357 after = dest_cfun->cfg->x_entry_block_ptr;
7358 hash_map<tree, tree> vars_map;
7360 memset (&d, 0, sizeof (d));
7361 d.orig_block = orig_block;
7362 d.new_block = DECL_INITIAL (dest_cfun->decl);
7363 d.from_context = cfun->decl;
7364 d.to_context = dest_cfun->decl;
7365 d.vars_map = &vars_map;
7366 d.new_label_map = new_label_map;
7367 d.eh_map = eh_map;
7368 d.remap_decls_p = true;
7370 if (gimple_in_ssa_p (cfun))
7371 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7373 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7374 set_ssa_default_def (dest_cfun, arg, narg);
7375 vars_map.put (arg, narg);
7378 FOR_EACH_VEC_ELT (bbs, i, bb)
7380 /* No need to update edge counts on the last block. It has
7381 already been updated earlier when we detached the region from
7382 the original CFG. */
7383 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7384 after = bb;
7387 loop->aux = NULL;
7388 loop0->aux = NULL;
7389 /* Loop sizes are no longer correct, fix them up. */
7390 loop->num_nodes -= num_nodes;
7391 for (struct loop *outer = loop_outer (loop);
7392 outer; outer = loop_outer (outer))
7393 outer->num_nodes -= num_nodes;
7394 loop0->num_nodes -= bbs.length () - num_nodes;
7396 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7398 struct loop *aloop;
7399 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7400 if (aloop != NULL)
7402 if (aloop->simduid)
7404 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7405 d.to_context);
7406 dest_cfun->has_simduid_loops = true;
7408 if (aloop->force_vectorize)
7409 dest_cfun->has_force_vectorize_loops = true;
7413 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7414 if (orig_block)
7416 tree block;
7417 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7418 == NULL_TREE);
7419 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7420 = BLOCK_SUBBLOCKS (orig_block);
7421 for (block = BLOCK_SUBBLOCKS (orig_block);
7422 block; block = BLOCK_CHAIN (block))
7423 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7424 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7427 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7428 &vars_map, dest_cfun->decl);
7430 if (new_label_map)
7431 htab_delete (new_label_map);
7432 if (eh_map)
7433 delete eh_map;
7435 if (gimple_in_ssa_p (cfun))
7437 /* We need to release ssa-names in a defined order, so first find them,
7438 and then iterate in ascending version order. */
7439 bitmap release_names = BITMAP_ALLOC (NULL);
7440 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7441 bitmap_iterator bi;
7442 unsigned i;
7443 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7444 release_ssa_name (ssa_name (i));
7445 BITMAP_FREE (release_names);
7448 /* Rewire the entry and exit blocks. The successor to the entry
7449 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7450 the child function. Similarly, the predecessor of DEST_FN's
7451 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7452 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7453 various CFG manipulation functions get to the right CFG.
7455 FIXME, this is silly. The CFG ought to become a parameter to
7456 these helpers. */
7457 push_cfun (dest_cfun);
7458 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7459 if (exit_bb)
7460 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7461 pop_cfun ();
7463 /* Back in the original function, the SESE region has disappeared,
7464 create a new basic block in its place. */
7465 bb = create_empty_bb (entry_pred[0]);
7466 if (current_loops)
7467 add_bb_to_loop (bb, loop);
7468 for (i = 0; i < num_entry_edges; i++)
7470 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7471 e->probability = entry_prob[i];
7474 for (i = 0; i < num_exit_edges; i++)
7476 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7477 e->probability = exit_prob[i];
7480 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7481 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7482 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7483 dom_bbs.release ();
7485 if (exit_bb)
7487 free (exit_prob);
7488 free (exit_flag);
7489 free (exit_succ);
7491 free (entry_prob);
7492 free (entry_flag);
7493 free (entry_pred);
7494 bbs.release ();
7496 return bb;
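/* Hedged usage sketch, modeled on the OMP expansion code (the main
   caller of this function); the surrounding setup is elided:

     child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
     child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     ...
     new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   Afterwards the region lives in CHILD_CFUN's brand-new CFG, and the
   returned NEW_BB is the empty block left in its place in the parent.  */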
7499 /* Dump default def DEF to file FILE using FLAGS and indentation
7500 SPC. */
7502 static void
7503 dump_default_def (FILE *file, tree def, int spc, int flags)
7505 for (int i = 0; i < spc; ++i)
7506 fprintf (file, " ");
7507 dump_ssaname_info_to_file (file, def, spc);
7509 print_generic_expr (file, TREE_TYPE (def), flags);
7510 fprintf (file, " ");
7511 print_generic_expr (file, def, flags);
7512 fprintf (file, " = ");
7513 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7514 fprintf (file, ";\n");
7517 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7520 void
7521 dump_function_to_file (tree fndecl, FILE *file, int flags)
7523 tree arg, var, old_current_fndecl = current_function_decl;
7524 struct function *dsf;
7525 bool ignore_topmost_bind = false, any_var = false;
7526 basic_block bb;
7527 tree chain;
7528 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7529 && decl_is_tm_clone (fndecl));
7530 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7532 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7534 fprintf (file, "__attribute__((");
7536 bool first = true;
7537 tree chain;
7538 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7539 first = false, chain = TREE_CHAIN (chain))
7541 if (!first)
7542 fprintf (file, ", ");
7544 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7545 if (TREE_VALUE (chain) != NULL_TREE)
7547 fprintf (file, " (");
7548 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7549 fprintf (file, ")");
7553 fprintf (file, "))\n");
7556 current_function_decl = fndecl;
7557 if (flags & TDF_GIMPLE)
7559 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7560 dump_flags | TDF_SLIM);
7561 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7563 else
7564 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7566 arg = DECL_ARGUMENTS (fndecl);
7567 while (arg)
7569 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7570 fprintf (file, " ");
7571 print_generic_expr (file, arg, dump_flags);
7572 if (flags & TDF_VERBOSE)
7573 print_node (file, "", arg, 4);
7574 if (DECL_CHAIN (arg))
7575 fprintf (file, ", ");
7576 arg = DECL_CHAIN (arg);
7578 fprintf (file, ")\n");
7580 if (flags & TDF_VERBOSE)
7581 print_node (file, "", fndecl, 2);
7583 dsf = DECL_STRUCT_FUNCTION (fndecl);
7584 if (dsf && (flags & TDF_EH))
7585 dump_eh_tree (file, dsf);
7587 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7589 dump_node (fndecl, TDF_SLIM | flags, file);
7590 current_function_decl = old_current_fndecl;
7591 return;
7594 /* When GIMPLE is lowered, the variables are no longer available in
7595 BIND_EXPRs, so display them separately. */
7596 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7598 unsigned ix;
7599 ignore_topmost_bind = true;
7601 fprintf (file, "{\n");
7602 if (gimple_in_ssa_p (fun)
7603 && (flags & TDF_ALIAS))
7605 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7606 arg = DECL_CHAIN (arg))
7608 tree def = ssa_default_def (fun, arg);
7609 if (def)
7610 dump_default_def (file, def, 2, flags);
7613 tree res = DECL_RESULT (fun->decl);
7614 if (res != NULL_TREE
7615 && DECL_BY_REFERENCE (res))
7617 tree def = ssa_default_def (fun, res);
7618 if (def)
7619 dump_default_def (file, def, 2, flags);
7622 tree static_chain = fun->static_chain_decl;
7623 if (static_chain != NULL_TREE)
7625 tree def = ssa_default_def (fun, static_chain);
7626 if (def)
7627 dump_default_def (file, def, 2, flags);
7631 if (!vec_safe_is_empty (fun->local_decls))
7632 FOR_EACH_LOCAL_DECL (fun, ix, var)
7634 print_generic_decl (file, var, flags);
7635 if (flags & TDF_VERBOSE)
7636 print_node (file, "", var, 4);
7637 fprintf (file, "\n");
7639 any_var = true;
7642 tree name;
7644 if (gimple_in_ssa_p (cfun))
7645 FOR_EACH_SSA_NAME (ix, name, cfun)
7647 if (!SSA_NAME_VAR (name))
7649 fprintf (file, " ");
7650 print_generic_expr (file, TREE_TYPE (name), flags);
7651 fprintf (file, " ");
7652 print_generic_expr (file, name, flags);
7653 fprintf (file, ";\n");
7655 any_var = true;
7660 if (fun && fun->decl == fndecl
7661 && fun->cfg
7662 && basic_block_info_for_fn (fun))
7664 /* If the CFG has been built, emit a CFG-based dump. */
7665 if (!ignore_topmost_bind)
7666 fprintf (file, "{\n");
7668 if (any_var && n_basic_blocks_for_fn (fun))
7669 fprintf (file, "\n");
7671 FOR_EACH_BB_FN (bb, fun)
7672 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7674 fprintf (file, "}\n");
7676 else if (fun->curr_properties & PROP_gimple_any)
7678 /* The function is now in GIMPLE form but the CFG has not been
7679 built yet. Emit the single sequence of GIMPLE statements
7680 that make up its body. */
7681 gimple_seq body = gimple_body (fndecl);
7683 if (gimple_seq_first_stmt (body)
7684 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7685 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7686 print_gimple_seq (file, body, 0, flags);
7687 else
7689 if (!ignore_topmost_bind)
7690 fprintf (file, "{\n");
7692 if (any_var)
7693 fprintf (file, "\n");
7695 print_gimple_seq (file, body, 2, flags);
7696 fprintf (file, "}\n");
7699 else
7701 int indent;
7703 /* Make a tree based dump. */
7704 chain = DECL_SAVED_TREE (fndecl);
7705 if (chain && TREE_CODE (chain) == BIND_EXPR)
7707 if (ignore_topmost_bind)
7709 chain = BIND_EXPR_BODY (chain);
7710 indent = 2;
7712 else
7713 indent = 0;
7715 else
7717 if (!ignore_topmost_bind)
7719 fprintf (file, "{\n");
7720 /* No topmost bind, pretend it's ignored for later. */
7721 ignore_topmost_bind = true;
7723 indent = 2;
7726 if (any_var)
7727 fprintf (file, "\n");
7729 print_generic_stmt_indented (file, chain, flags, indent);
7730 if (ignore_topmost_bind)
7731 fprintf (file, "}\n");
7734 if (flags & TDF_ENUMERATE_LOCALS)
7735 dump_enumerated_decls (file, flags);
7736 fprintf (file, "\n\n");
7738 current_function_decl = old_current_fndecl;
7741 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7743 DEBUG_FUNCTION void
7744 debug_function (tree fn, int flags)
7746 dump_function_to_file (fn, stderr, flags);
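/* For example, from gdb one can dump the current function's body with
   something like (FLAGS is any TDF_* combination from dumpfile.h):

     (gdb) call debug_function (current_function_decl, TDF_BLOCKS)  */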
7750 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7752 static void
7753 print_pred_bbs (FILE *file, basic_block bb)
7755 edge e;
7756 edge_iterator ei;
7758 FOR_EACH_EDGE (e, ei, bb->preds)
7759 fprintf (file, "bb_%d ", e->src->index);
7763 /* Print on FILE the indexes for the successors of basic_block BB. */
7765 static void
7766 print_succ_bbs (FILE *file, basic_block bb)
7768 edge e;
7769 edge_iterator ei;
7771 FOR_EACH_EDGE (e, ei, bb->succs)
7772 fprintf (file, "bb_%d ", e->dest->index);
7775 /* Print to FILE the basic block BB following the VERBOSITY level. */
7777 void
7778 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7780 char *s_indent = (char *) alloca ((size_t) indent + 1);
7781 memset ((void *) s_indent, ' ', (size_t) indent);
7782 s_indent[indent] = '\0';
7784 /* Print basic_block's header. */
7785 if (verbosity >= 2)
7787 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7788 print_pred_bbs (file, bb);
7789 fprintf (file, "}, succs = {");
7790 print_succ_bbs (file, bb);
7791 fprintf (file, "})\n");
7794 /* Print basic_block's body. */
7795 if (verbosity >= 3)
7797 fprintf (file, "%s {\n", s_indent);
7798 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7799 fprintf (file, "%s }\n", s_indent);
7803 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7805 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7806 VERBOSITY level this outputs the contents of the loop, or just its
7807 structure. */
7809 static void
7810 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7812 char *s_indent;
7813 basic_block bb;
7815 if (loop == NULL)
7816 return;
7818 s_indent = (char *) alloca ((size_t) indent + 1);
7819 memset ((void *) s_indent, ' ', (size_t) indent);
7820 s_indent[indent] = '\0';
7822 /* Print loop's header. */
7823 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7824 if (loop->header)
7825 fprintf (file, "header = %d", loop->header->index);
7826 else
7828 fprintf (file, "deleted)\n");
7829 return;
7831 if (loop->latch)
7832 fprintf (file, ", latch = %d", loop->latch->index);
7833 else
7834 fprintf (file, ", multiple latches");
7835 fprintf (file, ", niter = ");
7836 print_generic_expr (file, loop->nb_iterations, 0);
7838 if (loop->any_upper_bound)
7840 fprintf (file, ", upper_bound = ");
7841 print_decu (loop->nb_iterations_upper_bound, file);
7843 if (loop->any_likely_upper_bound)
7845 fprintf (file, ", likely_upper_bound = ");
7846 print_decu (loop->nb_iterations_likely_upper_bound, file);
7849 if (loop->any_estimate)
7851 fprintf (file, ", estimate = ");
7852 print_decu (loop->nb_iterations_estimate, file);
7854 fprintf (file, ")\n");
7856 /* Print loop's body. */
7857 if (verbosity >= 1)
7859 fprintf (file, "%s{\n", s_indent);
7860 FOR_EACH_BB_FN (bb, cfun)
7861 if (bb->loop_father == loop)
7862 print_loops_bb (file, bb, indent, verbosity);
7864 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7865 fprintf (file, "%s}\n", s_indent);
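/* Putting the format strings together, a hypothetical loop nest might
   print as (illustrative output only):

     loop_1 (header = 3, latch = 5, niter = , upper_bound = 16)
     {
       loop_2 (header = 4, latch = 4, niter = )
     }

   The braces and contained loops appear at VERBOSITY >= 1, basic block
   headers at VERBOSITY >= 2, and full block bodies at VERBOSITY >= 3.  */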
7869 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7870 spaces. Following VERBOSITY level this outputs the contents of the
7871 loop, or just its structure. */
7873 static void
7874 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7875 int verbosity)
7877 if (loop == NULL)
7878 return;
7880 print_loop (file, loop, indent, verbosity);
7881 print_loop_and_siblings (file, loop->next, indent, verbosity);
7884 /* Follow a CFG edge from the entry point of the program, and on entry
7885 of a loop, pretty print the loop structure on FILE. */
7887 void
7888 print_loops (FILE *file, int verbosity)
7890 basic_block bb;
7892 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7893 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7894 if (bb && bb->loop_father)
7895 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7898 /* Dump a loop. */
7900 DEBUG_FUNCTION void
7901 debug (struct loop &ref)
7903 print_loop (stderr, &ref, 0, /*verbosity*/0);
7906 DEBUG_FUNCTION void
7907 debug (struct loop *ptr)
7909 if (ptr)
7910 debug (*ptr);
7911 else
7912 fprintf (stderr, "<nil>\n");
7915 /* Dump a loop verbosely. */
7917 DEBUG_FUNCTION void
7918 debug_verbose (struct loop &ref)
7920 print_loop (stderr, &ref, 0, /*verbosity*/3);
7923 DEBUG_FUNCTION void
7924 debug_verbose (struct loop *ptr)
7926 if (ptr)
7927 debug (*ptr);
7928 else
7929 fprintf (stderr, "<nil>\n");
7933 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7935 DEBUG_FUNCTION void
7936 debug_loops (int verbosity)
7938 print_loops (stderr, verbosity);
7941 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7943 DEBUG_FUNCTION void
7944 debug_loop (struct loop *loop, int verbosity)
7946 print_loop (stderr, loop, 0, verbosity);
7949 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7950 level. */
7952 DEBUG_FUNCTION void
7953 debug_loop_num (unsigned num, int verbosity)
7955 debug_loop (get_loop (cfun, num), verbosity);
7958 /* Return true if BB ends with a call, possibly followed by some
7959 instructions that must stay with the call. Return false,
7960 otherwise. */
7962 static bool
7963 gimple_block_ends_with_call_p (basic_block bb)
7965 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7966 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7970 /* Return true if BB ends with a conditional branch. Return false,
7971 otherwise. */
7973 static bool
7974 gimple_block_ends_with_condjump_p (const_basic_block bb)
7976 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7977 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7981 /* Return true if statement T may terminate execution of BB in ways not
7982 explicitly represented in the CFG. */
7984 bool
7985 stmt_can_terminate_bb_p (gimple *t)
7987 tree fndecl = NULL_TREE;
7988 int call_flags = 0;
7990 /* An EH exception not handled internally terminates execution of the whole
7991 function. */
7992 if (stmt_can_throw_external (t))
7993 return true;
7995 /* NORETURN and LONGJMP calls already have an edge to exit.
7996 CONST and PURE calls do not need one.
7997 We don't currently check for CONST and PURE here, although
7998 it would be a good idea, because those attributes are
7999 figured out from the RTL in mark_constant_function, and
8000 the counter incrementation code from -fprofile-arcs
8001 leads to different results from -fbranch-probabilities. */
8002 if (is_gimple_call (t))
8004 fndecl = gimple_call_fndecl (t);
8005 call_flags = gimple_call_flags (t);
8008 if (is_gimple_call (t)
8009 && fndecl
8010 && DECL_BUILT_IN (fndecl)
8011 && (call_flags & ECF_NOTHROW)
8012 && !(call_flags & ECF_RETURNS_TWICE)
8013 /* fork() doesn't really return twice, but the effect of
8014 wrapping it in __gcov_fork() which calls __gcov_flush()
8015 and clears the counters before forking has the same
8016 effect as returning twice. Force a fake edge. */
8017 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8018 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8019 return false;
8021 if (is_gimple_call (t))
8023 edge_iterator ei;
8024 edge e;
8025 basic_block bb;
8027 if (call_flags & (ECF_PURE | ECF_CONST)
8028 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8029 return false;
8031 /* A function call may longjmp, terminate the program or do other things.
8032 Special-case noreturn calls that have non-abnormal edges out, as there
8033 the fact is sufficiently represented by the lack of edges out of T. */
8034 if (!(call_flags & ECF_NORETURN))
8035 return true;
8037 bb = gimple_bb (t);
8038 FOR_EACH_EDGE (e, ei, bb->succs)
8039 if ((e->flags & EDGE_FAKE) == 0)
8040 return true;
8043 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8044 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8045 return true;
8047 return false;
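/* Two contrasting examples (illustrative): an ordinary call

     foo (x);                   // may exit, longjmp or throw -> true

   can terminate the block in ways the CFG does not show, while a
   nothrow const builtin such as

     y = __builtin_abs (x);     // nothrow, not returns_twice -> false

   cannot, so it needs no fake edge to EXIT.  */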
8051 /* Add fake edges to the function exit for any non-constant and
8052 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
8053 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8054 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8055 that were split.
8057 The goal is to expose cases in which entering a basic block does
8058 not imply that all subsequent instructions must be executed. */
8060 static int
8061 gimple_flow_call_edges_add (sbitmap blocks)
8063 int i;
8064 int blocks_split = 0;
8065 int last_bb = last_basic_block_for_fn (cfun);
8066 bool check_last_block = false;
8068 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8069 return 0;
8071 if (! blocks)
8072 check_last_block = true;
8073 else
8074 check_last_block = bitmap_bit_p (blocks,
8075 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8077 /* In the last basic block, before epilogue generation, there will be
8078 a fallthru edge to EXIT. Special care is required if the last insn
8079 of the last basic block is a call because make_edge folds duplicate
8080 edges, which would result in the fallthru edge also being marked
8081 fake, which would result in the fallthru edge being removed by
8082 remove_fake_edges, which would result in an invalid CFG.
8084 Moreover, we can't elide the outgoing fake edge, since the block
8085 profiler needs to take this into account in order to solve the minimal
8086 spanning tree in the case that the call doesn't return.
8088 Handle this by adding a dummy instruction in a new last basic block. */
8089 if (check_last_block)
8091 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8092 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8093 gimple *t = NULL;
8095 if (!gsi_end_p (gsi))
8096 t = gsi_stmt (gsi);
8098 if (t && stmt_can_terminate_bb_p (t))
8100 edge e;
8102 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8103 if (e)
8105 gsi_insert_on_edge (e, gimple_build_nop ());
8106 gsi_commit_edge_inserts ();
8111 /* Now add fake edges to the function exit for any non-constant
8112 calls since there is no way that we can determine if they will
8113 return or not... */
8114 for (i = 0; i < last_bb; i++)
8116 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8117 gimple_stmt_iterator gsi;
8118 gimple *stmt, *last_stmt;
8120 if (!bb)
8121 continue;
8123 if (blocks && !bitmap_bit_p (blocks, i))
8124 continue;
8126 gsi = gsi_last_nondebug_bb (bb);
8127 if (!gsi_end_p (gsi))
8129 last_stmt = gsi_stmt (gsi);
8132 stmt = gsi_stmt (gsi);
8133 if (stmt_can_terminate_bb_p (stmt))
8135 edge e;
8137 /* The handling above of the final block before the
8138 epilogue should be enough to verify that there is
8139 no edge to the exit block in CFG already.
8140 Calling make_edge in such case would cause us to
8141 mark that edge as fake and remove it later. */
8142 if (flag_checking && stmt == last_stmt)
8144 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8145 gcc_assert (e == NULL);
8148 /* Note that the following may create a new basic block
8149 and renumber the existing basic blocks. */
8150 if (stmt != last_stmt)
8152 e = split_block (bb, stmt);
8153 if (e)
8154 blocks_split++;
8156 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8158 gsi_prev (&gsi);
8160 while (!gsi_end_p (gsi));
8164 if (blocks_split)
8165 verify_flow_info ();
8167 return blocks_split;
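/* Conceptually (illustrative), given a block

     bb:  x = foo ();           // foo may never return
          y = x + 1;

   the function splits BB after the call and adds a fake edge
   bb -> EXIT_BLOCK, so that the -fprofile-arcs spanning tree remains
   solvable even when foo () terminates the program.  */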
8170 /* Removes edge E and all the blocks dominated by it, and updates dominance
8171 information. The IL in E->src needs to be updated separately.
8172 If dominance info is not available, only the edge E is removed.  */
8174 void
8175 remove_edge_and_dominated_blocks (edge e)
8177 vec<basic_block> bbs_to_remove = vNULL;
8178 vec<basic_block> bbs_to_fix_dom = vNULL;
8179 bitmap df, df_idom;
8180 edge f;
8181 edge_iterator ei;
8182 bool none_removed = false;
8183 unsigned i;
8184 basic_block bb, dbb;
8185 bitmap_iterator bi;
8187 /* If we are removing a path inside a non-root loop, we may change the
8188 loop ownership of blocks or remove loops.  Mark loops for fixup. */
8189 if (current_loops
8190 && loop_outer (e->src->loop_father) != NULL
8191 && e->src->loop_father == e->dest->loop_father)
8192 loops_state_set (LOOPS_NEED_FIXUP);
8194 if (!dom_info_available_p (CDI_DOMINATORS))
8196 remove_edge (e);
8197 return;
8200 /* No updating is needed for edges to exit. */
8201 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8203 if (cfgcleanup_altered_bbs)
8204 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8205 remove_edge (e);
8206 return;
8209 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8210 that is not dominated by E->dest, then this set is empty. Otherwise,
8211 all the basic blocks dominated by E->dest are removed.
8213 Also, to DF_IDOM we store the immediate dominators of the blocks in
8214 the dominance frontier of E (i.e., of the successors of the
8215 removed blocks, if there are any, and of E->dest otherwise). */
8216 FOR_EACH_EDGE (f, ei, e->dest->preds)
8218 if (f == e)
8219 continue;
8221 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8223 none_removed = true;
8224 break;
8228 df = BITMAP_ALLOC (NULL);
8229 df_idom = BITMAP_ALLOC (NULL);
8231 if (none_removed)
8232 bitmap_set_bit (df_idom,
8233 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8234 else
8236 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8237 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8239 FOR_EACH_EDGE (f, ei, bb->succs)
8241 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8242 bitmap_set_bit (df, f->dest->index);
8245 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8246 bitmap_clear_bit (df, bb->index);
8248 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8250 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8251 bitmap_set_bit (df_idom,
8252 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8256 if (cfgcleanup_altered_bbs)
8258 /* Record the set of the altered basic blocks. */
8259 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8260 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8263 /* Remove E and the cancelled blocks. */
8264 if (none_removed)
8265 remove_edge (e);
8266 else
8268 /* Walk backwards so as to get a chance to substitute all
8269 released DEFs into debug stmts. See
8270 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8271 details. */
8272 for (i = bbs_to_remove.length (); i-- > 0; )
8273 delete_basic_block (bbs_to_remove[i]);
8276 /* Update the dominance information. The immediate dominator may change only
8277 for blocks whose immediate dominator belongs to DF_IDOM:
8279 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8280 removal.  Let Z be an arbitrary block such that idom(Z) = Y and
8281 Z dominates X after the removal. Before removal, there exists a path P
8282 from Y to X that avoids Z. Let F be the last edge on P that is
8283 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8284 dominates W, and because of P, Z does not dominate W), and W belongs to
8285 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8286 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8288 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8289 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8290 dbb;
8291 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8292 bbs_to_fix_dom.safe_push (dbb);
8295 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8297 BITMAP_FREE (df);
8298 BITMAP_FREE (df_idom);
8299 bbs_to_remove.release ();
8300 bbs_to_fix_dom.release ();
8303 /* Purge dead EH edges from basic block BB. */
8305 bool
8306 gimple_purge_dead_eh_edges (basic_block bb)
8308 bool changed = false;
8309 edge e;
8310 edge_iterator ei;
8311 gimple *stmt = last_stmt (bb);
8313 if (stmt && stmt_can_throw_internal (stmt))
8314 return false;
8316 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8318 if (e->flags & EDGE_EH)
8320 remove_edge_and_dominated_blocks (e);
8321 changed = true;
8323 else
8324 ei_next (&ei);
8327 return changed;
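/* Typical use (hedged sketch): after a pass replaces a throwing
   statement with one that cannot throw, e.g.

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
       gimple_purge_dead_eh_edges (gimple_bb (new_stmt));

   the now-dead EDGE_EH successors, and any blocks only they dominated,
   are removed.  maybe_clean_or_replace_eh_stmt is the tree-eh.c helper
   that reports whether the statement's EH information changed.  */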
8330 /* Purge dead EH edges from basic block listed in BLOCKS. */
8332 bool
8333 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8335 bool changed = false;
8336 unsigned i;
8337 bitmap_iterator bi;
8339 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8341 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8343 /* Earlier gimple_purge_dead_eh_edges could have removed
8344 this basic block already. */
8345 gcc_assert (bb || changed);
8346 if (bb != NULL)
8347 changed |= gimple_purge_dead_eh_edges (bb);
8350 return changed;
8353 /* Purge dead abnormal call edges from basic block BB. */
8355 bool
8356 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8358 bool changed = false;
8359 edge e;
8360 edge_iterator ei;
8361 gimple *stmt = last_stmt (bb);
8363 if (!cfun->has_nonlocal_label
8364 && !cfun->calls_setjmp)
8365 return false;
8367 if (stmt && stmt_can_make_abnormal_goto (stmt))
8368 return false;
8370 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8372 if (e->flags & EDGE_ABNORMAL)
8374 if (e->flags & EDGE_FALLTHRU)
8375 e->flags &= ~EDGE_ABNORMAL;
8376 else
8377 remove_edge_and_dominated_blocks (e);
8378 changed = true;
8380 else
8381 ei_next (&ei);
8384 return changed;
8387 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8389 bool
8390 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8392 bool changed = false;
8393 unsigned i;
8394 bitmap_iterator bi;
8396 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8398 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8400 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8401 this basic block already. */
8402 gcc_assert (bb || changed);
8403 if (bb != NULL)
8404 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8407 return changed;
8410 /* This function is called whenever a new edge is created or
8411 redirected. */
8413 static void
8414 gimple_execute_on_growing_pred (edge e)
8416 basic_block bb = e->dest;
8418 if (!gimple_seq_empty_p (phi_nodes (bb)))
8419 reserve_phi_args_for_new_edge (bb);
8422 /* This function is called immediately before edge E is removed from
8423 the edge vector E->dest->preds. */
8425 static void
8426 gimple_execute_on_shrinking_pred (edge e)
8428 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8429 remove_phi_args (e);
8432 /*---------------------------------------------------------------------------
8433 Helper functions for Loop versioning
8434 ---------------------------------------------------------------------------*/
8436 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8437 of 'first'. Both of them are dominated by 'new_head' basic block. When
8438 'new_head' was created by splitting 'second's incoming edge, it received
8439 phi arguments on that edge via split_edge ().  Later, an additional edge
8440 'e' was created to connect 'new_head' and 'first'.  This routine now adds
8441 to edge 'e' the phi args that the new_head-to-second edge received as
8442 part of the edge splitting. */
8444 static void
8445 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8446 basic_block new_head, edge e)
8448 gphi *phi1, *phi2;
8449 gphi_iterator psi1, psi2;
8450 tree def;
8451 edge e2 = find_edge (new_head, second);
8453 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8454 edge, we should always have an edge from NEW_HEAD to SECOND. */
8455 gcc_assert (e2 != NULL);
8457 /* Browse all 'second' basic block phi nodes and add phi args to
8458 edge 'e' for 'first' head. PHI args are always in correct order. */
8460 for (psi2 = gsi_start_phis (second),
8461 psi1 = gsi_start_phis (first);
8462 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8463 gsi_next (&psi2), gsi_next (&psi1))
8465 phi1 = psi1.phi ();
8466 phi2 = psi2.phi ();
8467 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8468 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
                               basic_block second_head ATTRIBUTE_UNUSED,
                               basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build the new conditional expression.  */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
                                               NULL_TREE, NULL_TREE);

  /* Add the new cond statement to COND_BB.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect the new head with the first
     head as well as the second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */

static void
gimple_account_profile_record (basic_block bb, int after_pass,
                               struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size[after_pass]
        += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (profile_status_for_fn (cfun) == PROFILE_READ)
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->count;
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->frequency;
    }
}
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb, /* dump_bb */
  gimple_dump_bb_for_graph, /* dump_bb_for_graph */
  create_bb, /* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p, /* can_remove_branch_p */
  remove_bb, /* delete_basic_block */
  gimple_split_block, /* split_block */
  gimple_move_block_after, /* move_block_after */
  gimple_can_merge_blocks_p, /* can_merge_blocks_p */
  gimple_merge_blocks, /* merge_blocks */
  gimple_predict_edge, /* predict_edge */
  gimple_predicted_by_p, /* predicted_by_p */
  gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
  gimple_duplicate_bb, /* duplicate_block */
  gimple_split_edge, /* split_edge */
  gimple_make_forwarder_block, /* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  NULL, /* force_nonfallthru */
  gimple_block_ends_with_call_p, /* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add, /* flow_call_edges_add */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, /* flush_pending_stmts */
  gimple_empty_block_p, /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,
};
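/* These hooks are not normally called directly; the generic CFG code
   in cfghooks.c dispatches through the active hooks table once
   gimple_register_cfg_hooks () has been called.  An illustrative
   sketch (the particular blocks are made up):

     gimple_register_cfg_hooks ();
     edge e = find_edge (bb1, bb2);
     basic_block new_bb = split_edge (e);  (dispatches to gimple_split_edge)

   find_edge and split_edge are the existing generic entry points.  */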
/* Split all critical edges.  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements on edges and expects that
             since split_critical_edges was done beforehand, committing edge
             insertions will not split more edges.  In addition to critical
             edges we must split edges that have multiple successors and
             end with control flow statements, such as RESX.
             Go ahead and split them too.  This matches the logic in
             gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
                    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
                      && !gimple_call_builtin_p (gsi_stmt (gsi),
                                                 BUILT_IN_RETURN)))
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}
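/* For reference: an edge is critical when its source has multiple
   successors and its destination has multiple predecessors, so code
   cannot be inserted on the edge without a new block.  EDGE_CRITICAL_P
   encodes essentially this test (a sketch; see its definition in
   basic-block.h for the authoritative form):

     EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2
*/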
namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression, which is a GIMPLE_COND, after STMT
   in basic block BB, splitting the block as appropriate and
   creating a new conditionally executed basic block.
   Return the created basic block.  */

basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert the cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create the conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix the edge for the split bb.  */
  fall->flags = EDGE_FALSE_VALUE;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
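/* Example (hypothetical caller): to guard code that should only run
   when a runtime check fires, one might do

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block check_bb = insert_cond_bb (bb, stmt, cond);

   and then populate CHECK_BB with the conditionally executed
   statements.  VAL, BB and STMT are placeholders for the caller's
   context.  */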
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
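/* Example (illustrative): these helpers fold and then gimplify, so a
   caller positioned at GSI can build composite values directly, e.g.

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree sel = gimplify_build3 (gsi, COND_EXPR, type, cmp, sum, c);

   with any statements needed to compute the results inserted before
   the statement at GSI.  A, B, C, CMP and TYPE are placeholders for
   the caller's trees.  */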
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
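/* Example (illustrative): a caller that has verified BB ends in a
   GIMPLE_COND can recover both outcomes at once:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
     if (integer_onep (cond))
       keep_edge (true_edge);

   KEEP_EDGE and COND are stand-ins for whatever the caller does with
   the chosen edge; the helper itself assumes exactly two successors.  */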
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false, and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   otherwise returns false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
                                     edge *true_controlled_edge,
                                     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
          && (tem->src == true_edge->dest
              || dominated_by_p (CDI_DOMINATORS,
                                 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
           || (single_pred_p (false_edge->dest)
               && (tem->src == false_edge->dest
                   || dominated_by_p (CDI_DOMINATORS,
                                      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
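/* Example (illustrative): a caller can use this to tell which PHI
   argument corresponds to which outcome of the dominating predicate:

     edge te, fe;
     if (extract_true_false_controlled_edges (idom, phi_bb, &te, &fe))
       {
         tree true_arg = PHI_ARG_DEF (phi, te->dest_idx);
         tree false_arg = PHI_ARG_DEF (phi, fe->dest_idx);
       }

   IDOM, PHI_BB and PHI are placeholders for the caller's state; what
   is computed from the two arguments is up to the caller.  */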
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (fun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          gimple *last = last_stmt (e->src);
          greturn *return_stmt = dyn_cast <greturn *> (last);
          if (return_stmt
              && gimple_return_retval (return_stmt) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = fun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (fun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get (current_function_decl);

  count_scale
    = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count
    = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if ((flags & ECF_NORETURN)
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep access when store has side effect, i.e. in case when source
             is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }

          /* For calls we can simply remove the LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  int freq
                    = compute_call_stmt_bb_frequency (current_function_decl,
                                                      bb);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count, freq);
                }
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
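/* For reference: the scaling above is fixed-point arithmetic relative
   to REG_BR_PROB_BASE.  Roughly (a sketch; see gcov-io.h and
   basic-block.h for the authoritative definitions):

     count_scale = node->count * REG_BR_PROB_BASE / entry_count
     apply_scale (count, scale) = count * scale / REG_BR_PROB_BASE

   so after the loops every count has been rescaled by the ratio
   node->count / entry_count, with rounding handled by the macros.  */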
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */
/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}
/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       \    /
        A<--->B
        ^^   ^^
        | \ / |
        |  X  |
        | / \ |
        VV   VV
        C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */