[ARM] Fix typo in comment in arm_expand_prologue
[official-gcc.git] / gcc / tree-cfg.c
blob63532a82acbb17fdd7d76801e2c40a1f23b6ca64
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
64 /* This file contains functions for building the Control Flow Graph (CFG)
65 for a function tree. */
67 /* Local declarations. */
69 /* Initial capacity for the basic block array. */
70 static const int initial_cfg_capacity = 20;
72 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
73 which use a particular edge. The CASE_LABEL_EXPRs are chained together
74 via their CASE_CHAIN field, which we clear after we're done with the
75 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
77 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
78 update the case vector in response to edge redirections.
80 Right now this table is set up and torn down at key points in the
81 compilation process. It would be nice if we could make the table
82 more persistent. The key is getting notification of changes to
83 the CFG (particularly edge removal, creation and redirection). */
85 static hash_map<edge, tree> *edge_to_cases;
87 /* If we record edge_to_cases, this bitmap will hold indexes
88 of basic blocks that end in a GIMPLE_SWITCH which we touched
89 due to edge manipulations. */
91 static bitmap touched_switch_bbs;
93 /* CFG statistics. */
94 struct cfg_stats_d
96 long num_merged_labels;
99 static struct cfg_stats_d cfg_stats;
101 /* Data to pass to replace_block_vars_by_duplicates_1. */
102 struct replace_decls_d
104 hash_map<tree, tree> *vars_map;
105 tree to_context;
108 /* Hash table to store last discriminator assigned for each locus. */
109 struct locus_discrim_map
111 location_t locus;
112 int discriminator;
115 /* Hashtable helpers. */
117 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
119 static inline hashval_t hash (const locus_discrim_map *);
120 static inline bool equal (const locus_discrim_map *,
121 const locus_discrim_map *);
124 /* Trivial hash function for a location_t. ITEM is a pointer to
125 a hash table entry that maps a location_t to a discriminator. */
127 inline hashval_t
128 locus_discrim_hasher::hash (const locus_discrim_map *item)
130 return LOCATION_LINE (item->locus);
133 /* Equality function for the locus-to-discriminator map. A and B
134 point to the two hash table entries to compare. */
136 inline bool
137 locus_discrim_hasher::equal (const locus_discrim_map *a,
138 const locus_discrim_map *b)
140 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
143 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
145 /* Basic blocks and flowgraphs. */
146 static void make_blocks (gimple_seq);
148 /* Edges. */
149 static void make_edges (void);
150 static void assign_discriminators (void);
151 static void make_cond_expr_edges (basic_block);
152 static void make_gimple_switch_edges (gswitch *, basic_block);
153 static bool make_goto_expr_edges (basic_block);
154 static void make_gimple_asm_edges (basic_block);
155 static edge gimple_redirect_edge_and_branch (edge, basic_block);
156 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
158 /* Various helpers. */
159 static inline bool stmt_starts_bb_p (gimple *, gimple *);
160 static int gimple_verify_flow_info (void);
161 static void gimple_make_forwarder_block (edge);
162 static gimple *first_non_label_stmt (basic_block);
163 static bool verify_gimple_transaction (gtransaction *);
164 static bool call_can_make_abnormal_goto (gimple *);
166 /* Flowgraph optimization and cleanup. */
167 static void gimple_merge_blocks (basic_block, basic_block);
168 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
169 static void remove_bb (basic_block);
170 static edge find_taken_edge_computed_goto (basic_block, tree);
171 static edge find_taken_edge_cond_expr (basic_block, tree);
172 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
173 static tree find_case_label_for_value (gswitch *, tree);
174 static void lower_phi_internal_fn ();
176 void
177 init_empty_tree_cfg_for_function (struct function *fn)
179 /* Initialize the basic block array. */
180 init_flow (fn);
181 profile_status_for_fn (fn) = PROFILE_ABSENT;
182 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
183 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
184 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
185 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
186 initial_cfg_capacity);
188 /* Build a mapping of labels to their associated blocks. */
189 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
190 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
191 initial_cfg_capacity);
193 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
194 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
196 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
197 = EXIT_BLOCK_PTR_FOR_FN (fn);
198 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
199 = ENTRY_BLOCK_PTR_FOR_FN (fn);
202 void
203 init_empty_tree_cfg (void)
205 init_empty_tree_cfg_for_function (cfun);
208 /*---------------------------------------------------------------------------
209 Create basic blocks
210 ---------------------------------------------------------------------------*/
212 /* Entry point to the CFG builder for trees. SEQ is the sequence of
213 statements to be added to the flowgraph. */
215 static void
216 build_gimple_cfg (gimple_seq seq)
218 /* Register specific gimple functions. */
219 gimple_register_cfg_hooks ();
221 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
223 init_empty_tree_cfg ();
225 make_blocks (seq);
227 /* Make sure there is always at least one block, even if it's empty. */
228 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
229 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
231 /* Adjust the size of the array. */
232 if (basic_block_info_for_fn (cfun)->length ()
233 < (size_t) n_basic_blocks_for_fn (cfun))
234 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
235 n_basic_blocks_for_fn (cfun));
237 /* To speed up statement iterator walks, we first purge dead labels. */
238 cleanup_dead_labels ();
240 /* Group case nodes to reduce the number of edges.
241 We do this after cleaning up dead labels because otherwise we miss
242 a lot of obvious case merging opportunities. */
243 group_case_labels ();
245 /* Create the edges of the flowgraph. */
246 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
247 make_edges ();
248 assign_discriminators ();
249 lower_phi_internal_fn ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus;
252 discriminator_per_locus = NULL;
255 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
256 them and propagate the information to LOOP. We assume that the annotations
257 come immediately before the condition in BB, if any. */
259 static void
260 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
262 gimple_stmt_iterator gsi = gsi_last_bb (bb);
263 gimple *stmt = gsi_stmt (gsi);
265 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
266 return;
268 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
270 stmt = gsi_stmt (gsi);
271 if (gimple_code (stmt) != GIMPLE_CALL)
272 break;
273 if (!gimple_call_internal_p (stmt)
274 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
275 break;
277 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
279 case annot_expr_ivdep_kind:
280 loop->safelen = INT_MAX;
281 break;
282 case annot_expr_no_vector_kind:
283 loop->dont_vectorize = true;
284 break;
285 case annot_expr_vector_kind:
286 loop->force_vectorize = true;
287 cfun->has_force_vectorize_loops = true;
288 break;
289 default:
290 gcc_unreachable ();
293 stmt = gimple_build_assign (gimple_call_lhs (stmt),
294 gimple_call_arg (stmt, 0));
295 gsi_replace (&gsi, stmt, true);
299 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
300 them and propagate the information to the loop. We assume that the
301 annotations come immediately before the condition of the loop. */
303 static void
304 replace_loop_annotate (void)
306 struct loop *loop;
307 basic_block bb;
308 gimple_stmt_iterator gsi;
309 gimple *stmt;
311 FOR_EACH_LOOP (loop, 0)
313 /* First look into the header. */
314 replace_loop_annotate_in_block (loop->header, loop);
316 /* Then look into the latch, if any. */
317 if (loop->latch)
318 replace_loop_annotate_in_block (loop->latch, loop);
321 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
322 FOR_EACH_BB_FN (bb, cfun)
324 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
326 stmt = gsi_stmt (gsi);
327 if (gimple_code (stmt) != GIMPLE_CALL)
328 continue;
329 if (!gimple_call_internal_p (stmt)
330 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
331 continue;
333 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
335 case annot_expr_ivdep_kind:
336 case annot_expr_no_vector_kind:
337 case annot_expr_vector_kind:
338 break;
339 default:
340 gcc_unreachable ();
343 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
344 stmt = gimple_build_assign (gimple_call_lhs (stmt),
345 gimple_call_arg (stmt, 0));
346 gsi_replace (&gsi, stmt, true);
351 /* Lower internal PHI function from GIMPLE FE. */
353 static void
354 lower_phi_internal_fn ()
356 basic_block bb, pred = NULL;
357 gimple_stmt_iterator gsi;
358 tree lhs;
359 gphi *phi_node;
360 gimple *stmt;
362 /* After edge creation, handle __PHI function from GIMPLE FE. */
363 FOR_EACH_BB_FN (bb, cfun)
365 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
367 stmt = gsi_stmt (gsi);
368 if (! gimple_call_internal_p (stmt, IFN_PHI))
369 break;
371 lhs = gimple_call_lhs (stmt);
372 phi_node = create_phi_node (lhs, bb);
374 /* Add arguments to the PHI node. */
375 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
377 tree arg = gimple_call_arg (stmt, i);
378 if (TREE_CODE (arg) == LABEL_DECL)
379 pred = label_to_block (arg);
380 else
382 edge e = find_edge (pred, bb);
383 add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
387 gsi_remove (&gsi, true);
392 static unsigned int
393 execute_build_cfg (void)
395 gimple_seq body = gimple_body (current_function_decl);
397 build_gimple_cfg (body);
398 gimple_set_body (current_function_decl, NULL);
399 if (dump_file && (dump_flags & TDF_DETAILS))
401 fprintf (dump_file, "Scope blocks:\n");
402 dump_scope_blocks (dump_file, dump_flags);
404 cleanup_tree_cfg ();
405 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
406 replace_loop_annotate ();
407 return 0;
410 namespace {
412 const pass_data pass_data_build_cfg =
414 GIMPLE_PASS, /* type */
415 "cfg", /* name */
416 OPTGROUP_NONE, /* optinfo_flags */
417 TV_TREE_CFG, /* tv_id */
418 PROP_gimple_leh, /* properties_required */
419 ( PROP_cfg | PROP_loops ), /* properties_provided */
420 0, /* properties_destroyed */
421 0, /* todo_flags_start */
422 0, /* todo_flags_finish */
425 class pass_build_cfg : public gimple_opt_pass
427 public:
428 pass_build_cfg (gcc::context *ctxt)
429 : gimple_opt_pass (pass_data_build_cfg, ctxt)
432 /* opt_pass methods: */
433 virtual unsigned int execute (function *) { return execute_build_cfg (); }
435 }; // class pass_build_cfg
437 } // anon namespace
439 gimple_opt_pass *
440 make_pass_build_cfg (gcc::context *ctxt)
442 return new pass_build_cfg (ctxt);
446 /* Return true if T is a computed goto. */
448 bool
449 computed_goto_p (gimple *t)
451 return (gimple_code (t) == GIMPLE_GOTO
452 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
455 /* Returns true if the sequence of statements STMTS only contains
456 a call to __builtin_unreachable (). */
458 bool
459 gimple_seq_unreachable_p (gimple_seq stmts)
461 if (stmts == NULL)
462 return false;
464 gimple_stmt_iterator gsi = gsi_last (stmts);
466 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
467 return false;
469 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
471 gimple *stmt = gsi_stmt (gsi);
472 if (gimple_code (stmt) != GIMPLE_LABEL
473 && !is_gimple_debug (stmt)
474 && !gimple_clobber_p (stmt))
475 return false;
477 return true;
480 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
481 the other edge points to a bb with just __builtin_unreachable ().
482 I.e. return true for C->M edge in:
483 <bb C>:
485 if (something)
486 goto <bb N>;
487 else
488 goto <bb M>;
489 <bb N>:
490 __builtin_unreachable ();
491 <bb M>: */
493 bool
494 assert_unreachable_fallthru_edge_p (edge e)
496 basic_block pred_bb = e->src;
497 gimple *last = last_stmt (pred_bb);
498 if (last && gimple_code (last) == GIMPLE_COND)
500 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
501 if (other_bb == e->dest)
502 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
503 if (EDGE_COUNT (other_bb->succs) == 0)
504 return gimple_seq_unreachable_p (bb_seq (other_bb));
506 return false;
510 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
511 could alter control flow except via eh. We initialize the flag at
512 CFG build time and only ever clear it later. */
514 static void
515 gimple_call_initialize_ctrl_altering (gimple *stmt)
517 int flags = gimple_call_flags (stmt);
519 /* A call alters control flow if it can make an abnormal goto. */
520 if (call_can_make_abnormal_goto (stmt)
521 /* A call also alters control flow if it does not return. */
522 || flags & ECF_NORETURN
523 /* TM ending statements have backedges out of the transaction.
524 Return true so we split the basic block containing them.
525 Note that the TM_BUILTIN test is merely an optimization. */
526 || ((flags & ECF_TM_BUILTIN)
527 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
528 /* BUILT_IN_RETURN call is same as return statement. */
529 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
530 /* IFN_UNIQUE should be the last insn, to make checking for it
531 as cheap as possible. */
532 || (gimple_call_internal_p (stmt)
533 && gimple_call_internal_unique_p (stmt)))
534 gimple_call_set_ctrl_altering (stmt, true);
535 else
536 gimple_call_set_ctrl_altering (stmt, false);
540 /* Insert SEQ after BB and build a flowgraph. */
542 static basic_block
543 make_blocks_1 (gimple_seq seq, basic_block bb)
545 gimple_stmt_iterator i = gsi_start (seq);
546 gimple *stmt = NULL;
547 bool start_new_block = true;
548 bool first_stmt_of_seq = true;
550 while (!gsi_end_p (i))
552 gimple *prev_stmt;
554 prev_stmt = stmt;
555 stmt = gsi_stmt (i);
557 if (stmt && is_gimple_call (stmt))
558 gimple_call_initialize_ctrl_altering (stmt);
560 /* If the statement starts a new basic block or if we have determined
561 in a previous pass that we need to create a new block for STMT, do
562 so now. */
563 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
565 if (!first_stmt_of_seq)
566 gsi_split_seq_before (&i, &seq);
567 bb = create_basic_block (seq, bb);
568 start_new_block = false;
571 /* Now add STMT to BB and create the subgraphs for special statement
572 codes. */
573 gimple_set_bb (stmt, bb);
575 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
576 next iteration. */
577 if (stmt_ends_bb_p (stmt))
579 /* If the stmt can make abnormal goto use a new temporary
580 for the assignment to the LHS. This makes sure the old value
581 of the LHS is available on the abnormal edge. Otherwise
582 we will end up with overlapping life-ranges for abnormal
583 SSA names. */
584 if (gimple_has_lhs (stmt)
585 && stmt_can_make_abnormal_goto (stmt)
586 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
588 tree lhs = gimple_get_lhs (stmt);
589 tree tmp = create_tmp_var (TREE_TYPE (lhs));
590 gimple *s = gimple_build_assign (lhs, tmp);
591 gimple_set_location (s, gimple_location (stmt));
592 gimple_set_block (s, gimple_block (stmt));
593 gimple_set_lhs (stmt, tmp);
594 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
595 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
596 DECL_GIMPLE_REG_P (tmp) = 1;
597 gsi_insert_after (&i, s, GSI_SAME_STMT);
599 start_new_block = true;
602 gsi_next (&i);
603 first_stmt_of_seq = false;
605 return bb;
608 /* Build a flowgraph for the sequence of stmts SEQ. */
610 static void
611 make_blocks (gimple_seq seq)
613 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
616 /* Create and return a new empty basic block after bb AFTER. */
618 static basic_block
619 create_bb (void *h, void *e, basic_block after)
621 basic_block bb;
623 gcc_assert (!e);
625 /* Create and initialize a new basic block. Since alloc_block uses
626 GC allocation that clears memory to allocate a basic block, we do
627 not have to clear the newly allocated basic block here. */
628 bb = alloc_block ();
630 bb->index = last_basic_block_for_fn (cfun);
631 bb->flags = BB_NEW;
632 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
634 /* Add the new block to the linked list of blocks. */
635 link_block (bb, after);
637 /* Grow the basic block array if needed. */
638 if ((size_t) last_basic_block_for_fn (cfun)
639 == basic_block_info_for_fn (cfun)->length ())
641 size_t new_size =
642 (last_basic_block_for_fn (cfun)
643 + (last_basic_block_for_fn (cfun) + 3) / 4);
644 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
647 /* Add the newly created block to the array. */
648 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
650 n_basic_blocks_for_fn (cfun)++;
651 last_basic_block_for_fn (cfun)++;
653 return bb;
657 /*---------------------------------------------------------------------------
658 Edge creation
659 ---------------------------------------------------------------------------*/
661 /* If basic block BB has an abnormal edge to a basic block
662 containing IFN_ABNORMAL_DISPATCHER internal call, return
663 that the dispatcher's basic block, otherwise return NULL. */
665 basic_block
666 get_abnormal_succ_dispatcher (basic_block bb)
668 edge e;
669 edge_iterator ei;
671 FOR_EACH_EDGE (e, ei, bb->succs)
672 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
674 gimple_stmt_iterator gsi
675 = gsi_start_nondebug_after_labels_bb (e->dest);
676 gimple *g = gsi_stmt (gsi);
677 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
678 return e->dest;
680 return NULL;
683 /* Helper function for make_edges. Create a basic block with
684 with ABNORMAL_DISPATCHER internal call in it if needed, and
685 create abnormal edges from BBS to it and from it to FOR_BB
686 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
688 static void
689 handle_abnormal_edges (basic_block *dispatcher_bbs,
690 basic_block for_bb, int *bb_to_omp_idx,
691 auto_vec<basic_block> *bbs, bool computed_goto)
693 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
694 unsigned int idx = 0;
695 basic_block bb;
696 bool inner = false;
698 if (bb_to_omp_idx)
700 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
701 if (bb_to_omp_idx[for_bb->index] != 0)
702 inner = true;
705 /* If the dispatcher has been created already, then there are basic
706 blocks with abnormal edges to it, so just make a new edge to
707 for_bb. */
708 if (*dispatcher == NULL)
710 /* Check if there are any basic blocks that need to have
711 abnormal edges to this dispatcher. If there are none, return
712 early. */
713 if (bb_to_omp_idx == NULL)
715 if (bbs->is_empty ())
716 return;
718 else
720 FOR_EACH_VEC_ELT (*bbs, idx, bb)
721 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
722 break;
723 if (bb == NULL)
724 return;
727 /* Create the dispatcher bb. */
728 *dispatcher = create_basic_block (NULL, for_bb);
729 if (computed_goto)
731 /* Factor computed gotos into a common computed goto site. Also
732 record the location of that site so that we can un-factor the
733 gotos after we have converted back to normal form. */
734 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
736 /* Create the destination of the factored goto. Each original
737 computed goto will put its desired destination into this
738 variable and jump to the label we create immediately below. */
739 tree var = create_tmp_var (ptr_type_node, "gotovar");
741 /* Build a label for the new block which will contain the
742 factored computed goto. */
743 tree factored_label_decl
744 = create_artificial_label (UNKNOWN_LOCATION);
745 gimple *factored_computed_goto_label
746 = gimple_build_label (factored_label_decl);
747 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
749 /* Build our new computed goto. */
750 gimple *factored_computed_goto = gimple_build_goto (var);
751 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
753 FOR_EACH_VEC_ELT (*bbs, idx, bb)
755 if (bb_to_omp_idx
756 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
757 continue;
759 gsi = gsi_last_bb (bb);
760 gimple *last = gsi_stmt (gsi);
762 gcc_assert (computed_goto_p (last));
764 /* Copy the original computed goto's destination into VAR. */
765 gimple *assignment
766 = gimple_build_assign (var, gimple_goto_dest (last));
767 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
769 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
770 e->goto_locus = gimple_location (last);
771 gsi_remove (&gsi, true);
774 else
776 tree arg = inner ? boolean_true_node : boolean_false_node;
777 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
778 1, arg);
779 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
780 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
782 /* Create predecessor edges of the dispatcher. */
783 FOR_EACH_VEC_ELT (*bbs, idx, bb)
785 if (bb_to_omp_idx
786 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
787 continue;
788 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
793 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
796 /* Creates outgoing edges for BB. Returns 1 when it ends with an
797 computed goto, returns 2 when it ends with a statement that
798 might return to this function via an nonlocal goto, otherwise
799 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
801 static int
802 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
804 gimple *last = last_stmt (bb);
805 bool fallthru = false;
806 int ret = 0;
808 if (!last)
809 return ret;
811 switch (gimple_code (last))
813 case GIMPLE_GOTO:
814 if (make_goto_expr_edges (bb))
815 ret = 1;
816 fallthru = false;
817 break;
818 case GIMPLE_RETURN:
820 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
821 e->goto_locus = gimple_location (last);
822 fallthru = false;
824 break;
825 case GIMPLE_COND:
826 make_cond_expr_edges (bb);
827 fallthru = false;
828 break;
829 case GIMPLE_SWITCH:
830 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
831 fallthru = false;
832 break;
833 case GIMPLE_RESX:
834 make_eh_edges (last);
835 fallthru = false;
836 break;
837 case GIMPLE_EH_DISPATCH:
838 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
839 break;
841 case GIMPLE_CALL:
842 /* If this function receives a nonlocal goto, then we need to
843 make edges from this call site to all the nonlocal goto
844 handlers. */
845 if (stmt_can_make_abnormal_goto (last))
846 ret = 2;
848 /* If this statement has reachable exception handlers, then
849 create abnormal edges to them. */
850 make_eh_edges (last);
852 /* BUILTIN_RETURN is really a return statement. */
853 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
855 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
856 fallthru = false;
858 /* Some calls are known not to return. */
859 else
860 fallthru = !gimple_call_noreturn_p (last);
861 break;
863 case GIMPLE_ASSIGN:
864 /* A GIMPLE_ASSIGN may throw internally and thus be considered
865 control-altering. */
866 if (is_ctrl_altering_stmt (last))
867 make_eh_edges (last);
868 fallthru = true;
869 break;
871 case GIMPLE_ASM:
872 make_gimple_asm_edges (bb);
873 fallthru = true;
874 break;
876 CASE_GIMPLE_OMP:
877 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
878 break;
880 case GIMPLE_TRANSACTION:
882 gtransaction *txn = as_a <gtransaction *> (last);
883 tree label1 = gimple_transaction_label_norm (txn);
884 tree label2 = gimple_transaction_label_uninst (txn);
886 if (label1)
887 make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
888 if (label2)
889 make_edge (bb, label_to_block (label2),
890 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
892 tree label3 = gimple_transaction_label_over (txn);
893 if (gimple_transaction_subcode (txn)
894 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
895 make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);
897 fallthru = false;
899 break;
901 default:
902 gcc_assert (!stmt_ends_bb_p (last));
903 fallthru = true;
904 break;
907 if (fallthru)
908 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
910 return ret;
913 /* Join all the blocks in the flowgraph. */
915 static void
916 make_edges (void)
918 basic_block bb;
919 struct omp_region *cur_region = NULL;
920 auto_vec<basic_block> ab_edge_goto;
921 auto_vec<basic_block> ab_edge_call;
922 int *bb_to_omp_idx = NULL;
923 int cur_omp_region_idx = 0;
925 /* Create an edge from entry to the first block with executable
926 statements in it. */
927 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
928 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
929 EDGE_FALLTHRU);
931 /* Traverse the basic block array placing edges. */
932 FOR_EACH_BB_FN (bb, cfun)
934 int mer;
936 if (bb_to_omp_idx)
937 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
939 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
940 if (mer == 1)
941 ab_edge_goto.safe_push (bb);
942 else if (mer == 2)
943 ab_edge_call.safe_push (bb);
945 if (cur_region && bb_to_omp_idx == NULL)
946 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
949 /* Computed gotos are hell to deal with, especially if there are
950 lots of them with a large number of destinations. So we factor
951 them to a common computed goto location before we build the
952 edge list. After we convert back to normal form, we will un-factor
953 the computed gotos since factoring introduces an unwanted jump.
954 For non-local gotos and abnormal edges from calls to calls that return
955 twice or forced labels, factor the abnormal edges too, by having all
956 abnormal edges from the calls go to a common artificial basic block
957 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
958 basic block to all forced labels and calls returning twice.
959 We do this per-OpenMP structured block, because those regions
960 are guaranteed to be single entry single exit by the standard,
961 so it is not allowed to enter or exit such regions abnormally this way,
962 thus all computed gotos, non-local gotos and setjmp/longjmp calls
963 must not transfer control across SESE region boundaries. */
964 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
966 gimple_stmt_iterator gsi;
967 basic_block dispatcher_bb_array[2] = { NULL, NULL };
968 basic_block *dispatcher_bbs = dispatcher_bb_array;
969 int count = n_basic_blocks_for_fn (cfun);
971 if (bb_to_omp_idx)
972 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
974 FOR_EACH_BB_FN (bb, cfun)
976 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
978 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
979 tree target;
981 if (!label_stmt)
982 break;
984 target = gimple_label_label (label_stmt);
986 /* Make an edge to every label block that has been marked as a
987 potential target for a computed goto or a non-local goto. */
988 if (FORCED_LABEL (target))
989 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
990 &ab_edge_goto, true);
991 if (DECL_NONLOCAL (target))
993 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
994 &ab_edge_call, false);
995 break;
999 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1000 gsi_next_nondebug (&gsi);
1001 if (!gsi_end_p (gsi))
1003 /* Make an edge to every setjmp-like call. */
1004 gimple *call_stmt = gsi_stmt (gsi);
1005 if (is_gimple_call (call_stmt)
1006 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1007 || gimple_call_builtin_p (call_stmt,
1008 BUILT_IN_SETJMP_RECEIVER)))
1009 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1010 &ab_edge_call, false);
1014 if (bb_to_omp_idx)
1015 XDELETE (dispatcher_bbs);
1018 XDELETE (bb_to_omp_idx);
1020 omp_free_regions ();
1023 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1024 needed. Returns true if new bbs were created.
1025 Note: This is transitional code, and should not be used for new code. We
1026 should be able to get rid of this by rewriting all target va-arg
1027 gimplification hooks to use an interface gimple_build_cond_value as described
1028 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1030 bool
1031 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1033 gimple *stmt = gsi_stmt (*gsi);
1034 basic_block bb = gimple_bb (stmt);
1035 basic_block lastbb, afterbb;
1036 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1037 edge e;
1038 lastbb = make_blocks_1 (seq, bb);
1039 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1040 return false;
1041 e = split_block (bb, stmt);
1042 /* Move e->dest to come after the new basic blocks. */
1043 afterbb = e->dest;
1044 unlink_block (afterbb);
1045 link_block (afterbb, lastbb);
1046 redirect_edge_succ (e, bb->next_bb);
1047 bb = bb->next_bb;
1048 while (bb != afterbb)
1050 struct omp_region *cur_region = NULL;
1051 int cur_omp_region_idx = 0;
1052 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1053 gcc_assert (!mer && !cur_region);
1054 add_bb_to_loop (bb, afterbb->loop_father);
1055 bb = bb->next_bb;
1057 return true;
1060 /* Find the next available discriminator value for LOCUS. The
1061 discriminator distinguishes among several basic blocks that
1062 share a common locus, allowing for more accurate sample-based
1063 profiling. */
1065 static int
1066 next_discriminator_for_locus (location_t locus)
1068 struct locus_discrim_map item;
1069 struct locus_discrim_map **slot;
1071 item.locus = locus;
1072 item.discriminator = 0;
1073 slot = discriminator_per_locus->find_slot_with_hash (
1074 &item, LOCATION_LINE (locus), INSERT);
1075 gcc_assert (slot);
1076 if (*slot == HTAB_EMPTY_ENTRY)
1078 *slot = XNEW (struct locus_discrim_map);
1079 gcc_assert (*slot);
1080 (*slot)->locus = locus;
1081 (*slot)->discriminator = 0;
1083 (*slot)->discriminator++;
1084 return (*slot)->discriminator;
1087 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1089 static bool
1090 same_line_p (location_t locus1, location_t locus2)
1092 expanded_location from, to;
1094 if (locus1 == locus2)
1095 return true;
1097 from = expand_location (locus1);
1098 to = expand_location (locus2);
1100 if (from.line != to.line)
1101 return false;
1102 if (from.file == to.file)
1103 return true;
1104 return (from.file != NULL
1105 && to.file != NULL
1106 && filename_cmp (from.file, to.file) == 0);
1109 /* Assign discriminators to each basic block. */
1111 static void
1112 assign_discriminators (void)
1114 basic_block bb;
1116 FOR_EACH_BB_FN (bb, cfun)
1118 edge e;
1119 edge_iterator ei;
1120 gimple *last = last_stmt (bb);
1121 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1123 if (locus == UNKNOWN_LOCATION)
1124 continue;
/* For each successor that starts or ends on the same source line as
   BB's last statement, one of the two blocks must get a fresh
   discriminator so sample profiles can tell them apart.  */
1126 FOR_EACH_EDGE (e, ei, bb->succs)
1128 gimple *first = first_non_label_stmt (e->dest);
1129 gimple *last = last_stmt (e->dest);
1130 if ((first && same_line_p (locus, gimple_location (first)))
1131 || (last && same_line_p (locus, gimple_location (last))))
/* Prefer assigning the new discriminator to BB when the destination
   already carries one and BB does not; otherwise assign it to the
   destination block.  */
1133 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1134 bb->discriminator = next_discriminator_for_locus (locus);
1135 else
1136 e->dest->discriminator = next_discriminator_for_locus (locus);
1142 /* Create the edges for a GIMPLE_COND starting at block BB. */
1144 static void
1145 make_cond_expr_edges (basic_block bb)
1147 gcond *entry = as_a <gcond *> (last_stmt (bb));
1148 gimple *then_stmt, *else_stmt;
1149 basic_block then_bb, else_bb;
1150 tree then_label, else_label;
1151 edge e;
1153 gcc_assert (entry);
1154 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1156 /* Entry basic blocks for each component. */
1157 then_label = gimple_cond_true_label (entry);
1158 else_label = gimple_cond_false_label (entry);
1159 then_bb = label_to_block (then_label);
1160 else_bb = label_to_block (else_label);
1161 then_stmt = first_stmt (then_bb);
1162 else_stmt = first_stmt (else_bb);
1164 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1165 e->goto_locus = gimple_location (then_stmt);
/* NOTE(review): only this second make_edge is NULL-guarded -- presumably
   because when then_bb == else_bb the edge already exists and make_edge
   returns NULL, while the first edge out of BB is always created.
   TODO: confirm against the make_edge implementation.  */
1166 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1167 if (e)
1168 e->goto_locus = gimple_location (else_stmt);
1170 /* We do not need the labels anymore. */
1171 gimple_cond_set_true_label (entry, NULL_TREE);
1172 gimple_cond_set_false_label (entry, NULL_TREE);
1176 /* Called for each element in the hash table (P) as we delete the
1177 edge to cases hash table.
1179 Clear all the CASE_CHAINs to prevent problems with copying of
1180 SWITCH_EXPRs and structure sharing rules, then free the hash table
1181 element. */
1183 bool
1184 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1186 tree t, next;
1188 for (t = value; t; t = next)
1190 next = CASE_CHAIN (t);
1191 CASE_CHAIN (t) = NULL;
1194 return true;
1197 /* Start recording information mapping edges to case labels. */
1199 void
1200 start_recording_case_labels (void)
1202 gcc_assert (edge_to_cases == NULL);
1203 edge_to_cases = new hash_map<edge, tree>;
1204 touched_switch_bbs = BITMAP_ALLOC (NULL);
1207 /* Return nonzero if we are recording information for case labels. */
1209 static bool
1210 recording_case_labels_p (void)
1212 return (edge_to_cases != NULL);
1215 /* Stop recording information mapping edges to case labels and
1216 remove any information we have recorded. */
1217 void
1218 end_recording_case_labels (void)
1220 bitmap_iterator bi;
1221 unsigned i;
/* Sever every recorded CASE_CHAIN (see edge_to_cases_cleanup) before
   destroying the map itself.  */
1222 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1223 delete edge_to_cases;
1224 edge_to_cases = NULL;
/* Re-group the case labels of every switch whose block was touched
   while recording was active.  */
1225 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1227 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1228 if (bb)
1230 gimple *stmt = last_stmt (bb);
1231 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1232 group_case_labels_stmt (as_a <gswitch *> (stmt));
1235 BITMAP_FREE (touched_switch_bbs);
1238 /* If we are inside a {start,end}_recording_cases block, then return
1239 a chain of CASE_LABEL_EXPRs from T which reference E.
1241 Otherwise return NULL. */
1243 static tree
1244 get_cases_for_edge (edge e, gswitch *t)
1246 tree *slot;
1247 size_t i, n;
1249 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1250 chains available. Return NULL so the caller can detect this case. */
1251 if (!recording_case_labels_p ())
1252 return NULL;
1254 slot = edge_to_cases->get (e);
1255 if (slot)
1256 return *slot;
1258 /* If we did not find E in the hash table, then this must be the first
1259 time we have been queried for information about E & T. Add all the
1260 elements from T to the hash table then perform the query again. */
1262 n = gimple_switch_num_labels (t);
1263 for (i = 0; i < n; i++)
1265 tree elt = gimple_switch_label (t, i);
1266 tree lab = CASE_LABEL (elt);
1267 basic_block label_bb = label_to_block (lab);
1268 edge this_edge = find_edge (e->src, label_bb);
1270 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1271 a new chain. */
/* The per-edge chain is threaded through CASE_CHAIN with the newest
   case label at the head.  */
1272 tree &s = edge_to_cases->get_or_insert (this_edge);
1273 CASE_CHAIN (elt) = s;
1274 s = elt;
/* Every edge out of the switch now has an entry, so E's lookup must
   succeed this time.  */
1277 return *edge_to_cases->get (e);
1280 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1282 static void
1283 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1285 size_t i, n;
1287 n = gimple_switch_num_labels (entry);
1289 for (i = 0; i < n; ++i)
1291 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1292 basic_block label_bb = label_to_block (lab);
1293 make_edge (bb, label_bb, 0);
1298 /* Return the basic block holding label DEST. */
1300 basic_block
1301 label_to_block_fn (struct function *ifun, tree dest)
1303 int uid = LABEL_DECL_UID (dest);
1305 /* We would die hard when faced by an undefined label. Emit a label to
1306 the very first basic block. This will hopefully make even the dataflow
1307 and undefined variable warnings quite right. */
1308 if (seen_error () && uid < 0)
1310 gimple_stmt_iterator gsi =
1311 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1312 gimple *stmt;
1314 stmt = gimple_build_label (dest);
1315 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1316 uid = LABEL_DECL_UID (dest);
1318 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1319 return NULL;
1320 return (*ifun->cfg->x_label_to_block_map)[uid];
1323 /* Create edges for a goto statement at block BB. Returns true
1324 if abnormal edges should be created. */
1326 static bool
1327 make_goto_expr_edges (basic_block bb)
1329 gimple_stmt_iterator last = gsi_last_bb (bb);
1330 gimple *goto_t = gsi_stmt (last);
1332 /* A simple GOTO creates normal edges. */
1333 if (simple_goto_p (goto_t))
1335 tree dest = gimple_goto_dest (goto_t);
1336 basic_block label_bb = label_to_block (dest);
1337 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1338 e->goto_locus = gimple_location (goto_t);
1339 gsi_remove (&last, true);
1340 return false;
1343 /* A computed GOTO creates abnormal edges. */
1344 return true;
1347 /* Create edges for an asm statement with labels at block BB. */
1349 static void
1350 make_gimple_asm_edges (basic_block bb)
1352 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1353 int i, n = gimple_asm_nlabels (stmt);
1355 for (i = 0; i < n; ++i)
1357 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1358 basic_block label_bb = label_to_block (label);
1359 make_edge (bb, label_bb, 0);
1363 /*---------------------------------------------------------------------------
1364 Flowgraph analysis
1365 ---------------------------------------------------------------------------*/
1367 /* Cleanup useless labels in basic blocks. This is something we wish
1368 to do early because it allows us to group case labels before creating
1369 the edges for the CFG, and it speeds up block statement iterators in
1370 all passes later on.
1371 We rerun this pass after CFG is created, to get rid of the labels that
1372 are no longer referenced. After then we do not run it any more, since
1373 (almost) no new labels should be created. */
1375 /* A map from basic block index to the leading label of that block. */
/* Allocated (indexed by bb->index) and freed by cleanup_dead_labels;
   only valid while that function runs.  */
1376 static struct label_record
1378 /* The label. */
1379 tree label;
1381 /* True if the label is referenced from somewhere. */
1382 bool used;
1383 } *label_for_bb;
1385 /* Given LABEL return the first label in the same basic block. */
1387 static tree
1388 main_block_label (tree label)
1390 basic_block bb = label_to_block (label);
1391 tree main_label = label_for_bb[bb->index].label;
1393 /* label_to_block possibly inserted undefined label into the chain. */
1394 if (!main_label)
1396 label_for_bb[bb->index].label = label;
1397 main_label = label;
1400 label_for_bb[bb->index].used = true;
1401 return main_label;
1404 /* Clean up redundant labels within the exception tree. */
1406 static void
1407 cleanup_dead_labels_eh (void)
1409 eh_landing_pad lp;
1410 eh_region r;
1411 tree lab;
1412 int i;
1414 if (cfun->eh == NULL)
1415 return;
/* Redirect each landing pad's post-landing-pad label to the block's
   leading label, keeping EH_LANDING_PAD_NR in sync: clear it on the
   old label and set it on the replacement.  */
1417 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1418 if (lp && lp->post_landing_pad)
1420 lab = main_block_label (lp->post_landing_pad);
1421 if (lab != lp->post_landing_pad)
1423 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1424 EH_LANDING_PAD_NR (lab) = lp->index;
/* Do the same for the labels referenced directly from EH regions.  */
1428 FOR_ALL_EH_REGION (r)
1429 switch (r->type)
1431 case ERT_CLEANUP:
1432 case ERT_MUST_NOT_THROW:
1433 break;
1435 case ERT_TRY:
1437 eh_catch c;
1438 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1440 lab = c->label;
1441 if (lab)
1442 c->label = main_block_label (lab);
1445 break;
1447 case ERT_ALLOWED_EXCEPTIONS:
1448 lab = r->u.allowed.label;
1449 if (lab)
1450 r->u.allowed.label = main_block_label (lab);
1451 break;
1456 /* Cleanup redundant labels. This is a three-step process:
1457 1) Find the leading label for each block.
1458 2) Redirect all references to labels to the leading labels.
1459 3) Cleanup all useless labels. */
1461 void
1462 cleanup_dead_labels (void)
1464 basic_block bb;
/* Zero-initialized vector indexed by bb->index; freed at the end.  */
1465 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1467 /* Find a suitable label for each block. We use the first user-defined
1468 label if there is one, or otherwise just the first label we see. */
1469 FOR_EACH_BB_FN (bb, cfun)
1471 gimple_stmt_iterator i;
/* Labels are always grouped at the start of a block, so stop at the
   first non-label statement.  */
1473 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1475 tree label;
1476 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1478 if (!label_stmt)
1479 break;
1481 label = gimple_label_label (label_stmt);
1483 /* If we have not yet seen a label for the current block,
1484 remember this one and see if there are more labels. */
1485 if (!label_for_bb[bb->index].label)
1487 label_for_bb[bb->index].label = label;
1488 continue;
1491 /* If we did see a label for the current block already, but it
1492 is an artificially created label, replace it if the current
1493 label is a user defined label. */
1494 if (!DECL_ARTIFICIAL (label)
1495 && DECL_ARTIFICIAL (label_for_bb[bb->index].label)
1497 label_for_bb[bb->index].label = label;
1498 break;
1503 /* Now redirect all jumps/branches to the selected label.
1504 First do so for each block ending in a control statement. */
1505 FOR_EACH_BB_FN (bb, cfun)
1507 gimple *stmt = last_stmt (bb);
1508 tree label, new_label;
1510 if (!stmt)
1511 continue;
1513 switch (gimple_code (stmt))
1515 case GIMPLE_COND:
1517 gcond *cond_stmt = as_a <gcond *> (stmt);
1518 label = gimple_cond_true_label (cond_stmt);
1519 if (label)
1521 new_label = main_block_label (label);
1522 if (new_label != label)
1523 gimple_cond_set_true_label (cond_stmt, new_label);
1526 label = gimple_cond_false_label (cond_stmt);
1527 if (label)
1529 new_label = main_block_label (label);
1530 if (new_label != label)
1531 gimple_cond_set_false_label (cond_stmt, new_label);
1534 break;
1536 case GIMPLE_SWITCH:
1538 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1539 size_t i, n = gimple_switch_num_labels (switch_stmt);
1541 /* Replace all destination labels. */
1542 for (i = 0; i < n; ++i)
1544 tree case_label = gimple_switch_label (switch_stmt, i);
1545 label = CASE_LABEL (case_label);
1546 new_label = main_block_label (label);
1547 if (new_label != label)
1548 CASE_LABEL (case_label) = new_label;
1550 break;
1553 case GIMPLE_ASM:
1555 gasm *asm_stmt = as_a <gasm *> (stmt);
1556 int i, n = gimple_asm_nlabels (asm_stmt);
1558 for (i = 0; i < n; ++i)
1560 tree cons = gimple_asm_label_op (asm_stmt, i);
1561 tree label = main_block_label (TREE_VALUE (cons));
1562 TREE_VALUE (cons) = label;
1564 break;
1567 /* We have to handle gotos until they're removed, and we don't
1568 remove them until after we've created the CFG edges. */
1569 case GIMPLE_GOTO:
1570 if (!computed_goto_p (stmt))
1572 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1573 label = gimple_goto_dest (goto_stmt);
1574 new_label = main_block_label (label);
1575 if (new_label != label)
1576 gimple_goto_set_dest (goto_stmt, new_label);
1578 break;
1580 case GIMPLE_TRANSACTION:
/* A transaction carries up to three labels (normal, uninstrumented,
   over); redirect each one that is present.  */
1582 gtransaction *txn = as_a <gtransaction *> (stmt);
1584 label = gimple_transaction_label_norm (txn);
1585 if (label)
1587 new_label = main_block_label (label);
1588 if (new_label != label)
1589 gimple_transaction_set_label_norm (txn, new_label);
1592 label = gimple_transaction_label_uninst (txn);
1593 if (label)
1595 new_label = main_block_label (label);
1596 if (new_label != label)
1597 gimple_transaction_set_label_uninst (txn, new_label);
1600 label = gimple_transaction_label_over (txn);
1601 if (label)
1603 new_label = main_block_label (label);
1604 if (new_label != label)
1605 gimple_transaction_set_label_over (txn, new_label);
1608 break;
1610 default:
1611 break;
1615 /* Do the same for the exception region tree labels. */
1616 cleanup_dead_labels_eh ();
1618 /* Finally, purge dead labels. All user-defined labels and labels that
1619 can be the target of non-local gotos and labels which have their
1620 address taken are preserved. */
1621 FOR_EACH_BB_FN (bb, cfun)
1623 gimple_stmt_iterator i;
1624 tree label_for_this_bb = label_for_bb[bb->index].label;
1626 if (!label_for_this_bb)
1627 continue;
1629 /* If the main label of the block is unused, we may still remove it. */
1630 if (!label_for_bb[bb->index].used)
1631 label_for_this_bb = NULL;
1633 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1635 tree label;
1636 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1638 if (!label_stmt)
1639 break;
1641 label = gimple_label_label (label_stmt);
/* Keep the (used) leading label, user labels, non-local labels and
   labels whose address is taken; delete everything else.  */
1643 if (label == label_for_this_bb
1644 || !DECL_ARTIFICIAL (label)
1645 || DECL_NONLOCAL (label)
1646 || FORCED_LABEL (label))
1647 gsi_next (&i);
1648 else
1649 gsi_remove (&i, true);
1653 free (label_for_bb);
1656 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1657 the ones jumping to the same label.
1658 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1660 void
1661 group_case_labels_stmt (gswitch *stmt)
1663 int old_size = gimple_switch_num_labels (stmt);
1664 int i, j, base_index, new_size = old_size;
1665 basic_block default_bb = NULL;
1667 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1669 /* Look for possible opportunities to merge cases. */
/* Index 0 is the default label, so start scanning at 1.  Dropped
   labels are marked NULL_TREE and compressed away at the end.  */
1670 i = 1;
1671 while (i < old_size)
1673 tree base_case, base_high;
1674 basic_block base_bb;
1676 base_case = gimple_switch_label (stmt, i);
1678 gcc_assert (base_case);
1679 base_bb = label_to_block (CASE_LABEL (base_case));
1681 /* Discard cases that have the same destination as the default case. */
1682 if (base_bb == default_bb)
1684 gimple_switch_set_label (stmt, i, NULL_TREE);
1685 i++;
1686 new_size--;
1687 continue;
1690 base_high = CASE_HIGH (base_case)
1691 ? CASE_HIGH (base_case)
1692 : CASE_LOW (base_case);
1693 base_index = i++;
1695 /* Try to merge case labels. Break out when we reach the end
1696 of the label vector or when we cannot merge the next case
1697 label with the current one. */
1698 while (i < old_size)
1700 tree merge_case = gimple_switch_label (stmt, i);
1701 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
/* NOTE(review): wi::add wraps if BASE_HIGH is the maximal value of
   its precision; since the case vector is sorted, the following
   equality then presumably never matches -- confirm.  */
1702 wide_int bhp1 = wi::add (base_high, 1);
1704 /* Merge the cases if they jump to the same place,
1705 and their ranges are consecutive. */
1706 if (merge_bb == base_bb
1707 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1709 base_high = CASE_HIGH (merge_case) ?
1710 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1711 CASE_HIGH (base_case) = base_high;
1712 gimple_switch_set_label (stmt, i, NULL_TREE);
1713 new_size--;
1714 i++;
1716 else
1717 break;
1720 /* Discard cases that have an unreachable destination block. */
1721 if (EDGE_COUNT (base_bb->succs) == 0
1722 && gimple_seq_unreachable_p (bb_seq (base_bb)))
1724 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1725 if (base_edge != NULL)
1726 remove_edge_and_dominated_blocks (base_edge);
1727 gimple_switch_set_label (stmt, base_index, NULL_TREE);
1728 new_size--;
1732 /* Compress the case labels in the label vector, and adjust the
1733 length of the vector. */
1734 for (i = 0, j = 0; i < new_size; i++)
1736 while (! gimple_switch_label (stmt, j))
1737 j++;
1738 gimple_switch_set_label (stmt, i,
1739 gimple_switch_label (stmt, j++));
1742 gcc_assert (new_size <= old_size);
1743 gimple_switch_set_num_labels (stmt, new_size);
1746 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1747 and scan the sorted vector of cases. Combine the ones jumping to the
1748 same label. */
1750 void
1751 group_case_labels (void)
1753 basic_block bb;
1755 FOR_EACH_BB_FN (bb, cfun)
1757 gimple *stmt = last_stmt (bb);
1758 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1759 group_case_labels_stmt (as_a <gswitch *> (stmt));
1763 /* Checks whether we can merge block B into block A. */
1765 static bool
1766 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1768 gimple *stmt;
/* A must fall through unconditionally to B, and B must have no other
   predecessors; neither may be the artificial entry/exit block.  */
1770 if (!single_succ_p (a))
1771 return false;
1773 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1774 return false;
1776 if (single_succ (a) != b)
1777 return false;
1779 if (!single_pred_p (b))
1780 return false;
1782 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1783 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1784 return false;
1786 /* If A ends by a statement causing exceptions or something similar, we
1787 cannot merge the blocks. */
1788 stmt = last_stmt (a);
1789 if (stmt && stmt_ends_bb_p (stmt))
1790 return false;
1792 /* Do not allow a block with only a non-local label to be merged. */
1793 if (stmt)
1794 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1795 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1796 return false;
1798 /* Examine the labels at the beginning of B. */
1799 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1800 gsi_next (&gsi))
1802 tree lab;
1803 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1804 if (!label_stmt)
1805 break;
1806 lab = gimple_label_label (label_stmt);
1808 /* Do not remove user forced labels or for -O0 any user labels. */
1809 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1810 return false;
1813 /* Protect simple loop latches. We only want to avoid merging
1814 the latch with the loop header or with a block in another
1815 loop in this case. */
1816 if (current_loops
1817 && b->loop_father->latch == b
1818 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1819 && (b->loop_father->header == a
1820 || b->loop_father != a->loop_father))
1821 return false;
1823 /* It must be possible to eliminate all phi nodes in B. If ssa form
1824 is not up-to-date and a name-mapping is registered, we cannot eliminate
1825 any phis. Symbols marked for renaming are never a problem though. */
1826 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1827 gsi_next (&gsi))
1829 gphi *phi = gsi.phi ();
1830 /* Technically only new names matter. */
1831 if (name_registered_for_update_p (PHI_RESULT (phi)))
1832 return false;
1835 /* When not optimizing, don't merge if we'd lose goto_locus. */
/* The goto_locus survives the merge only if either A's last
   statement or B's first real statement already carries it.  */
1836 if (!optimize
1837 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1839 location_t goto_locus = single_succ_edge (a)->goto_locus;
1840 gimple_stmt_iterator prev, next;
1841 prev = gsi_last_nondebug_bb (a);
1842 next = gsi_after_labels (b);
1843 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1844 gsi_next_nondebug (&next);
1845 if ((gsi_end_p (prev)
1846 || gimple_location (gsi_stmt (prev)) != goto_locus)
1847 && (gsi_end_p (next)
1848 || gimple_location (gsi_stmt (next)) != goto_locus))
1849 return false;
1852 return true;
1855 /* Replaces all uses of NAME by VAL. */
1857 void
1858 replace_uses_by (tree name, tree val)
1860 imm_use_iterator imm_iter;
1861 use_operand_p use;
1862 gimple *stmt;
1863 edge e;
1865 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1867 /* Mark the block if we change the last stmt in it. */
1868 if (cfgcleanup_altered_bbs
1869 && stmt_ends_bb_p (stmt))
1870 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
/* Replace every use of NAME within this statement.  For PHI args
   flowing in over an abnormal edge, VAL inherits the
   occurs-in-abnormal-PHI flag.  */
1872 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1874 replace_exp (use, val);
1876 if (gimple_code (stmt) == GIMPLE_PHI)
1878 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1879 PHI_ARG_INDEX_FROM_USE (use));
1880 if (e->flags & EDGE_ABNORMAL
1881 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1883 /* This can only occur for virtual operands, since
1884 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1885 would prevent replacement. */
1886 gcc_checking_assert (virtual_operand_p (name));
1887 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* Non-PHI statements may now fold; also refresh ADDR_EXPR invariance
   and clean up EH info made dead by the substitution.  */
1892 if (gimple_code (stmt) != GIMPLE_PHI)
1894 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1895 gimple *orig_stmt = stmt;
1896 size_t i;
1898 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1899 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1900 only change sth from non-invariant to invariant, and only
1901 when propagating constants. */
1902 if (is_gimple_min_invariant (val))
1903 for (i = 0; i < gimple_num_ops (stmt); i++)
1905 tree op = gimple_op (stmt, i);
1906 /* Operands may be empty here. For example, the labels
1907 of a GIMPLE_COND are nulled out following the creation
1908 of the corresponding CFG edges. */
1909 if (op && TREE_CODE (op) == ADDR_EXPR)
1910 recompute_tree_invariant_for_addr_expr (op);
1913 if (fold_stmt (&gsi))
1914 stmt = gsi_stmt (gsi);
1916 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1917 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1919 update_stmt (stmt);
1923 gcc_checking_assert (has_zero_uses (name));
1925 /* Also update the trees stored in loop structures. */
1926 if (current_loops)
1928 struct loop *loop;
1930 FOR_EACH_LOOP (loop, 0)
1932 substitute_in_loop_info (loop, name, val);
1937 /* Merge block B into block A. */
1939 static void
1940 gimple_merge_blocks (basic_block a, basic_block b)
1942 gimple_stmt_iterator last, gsi;
1943 gphi_iterator psi;
1945 if (dump_file)
1946 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1948 /* Remove all single-valued PHI nodes from block B of the form
1949 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1950 gsi = gsi_last_bb (a);
1951 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1953 gimple *phi = gsi_stmt (psi);
1954 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1955 gimple *copy;
1956 bool may_replace_uses = (virtual_operand_p (def)
1957 || may_propagate_copy (def, use));
1959 /* In case we maintain loop closed ssa form, do not propagate arguments
1960 of loop exit phi nodes. */
1961 if (current_loops
1962 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1963 && !virtual_operand_p (def)
1964 && TREE_CODE (use) == SSA_NAME
1965 && a->loop_father != b->loop_father)
1966 may_replace_uses = false;
/* If propagation is not allowed, materialize DEF = USE as an explicit
   copy at the end of A instead.  */
1968 if (!may_replace_uses)
1970 gcc_assert (!virtual_operand_p (def));
1972 /* Note that just emitting the copies is fine -- there is no problem
1973 with ordering of phi nodes. This is because A is the single
1974 predecessor of B, therefore results of the phi nodes cannot
1975 appear as arguments of the phi nodes. */
1976 copy = gimple_build_assign (def, use);
1977 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1978 remove_phi_node (&psi, false);
1980 else
1982 /* If we deal with a PHI for virtual operands, we can simply
1983 propagate these without fussing with folding or updating
1984 the stmt. */
1985 if (virtual_operand_p (def))
1987 imm_use_iterator iter;
1988 use_operand_p use_p;
1989 gimple *stmt;
1991 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1992 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1993 SET_USE (use_p, use);
1995 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1996 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1998 else
1999 replace_uses_by (def, use);
2001 remove_phi_node (&psi, true);
2005 /* Ensure that B follows A. */
2006 move_block_after (b, a);
2008 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2009 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2011 /* Remove labels from B and set gimple_bb to A for other statements. */
2012 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2014 gimple *stmt = gsi_stmt (gsi);
2015 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2017 tree label = gimple_label_label (label_stmt);
2018 int lp_nr;
2020 gsi_remove (&gsi, false);
2022 /* Now that we can thread computed gotos, we might have
2023 a situation where we have a forced label in block B
2024 However, the label at the start of block B might still be
2025 used in other ways (think about the runtime checking for
2026 Fortran assigned gotos). So we can not just delete the
2027 label. Instead we move the label to the start of block A. */
2028 if (FORCED_LABEL (label))
2030 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2031 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2033 /* Other user labels keep around in a form of a debug stmt. */
2034 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2036 gimple *dbg = gimple_build_debug_bind (label,
2037 integer_zero_node,
2038 stmt);
2039 gimple_debug_bind_reset_value (dbg);
2040 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* The label's landing pad, if any, no longer has a post-landing-pad
   label once the label is gone.  */
2043 lp_nr = EH_LANDING_PAD_NR (label);
2044 if (lp_nr)
2046 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2047 lp->post_landing_pad = NULL;
2050 else
2052 gimple_set_bb (stmt, a);
2053 gsi_next (&gsi);
2057 /* When merging two BBs, if their counts are different, the larger count
2058 is selected as the new bb count. This is to handle inconsistent
2059 profiles. */
2060 if (a->loop_father == b->loop_father)
2062 a->count = MAX (a->count, b->count);
2063 a->frequency = MAX (a->frequency, b->frequency);
2066 /* Merge the sequences. */
2067 last = gsi_last_bb (a);
2068 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2069 set_bb_seq (b, NULL);
2071 if (cfgcleanup_altered_bbs)
2072 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2076 /* Return the one of two successors of BB that is not reachable by a
2077 complex edge, if there is one. Else, return BB. We use
2078 this in optimizations that use post-dominators for their heuristics,
2079 to catch the cases in C++ where function calls are involved. */
2081 basic_block
2082 single_noncomplex_succ (basic_block bb)
2084 edge e0, e1;
2085 if (EDGE_COUNT (bb->succs) != 2)
2086 return bb;
2088 e0 = EDGE_SUCC (bb, 0);
2089 e1 = EDGE_SUCC (bb, 1);
2090 if (e0->flags & EDGE_COMPLEX)
2091 return e1->dest;
2092 if (e1->flags & EDGE_COMPLEX)
2093 return e0->dest;
2095 return bb;
2098 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2100 void
2101 notice_special_calls (gcall *call)
2103 int flags = gimple_call_flags (call);
2105 if (flags & ECF_MAY_BE_ALLOCA)
2106 cfun->calls_alloca = true;
2107 if (flags & ECF_RETURNS_TWICE)
2108 cfun->calls_setjmp = true;
2112 /* Clear flags set by notice_special_calls. Used by dead code removal
2113 to update the flags. */
2115 void
2116 clear_special_calls (void)
2118 cfun->calls_alloca = false;
2119 cfun->calls_setjmp = false;
2122 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2124 static void
2125 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2127 /* Since this block is no longer reachable, we can just delete all
2128 of its PHI nodes. */
2129 remove_phi_nodes (bb);
2131 /* Remove edges to BB's successors. */
2132 while (EDGE_COUNT (bb->succs) > 0)
2133 remove_edge (EDGE_SUCC (bb, 0));
2137 /* Remove statements of basic block BB. */
2139 static void
2140 remove_bb (basic_block bb)
2142 gimple_stmt_iterator i;
2144 if (dump_file)
2146 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2147 if (dump_flags & TDF_DETAILS)
2149 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2150 fprintf (dump_file, "\n");
2154 if (current_loops)
2156 struct loop *loop = bb->loop_father;
2158 /* If a loop gets removed, clean up the information associated
2159 with it. */
2160 if (loop->latch == bb
2161 || loop->header == bb)
2162 free_numbers_of_iterations_estimates_loop (loop);
2165 /* Remove all the instructions in the block. */
2166 if (bb_seq (bb) != NULL)
2168 /* Walk backwards so as to get a chance to substitute all
2169 released DEFs into debug stmts. See
2170 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2171 details. */
2172 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2174 gimple *stmt = gsi_stmt (i);
2175 glabel *label_stmt = dyn_cast <glabel *> (stmt);
/* Forced and non-local labels may still be referenced elsewhere, so
   they are relocated into the previous block instead of deleted.  */
2176 if (label_stmt
2177 && (FORCED_LABEL (gimple_label_label (label_stmt))
2178 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2180 basic_block new_bb;
2181 gimple_stmt_iterator new_gsi;
2183 /* A non-reachable non-local label may still be referenced.
2184 But it no longer needs to carry the extra semantics of
2185 non-locality. */
2186 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2188 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2189 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2192 new_bb = bb->prev_bb;
2193 new_gsi = gsi_start_bb (new_bb);
2194 gsi_remove (&i, false);
2195 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2197 else
2199 /* Release SSA definitions. */
2200 release_defs (stmt);
2201 gsi_remove (&i, true);
/* gsi_remove leaves I at the following statement (or at end);
   re-anchor at the last statement and keep walking backwards.  */
2204 if (gsi_end_p (i))
2205 i = gsi_last_bb (bb);
2206 else
2207 gsi_prev (&i);
2211 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2212 bb->il.gimple.seq = NULL;
2213 bb->il.gimple.phi_nodes = NULL;
2217 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2218 predicate VAL, return the edge that will be taken out of the block.
2219 If VAL does not match a unique edge, NULL is returned. */
2221 edge
2222 find_taken_edge (basic_block bb, tree val)
2224 gimple *stmt;
2226 stmt = last_stmt (bb);
2228 gcc_assert (stmt);
2229 gcc_assert (is_ctrl_stmt (stmt));
/* Without a constant predicate value, no unique edge can be chosen.  */
2231 if (val == NULL)
2232 return NULL;
2234 if (!is_gimple_min_invariant (val))
2235 return NULL;
/* Dispatch on the kind of control statement ending BB.  */
2237 if (gimple_code (stmt) == GIMPLE_COND)
2238 return find_taken_edge_cond_expr (bb, val);
2240 if (gimple_code (stmt) == GIMPLE_SWITCH)
2241 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2243 if (computed_goto_p (stmt))
2245 /* Only optimize if the argument is a label, if the argument is
2246 not a label then we can not construct a proper CFG.
2248 It may be the case that we only need to allow the LABEL_REF to
2249 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2250 appear inside a LABEL_EXPR just to be safe. */
2251 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2252 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2253 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2254 return NULL;
2257 gcc_unreachable ();
2260 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2261 statement, determine which of the outgoing edges will be taken out of the
2262 block. Return NULL if either edge may be taken. */
2264 static edge
2265 find_taken_edge_computed_goto (basic_block bb, tree val)
2267 basic_block dest;
2268 edge e = NULL;
2270 dest = label_to_block (val);
2271 if (dest)
2273 e = find_edge (bb, dest);
2274 gcc_assert (e != NULL);
2277 return e;
2280 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2281 statement, determine which of the two edges will be taken out of the
2282 block. Return NULL if either edge may be taken. */
2284 static edge
2285 find_taken_edge_cond_expr (basic_block bb, tree val)
2287 edge true_edge, false_edge;
2289 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2291 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2292 return (integer_zerop (val) ? false_edge : true_edge);
2295 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2296 statement, determine which edge will be taken out of the block. Return
2297 NULL if any edge may be taken. */
2299 static edge
2300 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2301 tree val)
2303 basic_block dest_bb;
2304 edge e;
2305 tree taken_case;
2307 taken_case = find_case_label_for_value (switch_stmt, val);
2308 dest_bb = label_to_block (CASE_LABEL (taken_case));
2310 e = find_edge (bb, dest_bb);
2311 gcc_assert (e);
2312 return e;
2316 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2317 We can make optimal use here of the fact that the case labels are
2318 sorted: We can do a binary search for a case matching VAL. */
2320 static tree
2321 find_case_label_for_value (gswitch *switch_stmt, tree val)
2323 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2324 tree default_case = gimple_switch_default_label (switch_stmt);
2326 for (low = 0, high = n; high - low > 1; )
2328 size_t i = (high + low) / 2;
2329 tree t = gimple_switch_label (switch_stmt, i);
2330 int cmp;
2332 /* Cache the result of comparing CASE_LOW and val. */
2333 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2335 if (cmp > 0)
2336 high = i;
2337 else
2338 low = i;
2340 if (CASE_HIGH (t) == NULL)
2342 /* A singe-valued case label. */
2343 if (cmp == 0)
2344 return t;
2346 else
2348 /* A case range. We can only handle integer ranges. */
2349 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2350 return t;
2354 return default_case;
2358 /* Dump a basic block on stderr. */
2360 void
2361 gimple_debug_bb (basic_block bb)
2363 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2367 /* Dump basic block with index N on stderr. */
2369 basic_block
2370 gimple_debug_bb_n (int n)
2372 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2373 return BASIC_BLOCK_FOR_FN (cfun, n);
2377 /* Dump the CFG on stderr.
2379 FLAGS are the same used by the tree dumping functions
2380 (see TDF_* in dumpfile.h). */
2382 void
2383 gimple_debug_cfg (dump_flags_t flags)
2385 gimple_dump_cfg (stderr, flags);
2389 /* Dump the program showing basic block boundaries on the given FILE.
2391 FLAGS are the same used by the tree dumping functions (see TDF_* in
2392 tree.h). */
2394 void
2395 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2397 if (flags & TDF_DETAILS)
2399 dump_function_header (file, current_function_decl, flags);
2400 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2401 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2402 last_basic_block_for_fn (cfun));
2404 brief_dump_cfg (file, flags | TDF_COMMENT);
2405 fprintf (file, "\n");
2408 if (flags & TDF_STATS)
2409 dump_cfg_stats (file);
2411 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2415 /* Dump CFG statistics on FILE. */
2417 void
2418 dump_cfg_stats (FILE *file)
2420 static long max_num_merged_labels = 0;
2421 unsigned long size, total = 0;
2422 long num_edges;
2423 basic_block bb;
2424 const char * const fmt_str = "%-30s%-13s%12s\n";
2425 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2426 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2427 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2428 const char *funcname = current_function_name ();
2430 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2432 fprintf (file, "---------------------------------------------------------\n");
2433 fprintf (file, fmt_str, "", " Number of ", "Memory");
2434 fprintf (file, fmt_str, "", " instances ", "used ");
2435 fprintf (file, "---------------------------------------------------------\n");
2437 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2438 total += size;
2439 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2440 SCALE (size), LABEL (size));
2442 num_edges = 0;
2443 FOR_EACH_BB_FN (bb, cfun)
2444 num_edges += EDGE_COUNT (bb->succs);
2445 size = num_edges * sizeof (struct edge_def);
2446 total += size;
2447 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2449 fprintf (file, "---------------------------------------------------------\n");
2450 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2451 LABEL (total));
2452 fprintf (file, "---------------------------------------------------------\n");
2453 fprintf (file, "\n");
2455 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2456 max_num_merged_labels = cfg_stats.num_merged_labels;
2458 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2459 cfg_stats.num_merged_labels, max_num_merged_labels);
2461 fprintf (file, "\n");
2465 /* Dump CFG statistics on stderr. Keep extern so that it's always
2466 linked in the final executable. */
2468 DEBUG_FUNCTION void
2469 debug_cfg_stats (void)
2471 dump_cfg_stats (stderr);
2474 /*---------------------------------------------------------------------------
2475 Miscellaneous helpers
2476 ---------------------------------------------------------------------------*/
2478 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2479 flow. Transfers of control flow associated with EH are excluded. */
2481 static bool
2482 call_can_make_abnormal_goto (gimple *t)
2484 /* If the function has no non-local labels, then a call cannot make an
2485 abnormal transfer of control. */
2486 if (!cfun->has_nonlocal_label
2487 && !cfun->calls_setjmp)
2488 return false;
2490 /* Likewise if the call has no side effects. */
2491 if (!gimple_has_side_effects (t))
2492 return false;
2494 /* Likewise if the called function is leaf. */
2495 if (gimple_call_flags (t) & ECF_LEAF)
2496 return false;
2498 return true;
2502 /* Return true if T can make an abnormal transfer of control flow.
2503 Transfers of control flow associated with EH are excluded. */
2505 bool
2506 stmt_can_make_abnormal_goto (gimple *t)
2508 if (computed_goto_p (t))
2509 return true;
2510 if (is_gimple_call (t))
2511 return call_can_make_abnormal_goto (t);
2512 return false;
2516 /* Return true if T represents a stmt that always transfers control. */
2518 bool
2519 is_ctrl_stmt (gimple *t)
2521 switch (gimple_code (t))
2523 case GIMPLE_COND:
2524 case GIMPLE_SWITCH:
2525 case GIMPLE_GOTO:
2526 case GIMPLE_RETURN:
2527 case GIMPLE_RESX:
2528 return true;
2529 default:
2530 return false;
2535 /* Return true if T is a statement that may alter the flow of control
2536 (e.g., a call to a non-returning function). */
2538 bool
2539 is_ctrl_altering_stmt (gimple *t)
2541 gcc_assert (t);
2543 switch (gimple_code (t))
2545 case GIMPLE_CALL:
2546 /* Per stmt call flag indicates whether the call could alter
2547 controlflow. */
2548 if (gimple_call_ctrl_altering_p (t))
2549 return true;
2550 break;
2552 case GIMPLE_EH_DISPATCH:
2553 /* EH_DISPATCH branches to the individual catch handlers at
2554 this level of a try or allowed-exceptions region. It can
2555 fallthru to the next statement as well. */
2556 return true;
2558 case GIMPLE_ASM:
2559 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2560 return true;
2561 break;
2563 CASE_GIMPLE_OMP:
2564 /* OpenMP directives alter control flow. */
2565 return true;
2567 case GIMPLE_TRANSACTION:
2568 /* A transaction start alters control flow. */
2569 return true;
2571 default:
2572 break;
2575 /* If a statement can throw, it alters control flow. */
2576 return stmt_can_throw_internal (t);
2580 /* Return true if T is a simple local goto. */
2582 bool
2583 simple_goto_p (gimple *t)
2585 return (gimple_code (t) == GIMPLE_GOTO
2586 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2590 /* Return true if STMT should start a new basic block. PREV_STMT is
2591 the statement preceding STMT. It is used when STMT is a label or a
2592 case label. Labels should only start a new basic block if their
2593 previous statement wasn't a label. Otherwise, sequence of labels
2594 would generate unnecessary basic blocks that only contain a single
2595 label. */
2597 static inline bool
2598 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2600 if (stmt == NULL)
2601 return false;
2603 /* Labels start a new basic block only if the preceding statement
2604 wasn't a label of the same type. This prevents the creation of
2605 consecutive blocks that have nothing but a single label. */
2606 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2608 /* Nonlocal and computed GOTO targets always start a new block. */
2609 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2610 || FORCED_LABEL (gimple_label_label (label_stmt)))
2611 return true;
2613 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2615 if (DECL_NONLOCAL (gimple_label_label (
2616 as_a <glabel *> (prev_stmt))))
2617 return true;
2619 cfg_stats.num_merged_labels++;
2620 return false;
2622 else
2623 return true;
2625 else if (gimple_code (stmt) == GIMPLE_CALL)
2627 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2628 /* setjmp acts similar to a nonlocal GOTO target and thus should
2629 start a new block. */
2630 return true;
2631 if (gimple_call_internal_p (stmt, IFN_PHI)
2632 && prev_stmt
2633 && gimple_code (prev_stmt) != GIMPLE_LABEL
2634 && (gimple_code (prev_stmt) != GIMPLE_CALL
2635 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2636 /* PHI nodes start a new block unless preceeded by a label
2637 or another PHI. */
2638 return true;
2641 return false;
2645 /* Return true if T should end a basic block. */
2647 bool
2648 stmt_ends_bb_p (gimple *t)
2650 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2653 /* Remove block annotations and other data structures. */
2655 void
2656 delete_tree_cfg_annotations (struct function *fn)
2658 vec_free (label_to_block_map_for_fn (fn));
2661 /* Return the virtual phi in BB. */
2663 gphi *
2664 get_virtual_phi (basic_block bb)
2666 for (gphi_iterator gsi = gsi_start_phis (bb);
2667 !gsi_end_p (gsi);
2668 gsi_next (&gsi))
2670 gphi *phi = gsi.phi ();
2672 if (virtual_operand_p (PHI_RESULT (phi)))
2673 return phi;
2676 return NULL;
2679 /* Return the first statement in basic block BB. */
2681 gimple *
2682 first_stmt (basic_block bb)
2684 gimple_stmt_iterator i = gsi_start_bb (bb);
2685 gimple *stmt = NULL;
2687 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2689 gsi_next (&i);
2690 stmt = NULL;
2692 return stmt;
2695 /* Return the first non-label statement in basic block BB. */
2697 static gimple *
2698 first_non_label_stmt (basic_block bb)
2700 gimple_stmt_iterator i = gsi_start_bb (bb);
2701 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2702 gsi_next (&i);
2703 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2706 /* Return the last statement in basic block BB. */
2708 gimple *
2709 last_stmt (basic_block bb)
2711 gimple_stmt_iterator i = gsi_last_bb (bb);
2712 gimple *stmt = NULL;
2714 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2716 gsi_prev (&i);
2717 stmt = NULL;
2719 return stmt;
2722 /* Return the last statement of an otherwise empty block. Return NULL
2723 if the block is totally empty, or if it contains more than one
2724 statement. */
2726 gimple *
2727 last_and_only_stmt (basic_block bb)
2729 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2730 gimple *last, *prev;
2732 if (gsi_end_p (i))
2733 return NULL;
2735 last = gsi_stmt (i);
2736 gsi_prev_nondebug (&i);
2737 if (gsi_end_p (i))
2738 return last;
2740 /* Empty statements should no longer appear in the instruction stream.
2741 Everything that might have appeared before should be deleted by
2742 remove_useless_stmts, and the optimizers should just gsi_remove
2743 instead of smashing with build_empty_stmt.
2745 Thus the only thing that should appear here in a block containing
2746 one executable statement is a label. */
2747 prev = gsi_stmt (i);
2748 if (gimple_code (prev) == GIMPLE_LABEL)
2749 return last;
2750 else
2751 return NULL;
2754 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2756 static void
2757 reinstall_phi_args (edge new_edge, edge old_edge)
2759 edge_var_map *vm;
2760 int i;
2761 gphi_iterator phis;
2763 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2764 if (!v)
2765 return;
2767 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2768 v->iterate (i, &vm) && !gsi_end_p (phis);
2769 i++, gsi_next (&phis))
2771 gphi *phi = phis.phi ();
2772 tree result = redirect_edge_var_map_result (vm);
2773 tree arg = redirect_edge_var_map_def (vm);
2775 gcc_assert (result == gimple_phi_result (phi));
2777 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2780 redirect_edge_var_map_clear (old_edge);
2783 /* Returns the basic block after which the new basic block created
2784 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2785 near its "logical" location. This is of most help to humans looking
2786 at debugging dumps. */
2788 basic_block
2789 split_edge_bb_loc (edge edge_in)
2791 basic_block dest = edge_in->dest;
2792 basic_block dest_prev = dest->prev_bb;
2794 if (dest_prev)
2796 edge e = find_edge (dest_prev, dest);
2797 if (e && !(e->flags & EDGE_COMPLEX))
2798 return edge_in->src;
2800 return dest_prev;
2803 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2804 Abort on abnormal edges. */
2806 static basic_block
2807 gimple_split_edge (edge edge_in)
2809 basic_block new_bb, after_bb, dest;
2810 edge new_edge, e;
2812 /* Abnormal edges cannot be split. */
2813 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2815 dest = edge_in->dest;
2817 after_bb = split_edge_bb_loc (edge_in);
2819 new_bb = create_empty_bb (after_bb);
2820 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2821 new_bb->count = edge_in->count;
2822 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2823 new_edge->probability = REG_BR_PROB_BASE;
2824 new_edge->count = edge_in->count;
2826 e = redirect_edge_and_branch (edge_in, new_bb);
2827 gcc_assert (e == edge_in);
2828 reinstall_phi_args (new_edge, e);
2830 return new_bb;
2834 /* Verify properties of the address expression T with base object BASE. */
2836 static tree
2837 verify_address (tree t, tree base)
2839 bool old_constant;
2840 bool old_side_effects;
2841 bool new_constant;
2842 bool new_side_effects;
2844 old_constant = TREE_CONSTANT (t);
2845 old_side_effects = TREE_SIDE_EFFECTS (t);
2847 recompute_tree_invariant_for_addr_expr (t);
2848 new_side_effects = TREE_SIDE_EFFECTS (t);
2849 new_constant = TREE_CONSTANT (t);
2851 if (old_constant != new_constant)
2853 error ("constant not recomputed when ADDR_EXPR changed");
2854 return t;
2856 if (old_side_effects != new_side_effects)
2858 error ("side effects not recomputed when ADDR_EXPR changed");
2859 return t;
2862 if (!(VAR_P (base)
2863 || TREE_CODE (base) == PARM_DECL
2864 || TREE_CODE (base) == RESULT_DECL))
2865 return NULL_TREE;
2867 if (DECL_GIMPLE_REG_P (base))
2869 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2870 return base;
2873 return NULL_TREE;
2876 /* Callback for walk_tree, check that all elements with address taken are
2877 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2878 inside a PHI node. */
2880 static tree
2881 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2883 tree t = *tp, x;
2885 if (TYPE_P (t))
2886 *walk_subtrees = 0;
2888 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2889 #define CHECK_OP(N, MSG) \
2890 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2891 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
/* Each case either returns the offending subtree after reporting via
   error (), or breaks so the function returns NULL ("no problem").  */
2893 switch (TREE_CODE (t))
2895 case SSA_NAME:
2896 if (SSA_NAME_IN_FREE_LIST (t))
2898 error ("SSA name in freelist but still referenced");
2899 return *tp;
2901 break;
2903 case PARM_DECL:
2904 case VAR_DECL:
2905 case RESULT_DECL:
/* Declarations must belong to the current function unless they are
   file-scope, static, or external.  */
2907 tree context = decl_function_context (t);
2908 if (context != cfun->decl
2909 && !SCOPE_FILE_SCOPE_P (context)
2910 && !TREE_STATIC (t)
2911 && !DECL_EXTERNAL (t))
2913 error ("Local declaration from a different function");
2914 return t;
2917 break;
2919 case INDIRECT_REF:
2920 error ("INDIRECT_REF in gimple IL");
2921 return t;
2923 case MEM_REF:
/* The address operand of a MEM_REF is verified here and walked
   manually, hence the *walk_subtrees = 0 below.  */
2924 x = TREE_OPERAND (t, 0);
2925 if (!POINTER_TYPE_P (TREE_TYPE (x))
2926 || !is_gimple_mem_ref_addr (x))
2928 error ("invalid first operand of MEM_REF");
2929 return x;
2931 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2932 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2934 error ("invalid offset operand of MEM_REF");
2935 return TREE_OPERAND (t, 1);
2937 if (TREE_CODE (x) == ADDR_EXPR)
2939 tree va = verify_address (x, TREE_OPERAND (x, 0));
2940 if (va)
2941 return va;
2942 x = TREE_OPERAND (x, 0);
2944 walk_tree (&x, verify_expr, data, NULL);
2945 *walk_subtrees = 0;
2946 break;
2948 case ASSERT_EXPR:
2949 x = fold (ASSERT_EXPR_COND (t));
2950 if (x == boolean_false_node)
2952 error ("ASSERT_EXPR with an always-false condition");
2953 return *tp;
2955 break;
2957 case MODIFY_EXPR:
2958 error ("MODIFY_EXPR not expected while having tuples");
2959 return *tp;
2961 case ADDR_EXPR:
2963 tree tem;
2965 gcc_assert (is_gimple_address (t));
2967 /* Skip any references (they will be checked when we recurse down the
2968 tree) and ensure that any variable used as a prefix is marked
2969 addressable. */
2970 for (x = TREE_OPERAND (t, 0);
2971 handled_component_p (x);
2972 x = TREE_OPERAND (x, 0))
2975 if ((tem = verify_address (t, x)))
2976 return tem;
2978 if (!(VAR_P (x)
2979 || TREE_CODE (x) == PARM_DECL
2980 || TREE_CODE (x) == RESULT_DECL))
2981 return NULL;
2983 if (!TREE_ADDRESSABLE (x))
2985 error ("address taken, but ADDRESSABLE bit not set");
2986 return x;
2989 break;
2992 case COND_EXPR:
2993 x = COND_EXPR_COND (t);
2994 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2996 error ("non-integral used in condition");
2997 return x;
2999 if (!is_gimple_condexpr (x))
3001 error ("invalid conditional operand");
3002 return x;
3004 break;
3006 case NON_LVALUE_EXPR:
3007 case TRUTH_NOT_EXPR:
3008 gcc_unreachable ();
3010 CASE_CONVERT:
3011 case FIX_TRUNC_EXPR:
3012 case FLOAT_EXPR:
3013 case NEGATE_EXPR:
3014 case ABS_EXPR:
3015 case BIT_NOT_EXPR:
3016 CHECK_OP (0, "invalid operand to unary operator");
3017 break;
3019 case REALPART_EXPR:
3020 case IMAGPART_EXPR:
3021 case BIT_FIELD_REF:
3022 if (!is_gimple_reg_type (TREE_TYPE (t)))
3024 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3025 return t;
3028 if (TREE_CODE (t) == BIT_FIELD_REF)
3030 tree t0 = TREE_OPERAND (t, 0);
3031 tree t1 = TREE_OPERAND (t, 1);
3032 tree t2 = TREE_OPERAND (t, 2);
3033 if (!tree_fits_uhwi_p (t1)
3034 || !tree_fits_uhwi_p (t2))
3036 error ("invalid position or size operand to BIT_FIELD_REF");
3037 return t;
3039 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3040 && (TYPE_PRECISION (TREE_TYPE (t))
3041 != tree_to_uhwi (t1)))
3043 error ("integral result type precision does not match "
3044 "field size of BIT_FIELD_REF");
3045 return t;
3047 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3048 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3049 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3050 != tree_to_uhwi (t1)))
3052 error ("mode size of non-integral result does not "
3053 "match field size of BIT_FIELD_REF");
3054 return t;
3056 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3057 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3058 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3060 error ("position plus size exceeds size of referenced object in "
3061 "BIT_FIELD_REF");
3062 return t;
3065 t = TREE_OPERAND (t, 0);
3067 /* Fall-through. */
3068 case COMPONENT_REF:
3069 case ARRAY_REF:
3070 case ARRAY_RANGE_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
3076 while (handled_component_p (t))
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3083 CHECK_OP (1, "invalid array index");
3084 if (TREE_OPERAND (t, 2))
3085 CHECK_OP (2, "invalid array lower bound");
3086 if (TREE_OPERAND (t, 3))
3087 CHECK_OP (3, "invalid array stride");
3089 else if (TREE_CODE (t) == BIT_FIELD_REF
3090 || TREE_CODE (t) == REALPART_EXPR
3091 || TREE_CODE (t) == IMAGPART_EXPR)
3093 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3094 "REALPART_EXPR");
3095 return t;
3098 t = TREE_OPERAND (t, 0);
/* T is now the base of the reference chain; it must be an invariant
   or an lvalue, and its subtree is walked manually below.  */
3101 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3103 error ("invalid reference prefix");
3104 return t;
3106 walk_tree (&t, verify_expr, data, NULL);
3107 *walk_subtrees = 0;
3108 break;
3109 case PLUS_EXPR:
3110 case MINUS_EXPR:
3111 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3112 POINTER_PLUS_EXPR. */
3113 if (POINTER_TYPE_P (TREE_TYPE (t)))
3115 error ("invalid operand to plus/minus, type is a pointer");
3116 return t;
3118 CHECK_OP (0, "invalid operand to binary operator");
3119 CHECK_OP (1, "invalid operand to binary operator");
3120 break;
3122 case POINTER_PLUS_EXPR:
3123 /* Check to make sure the first operand is a pointer or reference type. */
3124 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3126 error ("invalid operand to pointer plus, first operand is not a pointer");
3127 return t;
3129 /* Check to make sure the second operand is a ptrofftype. */
3130 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3132 error ("invalid operand to pointer plus, second operand is not an "
3133 "integer type of appropriate width");
3134 return t;
3136 /* FALLTHROUGH */
3137 case LT_EXPR:
3138 case LE_EXPR:
3139 case GT_EXPR:
3140 case GE_EXPR:
3141 case EQ_EXPR:
3142 case NE_EXPR:
3143 case UNORDERED_EXPR:
3144 case ORDERED_EXPR:
3145 case UNLT_EXPR:
3146 case UNLE_EXPR:
3147 case UNGT_EXPR:
3148 case UNGE_EXPR:
3149 case UNEQ_EXPR:
3150 case LTGT_EXPR:
3151 case MULT_EXPR:
3152 case TRUNC_DIV_EXPR:
3153 case CEIL_DIV_EXPR:
3154 case FLOOR_DIV_EXPR:
3155 case ROUND_DIV_EXPR:
3156 case TRUNC_MOD_EXPR:
3157 case CEIL_MOD_EXPR:
3158 case FLOOR_MOD_EXPR:
3159 case ROUND_MOD_EXPR:
3160 case RDIV_EXPR:
3161 case EXACT_DIV_EXPR:
3162 case MIN_EXPR:
3163 case MAX_EXPR:
3164 case LSHIFT_EXPR:
3165 case RSHIFT_EXPR:
3166 case LROTATE_EXPR:
3167 case RROTATE_EXPR:
3168 case BIT_IOR_EXPR:
3169 case BIT_XOR_EXPR:
3170 case BIT_AND_EXPR:
3171 CHECK_OP (0, "invalid operand to binary operator");
3172 CHECK_OP (1, "invalid operand to binary operator");
3173 break;
3175 case CONSTRUCTOR:
3176 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3177 *walk_subtrees = 0;
3178 break;
3180 case CASE_LABEL_EXPR:
3181 if (CASE_CHAIN (t))
3183 error ("invalid CASE_CHAIN");
3184 return t;
3186 break;
3188 default:
3189 break;
3191 return NULL;
3193 #undef CHECK_OP
3197 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3198 Returns true if there is an error, otherwise false. */
3200 static bool
3201 verify_types_in_gimple_min_lval (tree expr)
3203 tree op;
3205 if (is_gimple_id (expr))
3206 return false;
3208 if (TREE_CODE (expr) != TARGET_MEM_REF
3209 && TREE_CODE (expr) != MEM_REF)
3211 error ("invalid expression for min lvalue");
3212 return true;
3215 /* TARGET_MEM_REFs are strange beasts. */
3216 if (TREE_CODE (expr) == TARGET_MEM_REF)
3217 return false;
3219 op = TREE_OPERAND (expr, 0);
3220 if (!is_gimple_val (op))
3222 error ("invalid operand in indirect reference");
3223 debug_generic_stmt (op);
3224 return true;
3226 /* Memory references now generally can involve a value conversion. */
3228 return false;
3231 /* Verify if EXPR is a valid GIMPLE reference expression. If
3232 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3233 if there is an error, otherwise false. */
3235 static bool
3236 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
/* Peel the handled components off EXPR outermost-first, checking each
   level's operands and types against its immediate operand OP.  */
3238 while (handled_component_p (expr))
3240 tree op = TREE_OPERAND (expr, 0);
3242 if (TREE_CODE (expr) == ARRAY_REF
3243 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3245 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3246 || (TREE_OPERAND (expr, 2)
3247 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3248 || (TREE_OPERAND (expr, 3)
3249 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3251 error ("invalid operands to array reference");
3252 debug_generic_stmt (expr);
3253 return true;
3257 /* Verify if the reference array element types are compatible. */
3258 if (TREE_CODE (expr) == ARRAY_REF
3259 && !useless_type_conversion_p (TREE_TYPE (expr),
3260 TREE_TYPE (TREE_TYPE (op))))
3262 error ("type mismatch in array reference");
3263 debug_generic_stmt (TREE_TYPE (expr));
3264 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3265 return true;
3267 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3268 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3269 TREE_TYPE (TREE_TYPE (op))))
3271 error ("type mismatch in array range reference");
3272 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3273 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3274 return true;
3277 if ((TREE_CODE (expr) == REALPART_EXPR
3278 || TREE_CODE (expr) == IMAGPART_EXPR)
3279 && !useless_type_conversion_p (TREE_TYPE (expr),
3280 TREE_TYPE (TREE_TYPE (op))))
3282 error ("type mismatch in real/imagpart reference");
3283 debug_generic_stmt (TREE_TYPE (expr));
3284 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3285 return true;
3288 if (TREE_CODE (expr) == COMPONENT_REF
3289 && !useless_type_conversion_p (TREE_TYPE (expr),
3290 TREE_TYPE (TREE_OPERAND (expr, 1))))
3292 error ("type mismatch in component reference");
3293 debug_generic_stmt (TREE_TYPE (expr));
3294 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3295 return true;
3298 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3300 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3301 that their operand is not an SSA name or an invariant when
3302 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3303 bug). Otherwise there is nothing to verify, gross mismatches at
3304 most invoke undefined behavior. */
3305 if (require_lvalue
3306 && (TREE_CODE (op) == SSA_NAME
3307 || is_gimple_min_invariant (op)))
3309 error ("conversion of an SSA_NAME on the left hand side");
3310 debug_generic_stmt (expr);
3311 return true;
3313 else if (TREE_CODE (op) == SSA_NAME
3314 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3316 error ("conversion of register to a different size");
3317 debug_generic_stmt (expr);
3318 return true;
3320 else if (!handled_component_p (op))
3321 return false;
/* Move down one level in the reference chain.  */
3324 expr = op;
/* EXPR is now the innermost (base) reference; validate MEM_REF and
   TARGET_MEM_REF bases, then fall through to the min-lval check.  */
3327 if (TREE_CODE (expr) == MEM_REF)
3329 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3331 error ("invalid address operand in MEM_REF");
3332 debug_generic_stmt (expr);
3333 return true;
3335 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3336 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3338 error ("invalid offset operand in MEM_REF");
3339 debug_generic_stmt (expr);
3340 return true;
3343 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3345 if (!TMR_BASE (expr)
3346 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3348 error ("invalid address operand in TARGET_MEM_REF");
3349 return true;
3351 if (!TMR_OFFSET (expr)
3352 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3353 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3355 error ("invalid offset operand in TARGET_MEM_REF");
3356 debug_generic_stmt (expr);
3357 return true;
3361 return ((require_lvalue || !is_gimple_min_invariant (expr))
3362 && verify_types_in_gimple_min_lval (expr));
3365 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3366 list of pointer-to types that is trivially convertible to DEST. */
3368 static bool
3369 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3371 tree src;
3373 if (!TYPE_POINTER_TO (src_obj))
3374 return true;
3376 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3377 if (useless_type_conversion_p (dest, src))
3378 return true;
3380 return false;
3383 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3384 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3386 static bool
3387 valid_fixed_convert_types_p (tree type1, tree type2)
3389 return (FIXED_POINT_TYPE_P (type1)
3390 && (INTEGRAL_TYPE_P (type2)
3391 || SCALAR_FLOAT_TYPE_P (type2)
3392 || FIXED_POINT_TYPE_P (type2)));
3395 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3396 is a problem, otherwise false. */
3398 static bool
3399 verify_gimple_call (gcall *stmt)
/* Each check below reports via error () and returns true on the first
   malformed property found; falling off the end means the call is OK.  */
3401 tree fn = gimple_call_fn (stmt);
3402 tree fntype, fndecl;
3403 unsigned i;
/* Internal calls have no fn tree; a direct/indirect call must have one.  */
3405 if (gimple_call_internal_p (stmt))
3407 if (fn)
3409 error ("gimple call has two targets");
3410 debug_generic_stmt (fn);
3411 return true;
3413 /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
3414 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3416 return false;
3419 else
3421 if (!fn)
3423 error ("gimple call has no target");
3424 return true;
3428 if (fn && !is_gimple_call_addr (fn))
3430 error ("invalid function in gimple call");
3431 debug_generic_stmt (fn);
3432 return true;
3435 if (fn
3436 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3437 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3438 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3440 error ("non-function in gimple call");
3441 return true;
3444 fndecl = gimple_call_fndecl (stmt);
3445 if (fndecl
3446 && TREE_CODE (fndecl) == FUNCTION_DECL
3447 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3448 && !DECL_PURE_P (fndecl)
3449 && !TREE_READONLY (fndecl))
3451 error ("invalid pure const state for function");
3452 return true;
3455 tree lhs = gimple_call_lhs (stmt);
3456 if (lhs
3457 && (!is_gimple_lvalue (lhs)
3458 || verify_types_in_gimple_reference (lhs, true)))
3460 error ("invalid LHS in gimple call");
3461 return true;
3464 if (gimple_call_ctrl_altering_p (stmt)
3465 && gimple_call_noreturn_p (stmt)
3466 && should_remove_lhs_p (lhs))
3468 error ("LHS in noreturn call");
3469 return true;
3472 fntype = gimple_call_fntype (stmt);
3473 if (fntype
3474 && lhs
3475 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3476 /* ??? At least C++ misses conversions at assignments from
3477 void * call results.
3478 ??? Java is completely off. Especially with functions
3479 returning java.lang.Object.
3480 For now simply allow arbitrary pointer type conversions. */
3481 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3482 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3484 error ("invalid conversion in gimple call");
3485 debug_generic_stmt (TREE_TYPE (lhs));
3486 debug_generic_stmt (TREE_TYPE (fntype));
3487 return true;
3490 if (gimple_call_chain (stmt)
3491 && !is_gimple_val (gimple_call_chain (stmt)))
3493 error ("invalid static chain in gimple call");
3494 debug_generic_stmt (gimple_call_chain (stmt));
3495 return true;
3498 /* If there is a static chain argument, the call should either be
3499 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3500 if (gimple_call_chain (stmt)
3501 && fndecl
3502 && !DECL_STATIC_CHAIN (fndecl))
3504 error ("static chain with function that doesn%'t use one");
3505 return true;
3508 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3510 switch (DECL_FUNCTION_CODE (fndecl))
3512 case BUILT_IN_UNREACHABLE:
3513 case BUILT_IN_TRAP:
3514 if (gimple_call_num_args (stmt) > 0)
3516 /* Built-in unreachable with parameters might not be caught by
3517 undefined behavior sanitizer. Front-ends do check users do not
3518 call them that way but we also produce calls to
3519 __builtin_unreachable internally, for example when IPA figures
3520 out a call cannot happen in a legal program. In such cases,
3521 we must make sure arguments are stripped off. */
3522 error ("__builtin_unreachable or __builtin_trap call with "
3523 "arguments");
3524 return true;
3526 break;
3527 default:
3528 break;
3532 /* ??? The C frontend passes unpromoted arguments in case it
3533 didn't see a function declaration before the call. So for now
3534 leave the call arguments mostly unverified. Once we gimplify
3535 unit-at-a-time we have a chance to fix this. */
3537 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3539 tree arg = gimple_call_arg (stmt, i);
3540 if ((is_gimple_reg_type (TREE_TYPE (arg))
3541 && !is_gimple_val (arg))
3542 || (!is_gimple_reg_type (TREE_TYPE (arg))
3543 && !is_gimple_lvalue (arg)))
3545 error ("invalid argument to gimple call");
3546 debug_generic_expr (arg);
3547 return true;
3551 return false;
3554 /* Verifies the gimple comparison with the result type TYPE and
3555 the operands OP0 and OP1, comparison code is CODE.
   Returns true (after emitting an error) if the comparison is
   malformed, false if it verifies.  */
3557 static bool
3558 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3560 tree op0_type = TREE_TYPE (op0);
3561 tree op1_type = TREE_TYPE (op1);
   /* Both operands must be gimple values (SSA names, registers or
      invariants).  */
3563 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3565 error ("invalid operands in gimple comparison");
3566 return true;
3569 /* For comparisons we do not have the operations type as the
3570 effective type the comparison is carried out in. Instead
3571 we require that either the first operand is trivially
3572 convertible into the second, or the other way around.
3573 Because we special-case pointers to void we allow
3574 comparisons of pointers with the same mode as well. */
3575 if (!useless_type_conversion_p (op0_type, op1_type)
3576 && !useless_type_conversion_p (op1_type, op0_type)
3577 && (!POINTER_TYPE_P (op0_type)
3578 || !POINTER_TYPE_P (op1_type)
3579 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3581 error ("mismatching comparison operand types");
3582 debug_generic_expr (op0_type);
3583 debug_generic_expr (op1_type);
3584 return true;
3587 /* The resulting type of a comparison may be an effective boolean type. */
3588 if (INTEGRAL_TYPE_P (type)
3589 && (TREE_CODE (type) == BOOLEAN_TYPE
3590 || TYPE_PRECISION (type) == 1))
   /* A scalar boolean result for vector operands is only allowed for
      (in)equality tests of boolean or integer vectors.  */
3592 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3593 || TREE_CODE (op1_type) == VECTOR_TYPE)
3594 && code != EQ_EXPR && code != NE_EXPR
3595 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3596 && !VECTOR_INTEGER_TYPE_P (op0_type))
3598 error ("unsupported operation or type for vector comparison"
3599 " returning a boolean");
3600 debug_generic_expr (op0_type);
3601 debug_generic_expr (op1_type);
3602 return true;
3605 /* Or a boolean vector type with the same element count
3606 as the comparison operand types. */
3607 else if (TREE_CODE (type) == VECTOR_TYPE
3608 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3610 if (TREE_CODE (op0_type) != VECTOR_TYPE
3611 || TREE_CODE (op1_type) != VECTOR_TYPE)
3613 error ("non-vector operands in vector comparison");
3614 debug_generic_expr (op0_type);
3615 debug_generic_expr (op1_type);
3616 return true;
3619 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3621 error ("invalid vector comparison resulting type");
3622 debug_generic_expr (type);
3623 return true;
3626 else
   /* Any other result type is invalid for a comparison.  */
3628 error ("bogus comparison result type");
3629 debug_generic_expr (type);
3630 return true;
3633 return false;
3636 /* Verify a gimple assignment statement STMT with an unary rhs.
3637 Returns true if anything is wrong. */
3639 static bool
3640 verify_gimple_assign_unary (gassign *stmt)
3642 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3643 tree lhs = gimple_assign_lhs (stmt);
3644 tree lhs_type = TREE_TYPE (lhs);
3645 tree rhs1 = gimple_assign_rhs1 (stmt);
3646 tree rhs1_type = TREE_TYPE (rhs1);
   /* A unary operation always produces a register value.  */
3648 if (!is_gimple_reg (lhs))
3650 error ("non-register as LHS of unary operation");
3651 return true;
3654 if (!is_gimple_val (rhs1))
3656 error ("invalid operand in unary operation");
3657 return true;
3660 /* First handle conversions. */
3661 switch (rhs_code)
3663 CASE_CONVERT:
3665 /* Allow conversions from pointer type to integral type only if
3666 there is no sign or zero extension involved.
3667 For targets where the precision of ptrofftype doesn't match that
3668 of pointers we need to allow arbitrary conversions to ptrofftype. */
3669 if ((POINTER_TYPE_P (lhs_type)
3670 && INTEGRAL_TYPE_P (rhs1_type))
3671 || (POINTER_TYPE_P (rhs1_type)
3672 && INTEGRAL_TYPE_P (lhs_type)
3673 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3674 || ptrofftype_p (sizetype))))
3675 return false;
3677 /* Allow conversion from integral to offset type and vice versa. */
3678 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3679 && INTEGRAL_TYPE_P (rhs1_type))
3680 || (INTEGRAL_TYPE_P (lhs_type)
3681 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3682 return false;
3684 /* Otherwise assert we are converting between types of the
3685 same kind. */
3686 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3688 error ("invalid types in nop conversion");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3694 return false;
3697 case ADDR_SPACE_CONVERT_EXPR:
   /* Both sides must be pointers into *different* address spaces.  */
3699 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3700 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3701 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3703 error ("invalid types in address space conversion");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 return true;
3709 return false;
3712 case FIXED_CONVERT_EXPR:
3714 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3715 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3717 error ("invalid types in fixed-point conversion");
3718 debug_generic_expr (lhs_type);
3719 debug_generic_expr (rhs1_type);
3720 return true;
3723 return false;
3726 case FLOAT_EXPR:
   /* Integer-to-float, either scalar or elementwise on vectors.  */
3728 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3729 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3730 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3732 error ("invalid types in conversion to floating point");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 return true;
3738 return false;
3741 case FIX_TRUNC_EXPR:
   /* Float-to-integer, either scalar or elementwise on vectors.  */
3743 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3744 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3745 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3747 error ("invalid types in conversion to integer");
3748 debug_generic_expr (lhs_type);
3749 debug_generic_expr (rhs1_type);
3750 return true;
3753 return false;
3755 case REDUC_MAX_EXPR:
3756 case REDUC_MIN_EXPR:
3757 case REDUC_PLUS_EXPR:
   /* Whole-vector reductions produce a scalar of the element type.  */
3758 if (!VECTOR_TYPE_P (rhs1_type)
3759 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3761 error ("reduction should convert from vector to element type");
3762 debug_generic_expr (lhs_type);
3763 debug_generic_expr (rhs1_type);
3764 return true;
3766 return false;
3768 case VEC_UNPACK_HI_EXPR:
3769 case VEC_UNPACK_LO_EXPR:
3770 case VEC_UNPACK_FLOAT_HI_EXPR:
3771 case VEC_UNPACK_FLOAT_LO_EXPR:
3772 /* FIXME. */
3773 return false;
3775 case NEGATE_EXPR:
3776 case ABS_EXPR:
3777 case BIT_NOT_EXPR:
3778 case PAREN_EXPR:
3779 case CONJ_EXPR:
3780 break;
3782 default:
3783 gcc_unreachable ();
3786 /* For the remaining codes assert there is no conversion involved. */
3787 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3789 error ("non-trivial conversion in unary operation");
3790 debug_generic_expr (lhs_type);
3791 debug_generic_expr (rhs1_type);
3792 return true;
3795 return false;
3798 /* Verify a gimple assignment statement STMT with a binary rhs.
3799 Returns true if anything is wrong. */
3801 static bool
3802 verify_gimple_assign_binary (gassign *stmt)
3804 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3805 tree lhs = gimple_assign_lhs (stmt);
3806 tree lhs_type = TREE_TYPE (lhs);
3807 tree rhs1 = gimple_assign_rhs1 (stmt);
3808 tree rhs1_type = TREE_TYPE (rhs1);
3809 tree rhs2 = gimple_assign_rhs2 (stmt);
3810 tree rhs2_type = TREE_TYPE (rhs2);
   /* A binary operation always produces a register value.  */
3812 if (!is_gimple_reg (lhs))
3814 error ("non-register as LHS of binary operation");
3815 return true;
3818 if (!is_gimple_val (rhs1)
3819 || !is_gimple_val (rhs2))
3821 error ("invalid operands in binary operation");
3822 return true;
3825 /* First handle operations that involve different types. */
3826 switch (rhs_code)
3828 case COMPLEX_EXPR:
3830 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3831 || !(INTEGRAL_TYPE_P (rhs1_type)
3832 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3833 || !(INTEGRAL_TYPE_P (rhs2_type)
3834 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3836 error ("type mismatch in complex expression");
3837 debug_generic_expr (lhs_type);
3838 debug_generic_expr (rhs1_type);
3839 debug_generic_expr (rhs2_type);
3840 return true;
3843 return false;
3846 case LSHIFT_EXPR:
3847 case RSHIFT_EXPR:
3848 case LROTATE_EXPR:
3849 case RROTATE_EXPR:
3851 /* Shifts and rotates are ok on integral types, fixed point
3852 types and integer vector types. */
3853 if ((!INTEGRAL_TYPE_P (rhs1_type)
3854 && !FIXED_POINT_TYPE_P (rhs1_type)
3855 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3856 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3857 || (!INTEGRAL_TYPE_P (rhs2_type)
3858 /* Vector shifts of vectors are also ok. */
3859 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3860 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3861 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3862 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3863 || !useless_type_conversion_p (lhs_type, rhs1_type))
3865 error ("type mismatch in shift expression");
3866 debug_generic_expr (lhs_type);
3867 debug_generic_expr (rhs1_type);
3868 debug_generic_expr (rhs2_type);
3869 return true;
3872 return false;
3875 case WIDEN_LSHIFT_EXPR:
   /* Scalar widening shift: result must be at least twice as wide
      as the first operand, and the shift count must be constant.  */
3877 if (!INTEGRAL_TYPE_P (lhs_type)
3878 || !INTEGRAL_TYPE_P (rhs1_type)
3879 || TREE_CODE (rhs2) != INTEGER_CST
3880 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3882 error ("type mismatch in widening vector shift expression");
3883 debug_generic_expr (lhs_type);
3884 debug_generic_expr (rhs1_type);
3885 debug_generic_expr (rhs2_type);
3886 return true;
3889 return false;
3892 case VEC_WIDEN_LSHIFT_HI_EXPR:
3893 case VEC_WIDEN_LSHIFT_LO_EXPR:
   /* Vector widening shift: same precision constraint, applied
      elementwise.  */
3895 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3896 || TREE_CODE (lhs_type) != VECTOR_TYPE
3897 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3898 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3899 || TREE_CODE (rhs2) != INTEGER_CST
3900 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3901 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3903 error ("type mismatch in widening vector shift expression");
3904 debug_generic_expr (lhs_type);
3905 debug_generic_expr (rhs1_type);
3906 debug_generic_expr (rhs2_type);
3907 return true;
3910 return false;
3913 case PLUS_EXPR:
3914 case MINUS_EXPR:
3916 tree lhs_etype = lhs_type;
3917 tree rhs1_etype = rhs1_type;
3918 tree rhs2_etype = rhs2_type;
3919 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3921 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3922 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3924 error ("invalid non-vector operands to vector valued plus");
3925 return true;
   /* For vectors, check the element types below instead.  */
3927 lhs_etype = TREE_TYPE (lhs_type);
3928 rhs1_etype = TREE_TYPE (rhs1_type);
3929 rhs2_etype = TREE_TYPE (rhs2_type);
   /* Pointer arithmetic must use POINTER_PLUS_EXPR, never PLUS/MINUS.  */
3931 if (POINTER_TYPE_P (lhs_etype)
3932 || POINTER_TYPE_P (rhs1_etype)
3933 || POINTER_TYPE_P (rhs2_etype))
3935 error ("invalid (pointer) operands to plus/minus");
3936 return true;
3939 /* Continue with generic binary expression handling. */
3940 break;
3943 case POINTER_PLUS_EXPR:
   /* pointer + offset; the offset operand must have ptrofftype.  */
3945 if (!POINTER_TYPE_P (rhs1_type)
3946 || !useless_type_conversion_p (lhs_type, rhs1_type)
3947 || !ptrofftype_p (rhs2_type))
3949 error ("type mismatch in pointer plus expression");
3950 debug_generic_stmt (lhs_type);
3951 debug_generic_stmt (rhs1_type);
3952 debug_generic_stmt (rhs2_type);
3953 return true;
3956 return false;
3959 case TRUTH_ANDIF_EXPR:
3960 case TRUTH_ORIF_EXPR:
3961 case TRUTH_AND_EXPR:
3962 case TRUTH_OR_EXPR:
3963 case TRUTH_XOR_EXPR:
   /* TRUTH_* codes are gimplified away and must not appear here.  */
3965 gcc_unreachable ();
3967 case LT_EXPR:
3968 case LE_EXPR:
3969 case GT_EXPR:
3970 case GE_EXPR:
3971 case EQ_EXPR:
3972 case NE_EXPR:
3973 case UNORDERED_EXPR:
3974 case ORDERED_EXPR:
3975 case UNLT_EXPR:
3976 case UNLE_EXPR:
3977 case UNGT_EXPR:
3978 case UNGE_EXPR:
3979 case UNEQ_EXPR:
3980 case LTGT_EXPR:
3981 /* Comparisons are also binary, but the result type is not
3982 connected to the operand types. */
3983 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3985 case WIDEN_MULT_EXPR:
3986 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3987 return true;
   /* Operands must have equal precision, at most half the result's.  */
3988 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3989 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3991 case WIDEN_SUM_EXPR:
3992 case VEC_WIDEN_MULT_HI_EXPR:
3993 case VEC_WIDEN_MULT_LO_EXPR:
3994 case VEC_WIDEN_MULT_EVEN_EXPR:
3995 case VEC_WIDEN_MULT_ODD_EXPR:
3996 case VEC_PACK_TRUNC_EXPR:
3997 case VEC_PACK_SAT_EXPR:
3998 case VEC_PACK_FIX_TRUNC_EXPR:
3999 /* FIXME. */
4000 return false;
4002 case MULT_EXPR:
4003 case MULT_HIGHPART_EXPR:
4004 case TRUNC_DIV_EXPR:
4005 case CEIL_DIV_EXPR:
4006 case FLOOR_DIV_EXPR:
4007 case ROUND_DIV_EXPR:
4008 case TRUNC_MOD_EXPR:
4009 case CEIL_MOD_EXPR:
4010 case FLOOR_MOD_EXPR:
4011 case ROUND_MOD_EXPR:
4012 case RDIV_EXPR:
4013 case EXACT_DIV_EXPR:
4014 case MIN_EXPR:
4015 case MAX_EXPR:
4016 case BIT_IOR_EXPR:
4017 case BIT_XOR_EXPR:
4018 case BIT_AND_EXPR:
4019 /* Continue with generic binary expression handling. */
4020 break;
4022 default:
4023 gcc_unreachable ();
   /* Generic case: all three types must agree trivially.  */
4026 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4027 || !useless_type_conversion_p (lhs_type, rhs2_type))
4029 error ("type mismatch in binary expression");
4030 debug_generic_stmt (lhs_type);
4031 debug_generic_stmt (rhs1_type);
4032 debug_generic_stmt (rhs2_type);
4033 return true;
4036 return false;
4039 /* Verify a gimple assignment statement STMT with a ternary rhs.
4040 Returns true if anything is wrong. */
4042 static bool
4043 verify_gimple_assign_ternary (gassign *stmt)
4045 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4046 tree lhs = gimple_assign_lhs (stmt);
4047 tree lhs_type = TREE_TYPE (lhs);
4048 tree rhs1 = gimple_assign_rhs1 (stmt);
4049 tree rhs1_type = TREE_TYPE (rhs1);
4050 tree rhs2 = gimple_assign_rhs2 (stmt);
4051 tree rhs2_type = TREE_TYPE (rhs2);
4052 tree rhs3 = gimple_assign_rhs3 (stmt);
4053 tree rhs3_type = TREE_TYPE (rhs3);
   /* A ternary operation always produces a register value.  */
4055 if (!is_gimple_reg (lhs))
4057 error ("non-register as LHS of ternary operation");
4058 return true;
   /* For (VEC_)COND_EXPR the first operand may be a condition
      expression, otherwise all operands must be gimple values.  */
4061 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4062 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4063 || !is_gimple_val (rhs2)
4064 || !is_gimple_val (rhs3))
4066 error ("invalid operands in ternary operation");
4067 return true;
4070 /* First handle operations that involve different types. */
4071 switch (rhs_code)
4073 case WIDEN_MULT_PLUS_EXPR:
4074 case WIDEN_MULT_MINUS_EXPR:
4075 if ((!INTEGRAL_TYPE_P (rhs1_type)
4076 && !FIXED_POINT_TYPE_P (rhs1_type))
4077 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4078 || !useless_type_conversion_p (lhs_type, rhs3_type)
4079 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4080 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4082 error ("type mismatch in widening multiply-accumulate expression");
4083 debug_generic_expr (lhs_type);
4084 debug_generic_expr (rhs1_type);
4085 debug_generic_expr (rhs2_type);
4086 debug_generic_expr (rhs3_type);
4087 return true;
4089 break;
4091 case FMA_EXPR:
   /* All three operands and the result share one type.  */
4092 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4093 || !useless_type_conversion_p (lhs_type, rhs2_type)
4094 || !useless_type_conversion_p (lhs_type, rhs3_type))
4096 error ("type mismatch in fused multiply-add expression");
4097 debug_generic_expr (lhs_type);
4098 debug_generic_expr (rhs1_type);
4099 debug_generic_expr (rhs2_type);
4100 debug_generic_expr (rhs3_type);
4101 return true;
4103 break;
4105 case VEC_COND_EXPR:
4106 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4107 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4108 != TYPE_VECTOR_SUBPARTS (lhs_type))
4110 error ("the first argument of a VEC_COND_EXPR must be of a "
4111 "boolean vector type of the same number of elements "
4112 "as the result");
4113 debug_generic_expr (lhs_type);
4114 debug_generic_expr (rhs1_type);
4115 return true;
4117 /* Fallthrough. */
4118 case COND_EXPR:
4119 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4120 || !useless_type_conversion_p (lhs_type, rhs3_type))
4122 error ("type mismatch in conditional expression");
4123 debug_generic_expr (lhs_type);
4124 debug_generic_expr (rhs2_type);
4125 debug_generic_expr (rhs3_type);
4126 return true;
4128 break;
4130 case VEC_PERM_EXPR:
4131 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4132 || !useless_type_conversion_p (lhs_type, rhs2_type))
4134 error ("type mismatch in vector permute expression");
4135 debug_generic_expr (lhs_type);
4136 debug_generic_expr (rhs1_type);
4137 debug_generic_expr (rhs2_type);
4138 debug_generic_expr (rhs3_type);
4139 return true;
4142 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4143 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4144 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4146 error ("vector types expected in vector permute expression");
4147 debug_generic_expr (lhs_type);
4148 debug_generic_expr (rhs1_type);
4149 debug_generic_expr (rhs2_type);
4150 debug_generic_expr (rhs3_type);
4151 return true;
   /* All four vectors must have the same element count.  */
4154 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4155 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4156 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4157 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4158 != TYPE_VECTOR_SUBPARTS (lhs_type))
4160 error ("vectors with different element number found "
4161 "in vector permute expression");
4162 debug_generic_expr (lhs_type);
4163 debug_generic_expr (rhs1_type);
4164 debug_generic_expr (rhs2_type);
4165 debug_generic_expr (rhs3_type);
4166 return true;
   /* The selector must be an integer vector with element size matching
      the data vectors' element size.  */
4169 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4170 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4171 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4173 error ("invalid mask type in vector permute expression");
4174 debug_generic_expr (lhs_type);
4175 debug_generic_expr (rhs1_type);
4176 debug_generic_expr (rhs2_type);
4177 debug_generic_expr (rhs3_type);
4178 return true;
4181 return false;
4183 case SAD_EXPR:
4184 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4185 || !useless_type_conversion_p (lhs_type, rhs3_type)
4186 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4187 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4189 error ("type mismatch in sad expression");
4190 debug_generic_expr (lhs_type);
4191 debug_generic_expr (rhs1_type);
4192 debug_generic_expr (rhs2_type);
4193 debug_generic_expr (rhs3_type);
4194 return true;
4197 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4198 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4199 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4201 error ("vector types expected in sad expression");
4202 debug_generic_expr (lhs_type);
4203 debug_generic_expr (rhs1_type);
4204 debug_generic_expr (rhs2_type);
4205 debug_generic_expr (rhs3_type);
4206 return true;
4209 return false;
4211 case BIT_INSERT_EXPR:
4212 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4214 error ("type mismatch in BIT_INSERT_EXPR");
4215 debug_generic_expr (lhs_type);
4216 debug_generic_expr (rhs1_type);
4217 return true;
   /* Insert either an integer into an integer, or a vector element
      of matching type into a vector.  */
4219 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4220 && INTEGRAL_TYPE_P (rhs2_type))
4221 || (VECTOR_TYPE_P (rhs1_type)
4222 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4224 error ("not allowed type combination in BIT_INSERT_EXPR");
4225 debug_generic_expr (rhs1_type);
4226 debug_generic_expr (rhs2_type);
4227 return true;
   /* RHS3 is the constant bit position of the insertion.  */
4229 if (! tree_fits_uhwi_p (rhs3)
4230 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4232 error ("invalid position or size in BIT_INSERT_EXPR");
4233 return true;
4235 if (INTEGRAL_TYPE_P (rhs1_type))
4237 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4238 if (bitpos >= TYPE_PRECISION (rhs1_type)
4239 || (bitpos + TYPE_PRECISION (rhs2_type)
4240 > TYPE_PRECISION (rhs1_type)))
4242 error ("insertion out of range in BIT_INSERT_EXPR");
4243 return true;
4246 else if (VECTOR_TYPE_P (rhs1_type))
4248 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4249 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4250 if (bitpos % bitsize != 0)
4252 error ("vector insertion not at element boundary");
4253 return true;
4256 return false;
4258 case DOT_PROD_EXPR:
4259 case REALIGN_LOAD_EXPR:
4260 /* FIXME. */
4261 return false;
4263 default:
4264 gcc_unreachable ();
4266 return false;
4269 /* Verify a gimple assignment statement STMT with a single rhs.
4270 Returns true if anything is wrong. */
4272 static bool
4273 verify_gimple_assign_single (gassign *stmt)
4275 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4276 tree lhs = gimple_assign_lhs (stmt);
4277 tree lhs_type = TREE_TYPE (lhs);
4278 tree rhs1 = gimple_assign_rhs1 (stmt);
4279 tree rhs1_type = TREE_TYPE (rhs1);
   /* RES accumulates non-fatal reference-check failures; fatal errors
      return true immediately.  */
4280 bool res = false;
4282 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4284 error ("non-trivial conversion at assignment");
4285 debug_generic_expr (lhs_type);
4286 debug_generic_expr (rhs1_type);
4287 return true;
4290 if (gimple_clobber_p (stmt)
4291 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4293 error ("non-decl/MEM_REF LHS in clobber statement");
4294 debug_generic_expr (lhs);
4295 return true;
4298 if (handled_component_p (lhs)
4299 || TREE_CODE (lhs) == MEM_REF
4300 || TREE_CODE (lhs) == TARGET_MEM_REF)
4301 res |= verify_types_in_gimple_reference (lhs, true);
4303 /* Special codes we cannot handle via their class. */
4304 switch (rhs_code)
4306 case ADDR_EXPR:
4308 tree op = TREE_OPERAND (rhs1, 0);
4309 if (!is_gimple_addressable (op))
4311 error ("invalid operand in unary expression");
4312 return true;
4315 /* Technically there is no longer a need for matching types, but
4316 gimple hygiene asks for this check. In LTO we can end up
4317 combining incompatible units and thus end up with addresses
4318 of globals that change their type to a common one. */
4319 if (!in_lto_p
4320 && !types_compatible_p (TREE_TYPE (op),
4321 TREE_TYPE (TREE_TYPE (rhs1)))
4322 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4323 TREE_TYPE (op)))
4325 error ("type mismatch in address expression");
4326 debug_generic_stmt (TREE_TYPE (rhs1));
4327 debug_generic_stmt (TREE_TYPE (op));
4328 return true;
4331 return verify_types_in_gimple_reference (op, true);
4334 /* tcc_reference */
4335 case INDIRECT_REF:
4336 error ("INDIRECT_REF in gimple IL");
4337 return true;
4339 case COMPONENT_REF:
4340 case BIT_FIELD_REF:
4341 case ARRAY_REF:
4342 case ARRAY_RANGE_REF:
4343 case VIEW_CONVERT_EXPR:
4344 case REALPART_EXPR:
4345 case IMAGPART_EXPR:
4346 case TARGET_MEM_REF:
4347 case MEM_REF:
   /* A memory load may not be stored directly to memory; the value
      must pass through a register first.  */
4348 if (!is_gimple_reg (lhs)
4349 && is_gimple_reg_type (TREE_TYPE (lhs)))
4351 error ("invalid rhs for gimple memory store");
4352 debug_generic_stmt (lhs);
4353 debug_generic_stmt (rhs1);
4354 return true;
4356 return res || verify_types_in_gimple_reference (rhs1, false);
4358 /* tcc_constant */
4359 case SSA_NAME:
4360 case INTEGER_CST:
4361 case REAL_CST:
4362 case FIXED_CST:
4363 case COMPLEX_CST:
4364 case VECTOR_CST:
4365 case STRING_CST:
4366 return res;
4368 /* tcc_declaration */
4369 case CONST_DECL:
4370 return res;
4371 case VAR_DECL:
4372 case PARM_DECL:
4373 if (!is_gimple_reg (lhs)
4374 && !is_gimple_reg (rhs1)
4375 && is_gimple_reg_type (TREE_TYPE (lhs)))
4377 error ("invalid rhs for gimple memory store");
4378 debug_generic_stmt (lhs);
4379 debug_generic_stmt (rhs1);
4380 return true;
4382 return res;
4384 case CONSTRUCTOR:
4385 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4387 unsigned int i;
4388 tree elt_i, elt_v, elt_t = NULL_TREE;
4390 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4391 return res;
4392 /* For vector CONSTRUCTORs we require that either it is empty
4393 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4394 (then the element count must be correct to cover the whole
4395 outer vector and index must be NULL on all elements, or it is
4396 a CONSTRUCTOR of scalar elements, where we as an exception allow
4397 smaller number of elements (assuming zero filling) and
4398 consecutive indexes as compared to NULL indexes (such
4399 CONSTRUCTORs can appear in the IL from FEs). */
4400 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
   /* ELT_T latches the type of the first element; all later elements
      are checked against it.  */
4402 if (elt_t == NULL_TREE)
4404 elt_t = TREE_TYPE (elt_v);
4405 if (TREE_CODE (elt_t) == VECTOR_TYPE)
   /* NOTE(review): the inner elt_t below intentionally shadows the
      outer one with the same value for the sub-vector checks.  */
4407 tree elt_t = TREE_TYPE (elt_v);
4408 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4409 TREE_TYPE (elt_t)))
4411 error ("incorrect type of vector CONSTRUCTOR"
4412 " elements");
4413 debug_generic_stmt (rhs1);
4414 return true;
4416 else if (CONSTRUCTOR_NELTS (rhs1)
4417 * TYPE_VECTOR_SUBPARTS (elt_t)
4418 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4420 error ("incorrect number of vector CONSTRUCTOR"
4421 " elements");
4422 debug_generic_stmt (rhs1);
4423 return true;
4426 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4427 elt_t))
4429 error ("incorrect type of vector CONSTRUCTOR elements");
4430 debug_generic_stmt (rhs1);
4431 return true;
4433 else if (CONSTRUCTOR_NELTS (rhs1)
4434 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4436 error ("incorrect number of vector CONSTRUCTOR elements");
4437 debug_generic_stmt (rhs1);
4438 return true;
4441 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4443 error ("incorrect type of vector CONSTRUCTOR elements");
4444 debug_generic_stmt (rhs1);
4445 return true;
4447 if (elt_i != NULL_TREE
4448 && (TREE_CODE (elt_t) == VECTOR_TYPE
4449 || TREE_CODE (elt_i) != INTEGER_CST
4450 || compare_tree_int (elt_i, i) != 0))
4452 error ("vector CONSTRUCTOR with non-NULL element index");
4453 debug_generic_stmt (rhs1);
4454 return true;
4456 if (!is_gimple_val (elt_v))
4458 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4459 debug_generic_stmt (rhs1);
4460 return true;
4464 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4466 error ("non-vector CONSTRUCTOR with elements");
4467 debug_generic_stmt (rhs1);
4468 return true;
4470 return res;
4471 case OBJ_TYPE_REF:
4472 case ASSERT_EXPR:
4473 case WITH_SIZE_EXPR:
4474 /* FIXME. */
4475 return res;
4477 default:;
4480 return res;
4483 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4484 is a problem, otherwise false. */
4486 static bool
4487 verify_gimple_assign (gassign *stmt)
4489 switch (gimple_assign_rhs_class (stmt))
4491 case GIMPLE_SINGLE_RHS:
4492 return verify_gimple_assign_single (stmt);
4494 case GIMPLE_UNARY_RHS:
4495 return verify_gimple_assign_unary (stmt);
4497 case GIMPLE_BINARY_RHS:
4498 return verify_gimple_assign_binary (stmt);
4500 case GIMPLE_TERNARY_RHS:
4501 return verify_gimple_assign_ternary (stmt);
4503 default:
4504 gcc_unreachable ();
4508 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4509 is a problem, otherwise false. */
4511 static bool
4512 verify_gimple_return (greturn *stmt)
4514 tree op = gimple_return_retval (stmt);
4515 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4517 /* We cannot test for present return values as we do not fix up missing
4518 return values from the original source. */
4519 if (op == NULL)
4520 return false;
4522 if (!is_gimple_val (op)
4523 && TREE_CODE (op) != RESULT_DECL)
4525 error ("invalid operand in return statement");
4526 debug_generic_stmt (op);
4527 return true;
   /* For a result returned by invisible reference, replace OP by its
      type so the TREE_TYPE (op) below looks one level deeper — this
      matches how by-reference RESULT_DECLs are typed.
      NOTE(review): OP holds a type node after this, not a value.  */
4530 if ((TREE_CODE (op) == RESULT_DECL
4531 && DECL_BY_REFERENCE (op))
4532 || (TREE_CODE (op) == SSA_NAME
4533 && SSA_NAME_VAR (op)
4534 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4535 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4536 op = TREE_TYPE (op);
4538 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4540 error ("invalid conversion in return statement");
4541 debug_generic_stmt (restype);
4542 debug_generic_stmt (TREE_TYPE (op));
4543 return true;
4546 return false;
4550 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4551 is a problem, otherwise false. */
4553 static bool
4554 verify_gimple_goto (ggoto *stmt)
4556 tree dest = gimple_goto_dest (stmt);
4558 /* ??? We have two canonical forms of direct goto destinations, a
4559 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4560 if (TREE_CODE (dest) != LABEL_DECL
4561 && (!is_gimple_val (dest)
4562 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4564 error ("goto destination is neither a label nor a pointer");
4565 return true;
4568 return false;
4571 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4572 is a problem, otherwise false. */
4574 static bool
4575 verify_gimple_switch (gswitch *stmt)
4577 unsigned int i, n;
4578 tree elt, prev_upper_bound = NULL_TREE;
4579 tree index_type, elt_type = NULL_TREE;
4581 if (!is_gimple_val (gimple_switch_index (stmt)))
4583 error ("invalid operand to switch statement");
4584 debug_generic_stmt (gimple_switch_index (stmt));
4585 return true;
4588 index_type = TREE_TYPE (gimple_switch_index (stmt));
4589 if (! INTEGRAL_TYPE_P (index_type))
4591 error ("non-integral type switch statement");
4592 debug_generic_expr (index_type);
4593 return true;
   /* Label 0 is the default case and must have no CASE_LOW/CASE_HIGH.  */
4596 elt = gimple_switch_label (stmt, 0);
4597 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4599 error ("invalid default case label in switch statement");
4600 debug_generic_expr (elt);
4601 return true;
   /* Check the remaining labels: each must have a lower bound, ranges
      must be non-empty, all bounds must share one type compatible with
      the index type, and labels must be sorted and non-overlapping.  */
4604 n = gimple_switch_num_labels (stmt);
4605 for (i = 1; i < n; i++)
4607 elt = gimple_switch_label (stmt, i);
4609 if (! CASE_LOW (elt))
4611 error ("invalid case label in switch statement");
4612 debug_generic_expr (elt);
4613 return true;
4615 if (CASE_HIGH (elt)
4616 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4618 error ("invalid case range in switch statement");
4619 debug_generic_expr (elt);
4620 return true;
4623 if (elt_type)
4625 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4626 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4628 error ("type mismatch for case label in switch statement");
4629 debug_generic_expr (elt);
4630 return true;
4633 else
   /* First non-default label fixes the common case-label type.  */
4635 elt_type = TREE_TYPE (CASE_LOW (elt));
4636 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4638 error ("type precision mismatch in switch statement");
4639 return true;
4643 if (prev_upper_bound)
4645 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4647 error ("case labels not sorted in switch statement");
4648 return true;
   /* Remember this label's upper bound for the sortedness check.  */
4652 prev_upper_bound = CASE_HIGH (elt);
4653 if (! prev_upper_bound)
4654 prev_upper_bound = CASE_LOW (elt);
4657 return false;
4660 /* Verify a gimple debug statement STMT.
4661 Returns true if anything is wrong. */
4663 static bool
4664 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4666 /* There isn't much that could be wrong in a gimple debug stmt. A
4667 gimple debug bind stmt, for example, maps a tree, that's usually
4668 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4669 component or member of an aggregate type, to another tree, that
4670 can be an arbitrary expression. These stmts expand into debug
4671 insns, and are converted to debug notes by var-tracking.c. */
4672 return false;
4675 /* Verify a gimple label statement STMT.
4676 Returns true if anything is wrong. */
4678 static bool
4679 verify_gimple_label (glabel *stmt)
4681 tree decl = gimple_label_label (stmt);
4682 int uid;
   /* Unlike the other checkers, ERR accumulates so several label
      problems can be reported from one statement.  */
4683 bool err = false;
4685 if (TREE_CODE (decl) != LABEL_DECL)
4686 return true;
   /* Ordinary labels must belong to the current function; non-local
      and forced labels are exempt.  */
4687 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4688 && DECL_CONTEXT (decl) != current_function_decl)
4690 error ("label's context is not the current function decl");
4691 err |= true;
   /* With a CFG the label must map back to the block containing STMT.  */
4694 uid = LABEL_DECL_UID (decl);
4695 if (cfun->cfg
4696 && (uid == -1
4697 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4699 error ("incorrect entry in label_to_block_map");
4700 err |= true;
   /* An EH landing-pad label must be the post-landing-pad label of the
      landing pad it claims to belong to.  */
4703 uid = EH_LANDING_PAD_NR (decl);
4704 if (uid)
4706 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4707 if (decl != lp->post_landing_pad)
4709 error ("incorrect setting of landing pad number");
4710 err |= true;
4714 return err;
4717 /* Verify a gimple cond statement STMT.
4718 Returns true if anything is wrong. */
4720 static bool
4721 verify_gimple_cond (gcond *stmt)
4723 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4725 error ("invalid comparison code in gimple cond");
4726 return true;
4728 if (!(!gimple_cond_true_label (stmt)
4729 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4730 || !(!gimple_cond_false_label (stmt)
4731 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4733 error ("invalid labels in gimple cond");
4734 return true;
4737 return verify_gimple_comparison (boolean_type_node,
4738 gimple_cond_lhs (stmt),
4739 gimple_cond_rhs (stmt),
4740 gimple_cond_code (stmt));
4743 /* Verify the GIMPLE statement STMT. Returns true if there is an
4744 error, otherwise false. */
4746 static bool
4747 verify_gimple_stmt (gimple *stmt)
4749 switch (gimple_code (stmt))
4751 case GIMPLE_ASSIGN:
4752 return verify_gimple_assign (as_a <gassign *> (stmt));
4754 case GIMPLE_LABEL:
4755 return verify_gimple_label (as_a <glabel *> (stmt));
4757 case GIMPLE_CALL:
4758 return verify_gimple_call (as_a <gcall *> (stmt));
4760 case GIMPLE_COND:
4761 return verify_gimple_cond (as_a <gcond *> (stmt));
4763 case GIMPLE_GOTO:
4764 return verify_gimple_goto (as_a <ggoto *> (stmt));
4766 case GIMPLE_SWITCH:
4767 return verify_gimple_switch (as_a <gswitch *> (stmt));
4769 case GIMPLE_RETURN:
4770 return verify_gimple_return (as_a <greturn *> (stmt));
4772 case GIMPLE_ASM:
4773 return false;
4775 case GIMPLE_TRANSACTION:
4776 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4778 /* Tuples that do not have tree operands. */
4779 case GIMPLE_NOP:
4780 case GIMPLE_PREDICT:
4781 case GIMPLE_RESX:
4782 case GIMPLE_EH_DISPATCH:
4783 case GIMPLE_EH_MUST_NOT_THROW:
4784 return false;
4786 CASE_GIMPLE_OMP:
4787 /* OpenMP directives are validated by the FE and never operated
4788 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4789 non-gimple expressions when the main index variable has had
4790 its address taken. This does not affect the loop itself
4791 because the header of an GIMPLE_OMP_FOR is merely used to determine
4792 how to setup the parallel iteration. */
4793 return false;
4795 case GIMPLE_DEBUG:
4796 return verify_gimple_debug (stmt);
4798 default:
4799 gcc_unreachable ();
4803 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4804 and false otherwise. */
4806 static bool
4807 verify_gimple_phi (gimple *phi)
4809 bool err = false;
4810 unsigned i;
4811 tree phi_result = gimple_phi_result (phi);
4812 bool virtual_p;
4814 if (!phi_result)
4816 error ("invalid PHI result");
4817 return true;
4820 virtual_p = virtual_operand_p (phi_result);
4821 if (TREE_CODE (phi_result) != SSA_NAME
4822 || (virtual_p
4823 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4825 error ("invalid PHI result");
4826 err = true;
4829 for (i = 0; i < gimple_phi_num_args (phi); i++)
4831 tree t = gimple_phi_arg_def (phi, i);
4833 if (!t)
4835 error ("missing PHI def");
4836 err |= true;
4837 continue;
4839 /* Addressable variables do have SSA_NAMEs but they
4840 are not considered gimple values. */
4841 else if ((TREE_CODE (t) == SSA_NAME
4842 && virtual_p != virtual_operand_p (t))
4843 || (virtual_p
4844 && (TREE_CODE (t) != SSA_NAME
4845 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4846 || (!virtual_p
4847 && !is_gimple_val (t)))
4849 error ("invalid PHI argument");
4850 debug_generic_expr (t);
4851 err |= true;
4853 #ifdef ENABLE_TYPES_CHECKING
4854 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4856 error ("incompatible types in PHI argument %u", i);
4857 debug_generic_stmt (TREE_TYPE (phi_result));
4858 debug_generic_stmt (TREE_TYPE (t));
4859 err |= true;
4861 #endif
4864 return err;
4867 /* Verify the GIMPLE statements inside the sequence STMTS. */
4869 static bool
4870 verify_gimple_in_seq_2 (gimple_seq stmts)
4872 gimple_stmt_iterator ittr;
4873 bool err = false;
4875 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4877 gimple *stmt = gsi_stmt (ittr);
4879 switch (gimple_code (stmt))
4881 case GIMPLE_BIND:
4882 err |= verify_gimple_in_seq_2 (
4883 gimple_bind_body (as_a <gbind *> (stmt)));
4884 break;
4886 case GIMPLE_TRY:
4887 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4888 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4889 break;
4891 case GIMPLE_EH_FILTER:
4892 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4893 break;
4895 case GIMPLE_EH_ELSE:
4897 geh_else *eh_else = as_a <geh_else *> (stmt);
4898 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4899 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4901 break;
4903 case GIMPLE_CATCH:
4904 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4905 as_a <gcatch *> (stmt)));
4906 break;
4908 case GIMPLE_TRANSACTION:
4909 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4910 break;
4912 default:
4914 bool err2 = verify_gimple_stmt (stmt);
4915 if (err2)
4916 debug_gimple_stmt (stmt);
4917 err |= err2;
4922 return err;
4925 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4926 is a problem, otherwise false. */
4928 static bool
4929 verify_gimple_transaction (gtransaction *stmt)
4931 tree lab;
4933 lab = gimple_transaction_label_norm (stmt);
4934 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4935 return true;
4936 lab = gimple_transaction_label_uninst (stmt);
4937 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4938 return true;
4939 lab = gimple_transaction_label_over (stmt);
4940 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4941 return true;
4943 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4947 /* Verify the GIMPLE statements inside the statement list STMTS. */
4949 DEBUG_FUNCTION void
4950 verify_gimple_in_seq (gimple_seq stmts)
4952 timevar_push (TV_TREE_STMT_VERIFY);
4953 if (verify_gimple_in_seq_2 (stmts))
4954 internal_error ("verify_gimple failed");
4955 timevar_pop (TV_TREE_STMT_VERIFY);
4958 /* Return true when the T can be shared. */
4960 static bool
4961 tree_node_can_be_shared (tree t)
4963 if (IS_TYPE_OR_DECL_P (t)
4964 || is_gimple_min_invariant (t)
4965 || TREE_CODE (t) == SSA_NAME
4966 || t == error_mark_node
4967 || TREE_CODE (t) == IDENTIFIER_NODE)
4968 return true;
4970 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4971 return true;
4973 if (DECL_P (t))
4974 return true;
4976 return false;
4979 /* Called via walk_tree. Verify tree sharing. */
4981 static tree
4982 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4984 hash_set<void *> *visited = (hash_set<void *> *) data;
4986 if (tree_node_can_be_shared (*tp))
4988 *walk_subtrees = false;
4989 return NULL;
4992 if (visited->add (*tp))
4993 return *tp;
4995 return NULL;
4998 /* Called via walk_gimple_stmt. Verify tree sharing. */
5000 static tree
5001 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5003 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5004 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5007 static bool eh_error_found;
5008 bool
5009 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5010 hash_set<gimple *> *visited)
5012 if (!visited->contains (stmt))
5014 error ("dead STMT in EH table");
5015 debug_gimple_stmt (stmt);
5016 eh_error_found = true;
5018 return true;
5021 /* Verify if the location LOCs block is in BLOCKS. */
5023 static bool
5024 verify_location (hash_set<tree> *blocks, location_t loc)
5026 tree block = LOCATION_BLOCK (loc);
5027 if (block != NULL_TREE
5028 && !blocks->contains (block))
5030 error ("location references block not in block tree");
5031 return true;
5033 if (block != NULL_TREE)
5034 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5035 return false;
5038 /* Called via walk_tree. Verify that expressions have no blocks. */
5040 static tree
5041 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5043 if (!EXPR_P (*tp))
5045 *walk_subtrees = false;
5046 return NULL;
5049 location_t loc = EXPR_LOCATION (*tp);
5050 if (LOCATION_BLOCK (loc) != NULL)
5051 return *tp;
5053 return NULL;
5056 /* Called via walk_tree. Verify locations of expressions. */
5058 static tree
5059 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5061 hash_set<tree> *blocks = (hash_set<tree> *) data;
5063 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5065 tree t = DECL_DEBUG_EXPR (*tp);
5066 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5067 if (addr)
5068 return addr;
5070 if ((VAR_P (*tp)
5071 || TREE_CODE (*tp) == PARM_DECL
5072 || TREE_CODE (*tp) == RESULT_DECL)
5073 && DECL_HAS_VALUE_EXPR_P (*tp))
5075 tree t = DECL_VALUE_EXPR (*tp);
5076 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5077 if (addr)
5078 return addr;
5081 if (!EXPR_P (*tp))
5083 *walk_subtrees = false;
5084 return NULL;
5087 location_t loc = EXPR_LOCATION (*tp);
5088 if (verify_location (blocks, loc))
5089 return *tp;
5091 return NULL;
5094 /* Called via walk_gimple_op. Verify locations of expressions. */
5096 static tree
5097 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5099 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5100 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5103 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5105 static void
5106 collect_subblocks (hash_set<tree> *blocks, tree block)
5108 tree t;
5109 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5111 blocks->add (t);
5112 collect_subblocks (blocks, t);
5116 /* Verify the GIMPLE statements in the CFG of FN. */
5118 DEBUG_FUNCTION void
5119 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5121 basic_block bb;
5122 bool err = false;
5124 timevar_push (TV_TREE_STMT_VERIFY);
5125 hash_set<void *> visited;
5126 hash_set<gimple *> visited_stmts;
5128 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5129 hash_set<tree> blocks;
5130 if (DECL_INITIAL (fn->decl))
5132 blocks.add (DECL_INITIAL (fn->decl));
5133 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5136 FOR_EACH_BB_FN (bb, fn)
5138 gimple_stmt_iterator gsi;
5140 for (gphi_iterator gpi = gsi_start_phis (bb);
5141 !gsi_end_p (gpi);
5142 gsi_next (&gpi))
5144 gphi *phi = gpi.phi ();
5145 bool err2 = false;
5146 unsigned i;
5148 visited_stmts.add (phi);
5150 if (gimple_bb (phi) != bb)
5152 error ("gimple_bb (phi) is set to a wrong basic block");
5153 err2 = true;
5156 err2 |= verify_gimple_phi (phi);
5158 /* Only PHI arguments have locations. */
5159 if (gimple_location (phi) != UNKNOWN_LOCATION)
5161 error ("PHI node with location");
5162 err2 = true;
5165 for (i = 0; i < gimple_phi_num_args (phi); i++)
5167 tree arg = gimple_phi_arg_def (phi, i);
5168 tree addr = walk_tree (&arg, verify_node_sharing_1,
5169 &visited, NULL);
5170 if (addr)
5172 error ("incorrect sharing of tree nodes");
5173 debug_generic_expr (addr);
5174 err2 |= true;
5176 location_t loc = gimple_phi_arg_location (phi, i);
5177 if (virtual_operand_p (gimple_phi_result (phi))
5178 && loc != UNKNOWN_LOCATION)
5180 error ("virtual PHI with argument locations");
5181 err2 = true;
5183 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5184 if (addr)
5186 debug_generic_expr (addr);
5187 err2 = true;
5189 err2 |= verify_location (&blocks, loc);
5192 if (err2)
5193 debug_gimple_stmt (phi);
5194 err |= err2;
5197 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5199 gimple *stmt = gsi_stmt (gsi);
5200 bool err2 = false;
5201 struct walk_stmt_info wi;
5202 tree addr;
5203 int lp_nr;
5205 visited_stmts.add (stmt);
5207 if (gimple_bb (stmt) != bb)
5209 error ("gimple_bb (stmt) is set to a wrong basic block");
5210 err2 = true;
5213 err2 |= verify_gimple_stmt (stmt);
5214 err2 |= verify_location (&blocks, gimple_location (stmt));
5216 memset (&wi, 0, sizeof (wi));
5217 wi.info = (void *) &visited;
5218 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5219 if (addr)
5221 error ("incorrect sharing of tree nodes");
5222 debug_generic_expr (addr);
5223 err2 |= true;
5226 memset (&wi, 0, sizeof (wi));
5227 wi.info = (void *) &blocks;
5228 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5229 if (addr)
5231 debug_generic_expr (addr);
5232 err2 |= true;
5235 /* ??? Instead of not checking these stmts at all the walker
5236 should know its context via wi. */
5237 if (!is_gimple_debug (stmt)
5238 && !is_gimple_omp (stmt))
5240 memset (&wi, 0, sizeof (wi));
5241 addr = walk_gimple_op (stmt, verify_expr, &wi);
5242 if (addr)
5244 debug_generic_expr (addr);
5245 inform (gimple_location (stmt), "in statement");
5246 err2 |= true;
5250 /* If the statement is marked as part of an EH region, then it is
5251 expected that the statement could throw. Verify that when we
5252 have optimizations that simplify statements such that we prove
5253 that they cannot throw, that we update other data structures
5254 to match. */
5255 lp_nr = lookup_stmt_eh_lp (stmt);
5256 if (lp_nr > 0)
5258 if (!stmt_could_throw_p (stmt))
5260 if (verify_nothrow)
5262 error ("statement marked for throw, but doesn%'t");
5263 err2 |= true;
5266 else if (!gsi_one_before_end_p (gsi))
5268 error ("statement marked for throw in middle of block");
5269 err2 |= true;
5273 if (err2)
5274 debug_gimple_stmt (stmt);
5275 err |= err2;
5279 eh_error_found = false;
5280 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5281 if (eh_table)
5282 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5283 (&visited_stmts);
5285 if (err || eh_error_found)
5286 internal_error ("verify_gimple failed");
5288 verify_histograms ();
5289 timevar_pop (TV_TREE_STMT_VERIFY);
5293 /* Verifies that the flow information is OK. */
5295 static int
5296 gimple_verify_flow_info (void)
5298 int err = 0;
5299 basic_block bb;
5300 gimple_stmt_iterator gsi;
5301 gimple *stmt;
5302 edge e;
5303 edge_iterator ei;
5305 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5306 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5308 error ("ENTRY_BLOCK has IL associated with it");
5309 err = 1;
5312 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5313 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5315 error ("EXIT_BLOCK has IL associated with it");
5316 err = 1;
5319 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5320 if (e->flags & EDGE_FALLTHRU)
5322 error ("fallthru to exit from bb %d", e->src->index);
5323 err = 1;
5326 FOR_EACH_BB_FN (bb, cfun)
5328 bool found_ctrl_stmt = false;
5330 stmt = NULL;
5332 /* Skip labels on the start of basic block. */
5333 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5335 tree label;
5336 gimple *prev_stmt = stmt;
5338 stmt = gsi_stmt (gsi);
5340 if (gimple_code (stmt) != GIMPLE_LABEL)
5341 break;
5343 label = gimple_label_label (as_a <glabel *> (stmt));
5344 if (prev_stmt && DECL_NONLOCAL (label))
5346 error ("nonlocal label ");
5347 print_generic_expr (stderr, label);
5348 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5349 bb->index);
5350 err = 1;
5353 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5355 error ("EH landing pad label ");
5356 print_generic_expr (stderr, label);
5357 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5358 bb->index);
5359 err = 1;
5362 if (label_to_block (label) != bb)
5364 error ("label ");
5365 print_generic_expr (stderr, label);
5366 fprintf (stderr, " to block does not match in bb %d",
5367 bb->index);
5368 err = 1;
5371 if (decl_function_context (label) != current_function_decl)
5373 error ("label ");
5374 print_generic_expr (stderr, label);
5375 fprintf (stderr, " has incorrect context in bb %d",
5376 bb->index);
5377 err = 1;
5381 /* Verify that body of basic block BB is free of control flow. */
5382 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5384 gimple *stmt = gsi_stmt (gsi);
5386 if (found_ctrl_stmt)
5388 error ("control flow in the middle of basic block %d",
5389 bb->index);
5390 err = 1;
5393 if (stmt_ends_bb_p (stmt))
5394 found_ctrl_stmt = true;
5396 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5398 error ("label ");
5399 print_generic_expr (stderr, gimple_label_label (label_stmt));
5400 fprintf (stderr, " in the middle of basic block %d", bb->index);
5401 err = 1;
5405 gsi = gsi_last_bb (bb);
5406 if (gsi_end_p (gsi))
5407 continue;
5409 stmt = gsi_stmt (gsi);
5411 if (gimple_code (stmt) == GIMPLE_LABEL)
5412 continue;
5414 err |= verify_eh_edges (stmt);
5416 if (is_ctrl_stmt (stmt))
5418 FOR_EACH_EDGE (e, ei, bb->succs)
5419 if (e->flags & EDGE_FALLTHRU)
5421 error ("fallthru edge after a control statement in bb %d",
5422 bb->index);
5423 err = 1;
5427 if (gimple_code (stmt) != GIMPLE_COND)
5429 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5430 after anything else but if statement. */
5431 FOR_EACH_EDGE (e, ei, bb->succs)
5432 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5434 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5435 bb->index);
5436 err = 1;
5440 switch (gimple_code (stmt))
5442 case GIMPLE_COND:
5444 edge true_edge;
5445 edge false_edge;
5447 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5449 if (!true_edge
5450 || !false_edge
5451 || !(true_edge->flags & EDGE_TRUE_VALUE)
5452 || !(false_edge->flags & EDGE_FALSE_VALUE)
5453 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5454 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5455 || EDGE_COUNT (bb->succs) >= 3)
5457 error ("wrong outgoing edge flags at end of bb %d",
5458 bb->index);
5459 err = 1;
5462 break;
5464 case GIMPLE_GOTO:
5465 if (simple_goto_p (stmt))
5467 error ("explicit goto at end of bb %d", bb->index);
5468 err = 1;
5470 else
5472 /* FIXME. We should double check that the labels in the
5473 destination blocks have their address taken. */
5474 FOR_EACH_EDGE (e, ei, bb->succs)
5475 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5476 | EDGE_FALSE_VALUE))
5477 || !(e->flags & EDGE_ABNORMAL))
5479 error ("wrong outgoing edge flags at end of bb %d",
5480 bb->index);
5481 err = 1;
5484 break;
5486 case GIMPLE_CALL:
5487 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5488 break;
5489 /* fallthru */
5490 case GIMPLE_RETURN:
5491 if (!single_succ_p (bb)
5492 || (single_succ_edge (bb)->flags
5493 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5494 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5496 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5497 err = 1;
5499 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5501 error ("return edge does not point to exit in bb %d",
5502 bb->index);
5503 err = 1;
5505 break;
5507 case GIMPLE_SWITCH:
5509 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5510 tree prev;
5511 edge e;
5512 size_t i, n;
5514 n = gimple_switch_num_labels (switch_stmt);
5516 /* Mark all the destination basic blocks. */
5517 for (i = 0; i < n; ++i)
5519 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5520 basic_block label_bb = label_to_block (lab);
5521 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5522 label_bb->aux = (void *)1;
5525 /* Verify that the case labels are sorted. */
5526 prev = gimple_switch_label (switch_stmt, 0);
5527 for (i = 1; i < n; ++i)
5529 tree c = gimple_switch_label (switch_stmt, i);
5530 if (!CASE_LOW (c))
5532 error ("found default case not at the start of "
5533 "case vector");
5534 err = 1;
5535 continue;
5537 if (CASE_LOW (prev)
5538 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5540 error ("case labels not sorted: ");
5541 print_generic_expr (stderr, prev);
5542 fprintf (stderr," is greater than ");
5543 print_generic_expr (stderr, c);
5544 fprintf (stderr," but comes before it.\n");
5545 err = 1;
5547 prev = c;
5549 /* VRP will remove the default case if it can prove it will
5550 never be executed. So do not verify there always exists
5551 a default case here. */
5553 FOR_EACH_EDGE (e, ei, bb->succs)
5555 if (!e->dest->aux)
5557 error ("extra outgoing edge %d->%d",
5558 bb->index, e->dest->index);
5559 err = 1;
5562 e->dest->aux = (void *)2;
5563 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5564 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5566 error ("wrong outgoing edge flags at end of bb %d",
5567 bb->index);
5568 err = 1;
5572 /* Check that we have all of them. */
5573 for (i = 0; i < n; ++i)
5575 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5576 basic_block label_bb = label_to_block (lab);
5578 if (label_bb->aux != (void *)2)
5580 error ("missing edge %i->%i", bb->index, label_bb->index);
5581 err = 1;
5585 FOR_EACH_EDGE (e, ei, bb->succs)
5586 e->dest->aux = (void *)0;
5588 break;
5590 case GIMPLE_EH_DISPATCH:
5591 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5592 break;
5594 default:
5595 break;
5599 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5600 verify_dominators (CDI_DOMINATORS);
5602 return err;
5606 /* Updates phi nodes after creating a forwarder block joined
5607 by edge FALLTHRU. */
5609 static void
5610 gimple_make_forwarder_block (edge fallthru)
5612 edge e;
5613 edge_iterator ei;
5614 basic_block dummy, bb;
5615 tree var;
5616 gphi_iterator gsi;
5618 dummy = fallthru->src;
5619 bb = fallthru->dest;
5621 if (single_pred_p (bb))
5622 return;
5624 /* If we redirected a branch we must create new PHI nodes at the
5625 start of BB. */
5626 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5628 gphi *phi, *new_phi;
5630 phi = gsi.phi ();
5631 var = gimple_phi_result (phi);
5632 new_phi = create_phi_node (var, bb);
5633 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5634 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5635 UNKNOWN_LOCATION);
5638 /* Add the arguments we have stored on edges. */
5639 FOR_EACH_EDGE (e, ei, bb->preds)
5641 if (e == fallthru)
5642 continue;
5644 flush_pending_stmts (e);
5649 /* Return a non-special label in the head of basic block BLOCK.
5650 Create one if it doesn't exist. */
5652 tree
5653 gimple_block_label (basic_block bb)
5655 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5656 bool first = true;
5657 tree label;
5658 glabel *stmt;
5660 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5662 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5663 if (!stmt)
5664 break;
5665 label = gimple_label_label (stmt);
5666 if (!DECL_NONLOCAL (label))
5668 if (!first)
5669 gsi_move_before (&i, &s);
5670 return label;
5674 label = create_artificial_label (UNKNOWN_LOCATION);
5675 stmt = gimple_build_label (label);
5676 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5677 return label;
5681 /* Attempt to perform edge redirection by replacing a possibly complex
5682 jump instruction by a goto or by removing the jump completely.
5683 This can apply only if all edges now point to the same block. The
5684 parameters and return values are equivalent to
5685 redirect_edge_and_branch. */
5687 static edge
5688 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5690 basic_block src = e->src;
5691 gimple_stmt_iterator i;
5692 gimple *stmt;
5694 /* We can replace or remove a complex jump only when we have exactly
5695 two edges. */
5696 if (EDGE_COUNT (src->succs) != 2
5697 /* Verify that all targets will be TARGET. Specifically, the
5698 edge that is not E must also go to TARGET. */
5699 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5700 return NULL;
5702 i = gsi_last_bb (src);
5703 if (gsi_end_p (i))
5704 return NULL;
5706 stmt = gsi_stmt (i);
5708 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5710 gsi_remove (&i, true);
5711 e = ssa_redirect_edge (e, target);
5712 e->flags = EDGE_FALLTHRU;
5713 return e;
5716 return NULL;
5720 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5721 edge representing the redirected branch. */
5723 static edge
5724 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5726 basic_block bb = e->src;
5727 gimple_stmt_iterator gsi;
5728 edge ret;
5729 gimple *stmt;
5731 if (e->flags & EDGE_ABNORMAL)
5732 return NULL;
5734 if (e->dest == dest)
5735 return NULL;
5737 if (e->flags & EDGE_EH)
5738 return redirect_eh_edge (e, dest);
5740 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5742 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5743 if (ret)
5744 return ret;
5747 gsi = gsi_last_bb (bb);
5748 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5750 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5752 case GIMPLE_COND:
5753 /* For COND_EXPR, we only need to redirect the edge. */
5754 break;
5756 case GIMPLE_GOTO:
5757 /* No non-abnormal edges should lead from a non-simple goto, and
5758 simple ones should be represented implicitly. */
5759 gcc_unreachable ();
5761 case GIMPLE_SWITCH:
5763 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5764 tree label = gimple_block_label (dest);
5765 tree cases = get_cases_for_edge (e, switch_stmt);
5767 /* If we have a list of cases associated with E, then use it
5768 as it's a lot faster than walking the entire case vector. */
5769 if (cases)
5771 edge e2 = find_edge (e->src, dest);
5772 tree last, first;
5774 first = cases;
5775 while (cases)
5777 last = cases;
5778 CASE_LABEL (cases) = label;
5779 cases = CASE_CHAIN (cases);
5782 /* If there was already an edge in the CFG, then we need
5783 to move all the cases associated with E to E2. */
5784 if (e2)
5786 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5788 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5789 CASE_CHAIN (cases2) = first;
5791 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5793 else
5795 size_t i, n = gimple_switch_num_labels (switch_stmt);
5797 for (i = 0; i < n; i++)
5799 tree elt = gimple_switch_label (switch_stmt, i);
5800 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5801 CASE_LABEL (elt) = label;
5805 break;
5807 case GIMPLE_ASM:
5809 gasm *asm_stmt = as_a <gasm *> (stmt);
5810 int i, n = gimple_asm_nlabels (asm_stmt);
5811 tree label = NULL;
5813 for (i = 0; i < n; ++i)
5815 tree cons = gimple_asm_label_op (asm_stmt, i);
5816 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5818 if (!label)
5819 label = gimple_block_label (dest);
5820 TREE_VALUE (cons) = label;
5824 /* If we didn't find any label matching the former edge in the
5825 asm labels, we must be redirecting the fallthrough
5826 edge. */
5827 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5829 break;
5831 case GIMPLE_RETURN:
5832 gsi_remove (&gsi, true);
5833 e->flags |= EDGE_FALLTHRU;
5834 break;
5836 case GIMPLE_OMP_RETURN:
5837 case GIMPLE_OMP_CONTINUE:
5838 case GIMPLE_OMP_SECTIONS_SWITCH:
5839 case GIMPLE_OMP_FOR:
5840 /* The edges from OMP constructs can be simply redirected. */
5841 break;
5843 case GIMPLE_EH_DISPATCH:
5844 if (!(e->flags & EDGE_FALLTHRU))
5845 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5846 break;
5848 case GIMPLE_TRANSACTION:
5849 if (e->flags & EDGE_TM_ABORT)
5850 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5851 gimple_block_label (dest));
5852 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5853 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5854 gimple_block_label (dest));
5855 else
5856 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5857 gimple_block_label (dest));
5858 break;
5860 default:
5861 /* Otherwise it must be a fallthru edge, and we don't need to
5862 do anything besides redirecting it. */
5863 gcc_assert (e->flags & EDGE_FALLTHRU);
5864 break;
5867 /* Update/insert PHI nodes as necessary. */
5869 /* Now update the edges in the CFG. */
5870 e = ssa_redirect_edge (e, dest);
5872 return e;
5875 /* Returns true if it is possible to remove edge E by redirecting
5876 it to the destination of the other edge from E->src. */
5878 static bool
5879 gimple_can_remove_branch_p (const_edge e)
5881 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5882 return false;
5884 return true;
5887 /* Simple wrapper, as we can always redirect fallthru edges. */
5889 static basic_block
5890 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5892 e = gimple_redirect_edge_and_branch (e, dest);
5893 gcc_assert (e);
5895 return NULL;
5899 /* Splits basic block BB after statement STMT (but at least after the
5900 labels). If STMT is NULL, BB is split just after the labels. */
5902 static basic_block
5903 gimple_split_block (basic_block bb, void *stmt)
5905 gimple_stmt_iterator gsi;
5906 gimple_stmt_iterator gsi_tgt;
5907 gimple_seq list;
5908 basic_block new_bb;
5909 edge e;
5910 edge_iterator ei;
5912 new_bb = create_empty_bb (bb);
5914 /* Redirect the outgoing edges. */
5915 new_bb->succs = bb->succs;
5916 bb->succs = NULL;
5917 FOR_EACH_EDGE (e, ei, new_bb->succs)
5918 e->src = new_bb;
5920 /* Get a stmt iterator pointing to the first stmt to move. */
5921 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5922 gsi = gsi_after_labels (bb);
5923 else
5925 gsi = gsi_for_stmt ((gimple *) stmt);
5926 gsi_next (&gsi);
5929 /* Move everything from GSI to the new basic block. */
5930 if (gsi_end_p (gsi))
5931 return new_bb;
5933 /* Split the statement list - avoid re-creating new containers as this
5934 brings ugly quadratic memory consumption in the inliner.
5935 (We are still quadratic since we need to update stmt BB pointers,
5936 sadly.) */
5937 gsi_split_seq_before (&gsi, &list);
5938 set_bb_seq (new_bb, list);
5939 for (gsi_tgt = gsi_start (list);
5940 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5941 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5943 return new_bb;
5947 /* Moves basic block BB after block AFTER. */
5949 static bool
5950 gimple_move_block_after (basic_block bb, basic_block after)
5952 if (bb->prev_bb == after)
5953 return true;
5955 unlink_block (bb);
5956 link_block (bb, after);
5958 return true;
5962 /* Return TRUE if block BB has no executable statements, otherwise return
5963 FALSE. */
5965 static bool
5966 gimple_empty_block_p (basic_block bb)
5968 /* BB must have no executable statements. */
5969 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5970 if (phi_nodes (bb))
5971 return false;
5972 if (gsi_end_p (gsi))
5973 return true;
5974 if (is_gimple_debug (gsi_stmt (gsi)))
5975 gsi_next_nondebug (&gsi);
5976 return gsi_end_p (gsi);
5980 /* Split a basic block if it ends with a conditional branch and if the
5981 other part of the block is not empty. */
5983 static basic_block
5984 gimple_split_block_before_cond_jump (basic_block bb)
5986 gimple *last, *split_point;
5987 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5988 if (gsi_end_p (gsi))
5989 return NULL;
5990 last = gsi_stmt (gsi);
5991 if (gimple_code (last) != GIMPLE_COND
5992 && gimple_code (last) != GIMPLE_SWITCH)
5993 return NULL;
5994 gsi_prev (&gsi);
5995 split_point = gsi_stmt (gsi);
5996 return split_block (bb, split_point)->dest;
6000 /* Return true if basic_block can be duplicated. */
6002 static bool
6003 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6005 return true;
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form — the caller is expected to fix up PHI arguments
   and the SSA web (e.g. via add_phi_args_after_copy).  The new block
   is created just before the exit block.  Returns the new block.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      /* Result-less PHI; a fresh SSA name for the result is created
	 below by create_new_def_for.  */
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  /* Copy the real statements, skipping labels and label debug binds.  */
  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The arguments are taken from the corresponding edge between the
   original blocks (blocks that were duplicated carry BB_DUPLICATED and
   map back via get_bb_original).  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* No PHIs at the destination means nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original (if it is a copy).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of the original and copied destinations in lockstep,
     copying the argument that flowed in over the original edge E.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
6138 /* Basic block BB_COPY was created by code duplication. Add phi node
6139 arguments for edges going out of BB_COPY. The blocks that were
6140 duplicated have BB_DUPLICATED set. */
6142 void
6143 add_phi_args_after_copy_bb (basic_block bb_copy)
6145 edge e_copy;
6146 edge_iterator ei;
6148 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6150 add_phi_args_after_copy_edge (e_copy);
6154 /* Blocks in REGION_COPY array of length N_REGION were created by
6155 duplication of basic blocks. Add phi node arguments for edges
6156 going from these blocks. If E_COPY is not NULL, also add
6157 phi node arguments for its destination.*/
6159 void
6160 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6161 edge e_copy)
6163 unsigned i;
6165 for (i = 0; i < n_region; i++)
6166 region_copy[i]->flags |= BB_DUPLICATED;
6168 for (i = 0; i < n_region; i++)
6169 add_phi_args_after_copy_bb (region_copy[i]);
6170 if (e_copy)
6171 add_phi_args_after_copy_edge (e_copy);
6173 for (i = 0; i < n_region; i++)
6174 region_copy[i]->flags &= ~BB_DUPLICATED;
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copy belongs to the enclosing loop.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Collect profile data used to rescale the original blocks and the
     copies below: profile counts when available, frequencies otherwise.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  /* After header copying the old exit becomes the new latch edge.  */
  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6331 /* Checks if BB is part of the region defined by N_REGION BBS. */
6332 static bool
6333 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6335 unsigned int n;
6337 for (n = 0; n < n_region; n++)
6339 if (bb == bbs[n])
6340 return true;
6342 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			    basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			    basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* The exiting block must end in a two-way branch.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any subloops whose header lies inside the region.  */
  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      {
	cloop = duplicate_loop (aloop, target);
	duplicate_subloops (aloop, cloop);
      }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Collect profile data used to rescale the original blocks and the
     copies: counts when available, frequencies otherwise.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* The old fall-through edge of SWITCH_BB keeps the flags of the
     non-exit branch; a new edge to the copied region gets the exit
     branch flags.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6535 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6536 adding blocks when the dominator traversal reaches EXIT. This
6537 function silently assumes that ENTRY strictly dominates EXIT. */
6539 void
6540 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6541 vec<basic_block> *bbs_p)
6543 basic_block son;
6545 for (son = first_dom_son (CDI_DOMINATORS, entry);
6546 son;
6547 son = next_dom_son (CDI_DOMINATORS, son))
6549 bbs_p->safe_push (son);
6550 if (son != exit)
6551 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP so that repeated requests
   for the same decl return the same copy.  Only VAR/PARM/RESULT decls
   and CONST_DECLs are expected here.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Already owned by the destination function; nothing to do.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  /* Register the copy as a local of the destination function.  */
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    /* Reuse the duplicate made on a previous call.  */
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones; repeated
   calls for the same NAME return the cached replacement.  Virtual
   operands must not be passed here.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying decl into TO_CONTEXT first, then
	     make the new SSA name on top of the duplicate.  */
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	/* Anonymous SSA name; copy it directly.  */
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Bag of state threaded through the move_stmt_r/move_stmt_op walkers
   when moving statements between functions (see move_block_to_fn).  */

struct move_stmt_d
{
  /* BLOCK the statements originally belonged to; NULL_TREE means
     "any non-NULL block".  */
  tree orig_block;
  /* BLOCK to assign to moved statements/expressions.  */
  tree new_block;
  /* Source and destination function decls.  */
  tree from_context;
  tree to_context;
  /* Map from old decls/SSA names to their duplicates in TO_CONTEXT.  */
  hash_map<tree, tree> *vars_map;
  /* Optional map from old LABEL_DECLs to replacement labels.  */
  htab_t new_label_map;
  /* Map from source EH regions to the duplicated regions.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be remapped (cleared inside OMP bodies).  */
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  Intended as
   a walk_tree callback (via walk_gimple_stmt); WI->info carries the
   move_stmt_d state.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	TREE_SET_BLOCK (t, p->new_block);
      else if (flag_checking)
	{
	  /* Sanity check: the expression's block must be nested inside
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	/* PARM_DECL replacements must have been seeded into the map by
	   the caller.  */
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6712 /* Helper for move_stmt_r. Given an EH region number for the source
6713 function, map that to the duplicate EH regio number in the dest. */
6715 static int
6716 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6718 eh_region old_r, new_r;
6720 old_r = get_eh_region_from_number (old_nr);
6721 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6723 return new_r->index;
6726 /* Similar, but operate on INTEGER_CSTs. */
6728 static tree
6729 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6731 int old_nr, new_nr;
6733 old_nr = tree_to_shwi (old_t_nr);
6734 new_nr = move_stmt_eh_region_nr (old_nr, p);
6736 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Also remaps EH region numbers embedded in statements and
   in the __builtin_eh_* calls.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by roughly 25% beyond the needed size.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget argument locations from ORIG_BLOCK to NEW_BLOCK.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label from the source label-to-block map to the
	     destination one, growing the latter if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH and histogram information to the destination.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget goto locations on outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
6992 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6993 the outermost EH region. Use REGION as the incoming base EH region. */
6995 static eh_region
6996 find_outermost_region_in_block (struct function *src_cfun,
6997 basic_block bb, eh_region region)
6999 gimple_stmt_iterator si;
7001 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7003 gimple *stmt = gsi_stmt (si);
7004 eh_region stmt_region;
7005 int lp_nr;
7007 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7008 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7009 if (stmt_region)
7011 if (region == NULL)
7012 region = stmt_region;
7013 else if (stmt_region != region)
7015 region = eh_region_outermost (src_cfun, stmt_region, region);
7016 gcc_assert (region != NULL);
7021 return region;
/* Callback used when moving labels between functions: create an
   artificial replacement label for DECL, record the DECL -> new-label
   mapping in the hash table passed via DATA, and return the new label.
   Asserts that DECL really is a LABEL_DECL and that it was not mapped
   before.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  /* Keep the destination CFG's label UID watermark up to date.  */
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
7049 /* Tree walker to replace the decls used inside value expressions by
7050 duplicates. */
7052 static tree
7053 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7055 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7057 switch (TREE_CODE (*tp))
7059 case VAR_DECL:
7060 case PARM_DECL:
7061 case RESULT_DECL:
7062 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7063 break;
7064 default:
7065 break;
7068 if (IS_TYPE_OR_DECL_P (*tp))
7069 *walk_subtrees = false;
7071 return NULL;
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks, replacing each variable by its duplicate (recorded in
   VARS_MAP) belonging to TO_CONTEXT.  Value expressions attached to
   replaced variables are rewritten as well.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      /* NOTE(review): the result of unshare_expr is discarded
		 here, so X still aliases the original value expr —
		 presumably intentional or harmless, but verify.  */
	      unshare_expr (x);
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the
	     original.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
7109 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7110 from FN1 to FN2. */
7112 static void
7113 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7114 struct loop *loop)
7116 /* Discard it from the old loop array. */
7117 (*get_loops (fn1))[loop->num] = NULL;
7119 /* Place it in the new loop array, assigning it a new number. */
7120 loop->num = number_of_loops (fn2);
7121 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7123 /* Recurse to children. */
7124 for (loop = loop->inner; loop; loop = loop->next)
7125 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.
   Aborts via gcc_assert on any violation.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Collect the region's block indices into a bitmap for membership
     tests.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  /* The entry has exactly one predecessor, from outside.  */
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	/* Every other block's predecessors lie inside the region.  */
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  /* The exit has exactly one successor, outside the region.  */
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	/* Every other block's successors lie inside the region.  */
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
7182 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7184 bool
7185 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7187 bitmap release_names = (bitmap)data;
7189 if (TREE_CODE (from) != SSA_NAME)
7190 return true;
7192 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7193 return true;
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.

   TODO: investigate whether we can reuse gimple_duplicate_sese_region to
   reimplement move_sese_region_to_fn by duplicating the region rather than
   moving it.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  if (flag_checking)
    verify_sese (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  /* remove_edge invalidates the iterator, hence ei_safe_edge with no
     explicit increment: each removal shifts the next edge into view.  */
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      /* Find the outermost EH region covering any block of the moved
	 region, then duplicate it (and everything inside) into
	 DEST_CFUN.  */
      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    struct loops *l = loops_for_fn (saved_cfun);
	    loop_exit **slot
	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
					       NO_INSERT);
	    if (slot)
	      l->exits->clear_slot (slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.
     The "+ 2" presumably accounts for the new function's own ENTRY and
     EXIT blocks in addition to the moved region -- TODO confirm.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn.  Blocks whose
     loop_father is LOOP or LOOP0 get reparented to DEST_CFUN's root
     via this ->aux redirection.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  /* Give every argument of the new function a default-def SSA name and
     remember the mapping so uses inside the region get remapped.  */
  if (gimple_in_ssa_p (cfun))
    for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
      {
	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
	set_ssa_default_def (dest_cfun, arg, narg);
	vars_map.put (arg, narg);
      }

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  /* Propagate simduid/force_vectorize markers to DEST_CFUN, remapping
     the simduid decls into the new function's context.  */
  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vectorize)
	      dest_cfun->has_force_vectorize_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    &vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    delete eh_map;

  if (gimple_in_ssa_p (cfun))
    {
      /* We need to release ssa-names in a defined order, so first find them,
	 and then iterate in ascending version order.  */
      bitmap release_names = BITMAP_ALLOC (NULL);
      vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
      bitmap_iterator bi;
      unsigned i;
      EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
	release_ssa_name (ssa_name (i));
      BITMAP_FREE (release_names);
    }

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
7521 /* Dump default def DEF to file FILE using FLAGS and indentation
7522 SPC. */
7524 static void
7525 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7527 for (int i = 0; i < spc; ++i)
7528 fprintf (file, " ");
7529 dump_ssaname_info_to_file (file, def, spc);
7531 print_generic_expr (file, TREE_TYPE (def), flags);
7532 fprintf (file, " ");
7533 print_generic_expr (file, def, flags);
7534 fprintf (file, " = ");
7535 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7536 fprintf (file, ";\n");
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Print any function attributes first, C-source style.  */
  if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
    {
      fprintf (file, "__attribute__((");

      bool first = true;
      tree chain;
      for (chain = DECL_ATTRIBUTES (fndecl); chain;
	   first = false, chain = TREE_CHAIN (chain))
	{
	  if (!first)
	    fprintf (file, ", ");

	  print_generic_expr (file, get_attribute_name (chain), dump_flags);
	  if (TREE_VALUE (chain) != NULL_TREE)
	    {
	      fprintf (file, " (");
	      print_generic_expr (file, TREE_VALUE (chain), dump_flags);
	      fprintf (file, ")");
	    }
	}

      fprintf (file, "))\n");
    }

  current_function_decl = fndecl;
  /* Function signature.  The GIMPLE FE dump form prints the return type
     and a __GIMPLE marker before the name.  */
  if (flags & TDF_GIMPLE)
    {
      print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
			  dump_flags | TDF_SLIM);
      fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
    }
  else
    fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* Raw dump requested but no GIMPLE body: dump the tree node itself
     and bail out early, restoring current_function_decl.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (gimple_in_ssa_p (fun)
	  && (flags & TDF_ALIAS))
	{
	  /* With TDF_ALIAS, also show the default defs of the
	     arguments, the by-reference result, and the static chain.  */
	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
	       arg = DECL_CHAIN (arg))
	    {
	      tree def = ssa_default_def (fun, arg);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree res = DECL_RESULT (fun->decl);
	  if (res != NULL_TREE
	      && DECL_BY_REFERENCE (res))
	    {
	      tree def = ssa_default_def (fun, res);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }

	  tree static_chain = fun->static_chain_decl;
	  if (static_chain != NULL_TREE)
	    {
	      tree def = ssa_default_def (fun, static_chain);
	      if (def)
		dump_default_def (file, def, 2, flags);
	    }
	}

      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }

      tree name;

      /* Also declare anonymous SSA names (those with no underlying
	 variable decl).  */
      if (gimple_in_ssa_p (cfun))
	FOR_EACH_SSA_NAME (ix, name, cfun)
	  {
	    if (!SSA_NAME_VAR (name))
	      {
		fprintf (file, " ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  /* NOTE(review): this branch dereferences FUN without a NULL check,
     unlike the branch above -- presumably FUN is always non-NULL once
     a GIMPLE body exists; confirm against callers.  */
  else if (fun->curr_properties & PROP_gimple_any)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */

DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  dump_function_to_file (fn, stderr, flags);
}
7772 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7774 static void
7775 print_pred_bbs (FILE *file, basic_block bb)
7777 edge e;
7778 edge_iterator ei;
7780 FOR_EACH_EDGE (e, ei, bb->preds)
7781 fprintf (file, "bb_%d ", e->src->index);
7785 /* Print on FILE the indexes for the successors of basic_block BB. */
7787 static void
7788 print_succ_bbs (FILE *file, basic_block bb)
7790 edge e;
7791 edge_iterator ei;
7793 FOR_EACH_EDGE (e, ei, bb->succs)
7794 fprintf (file, "bb_%d ", e->dest->index);
7797 /* Print to FILE the basic block BB following the VERBOSITY level. */
7799 void
7800 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7802 char *s_indent = (char *) alloca ((size_t) indent + 1);
7803 memset ((void *) s_indent, ' ', (size_t) indent);
7804 s_indent[indent] = '\0';
7806 /* Print basic_block's header. */
7807 if (verbosity >= 2)
7809 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7810 print_pred_bbs (file, bb);
7811 fprintf (file, "}, succs = {");
7812 print_succ_bbs (file, bb);
7813 fprintf (file, "})\n");
7816 /* Print basic_block's body. */
7817 if (verbosity >= 3)
7819 fprintf (file, "%s {\n", s_indent);
7820 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7821 fprintf (file, "%s }\n", s_indent);
7825 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop with no header has been removed from the CFG; nothing
	 more to print.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations);

  /* Iteration-count bounds, each printed only when known.  */
  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }
  if (loop->any_likely_upper_bound)
    {
      fprintf (file, ", likely_upper_bound = ");
      print_decu (loop->nb_iterations_likely_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Blocks directly owned by this loop, then inner loops recursively.  */
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7891 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7892 spaces. Following VERBOSITY level this outputs the contents of the
7893 loop, or just its structure. */
7895 static void
7896 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7897 int verbosity)
7899 if (loop == NULL)
7900 return;
7902 print_loop (file, loop, indent, verbosity);
7903 print_loop_and_siblings (file, loop->next, indent, verbosity);
7906 /* Follow a CFG edge from the entry point of the program, and on entry
7907 of a loop, pretty print the loop structure on FILE. */
7909 void
7910 print_loops (FILE *file, int verbosity)
7912 basic_block bb;
7914 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7915 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7916 if (bb && bb->loop_father)
7917 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump loop REF to stderr, structure only (verbosity 0).  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7928 DEBUG_FUNCTION void
7929 debug (struct loop *ptr)
7931 if (ptr)
7932 debug (*ptr);
7933 else
7934 fprintf (stderr, "<nil>\n");
/* Dump a loop verbosely.  */

/* Dump loop REF to stderr with full contents (verbosity 3).  */
DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7945 DEBUG_FUNCTION void
7946 debug_verbose (struct loop *ptr)
7948 if (ptr)
7949 debug (*ptr);
7950 else
7951 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Prints the whole loop hierarchy of the current function to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM of the current
   function, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
7980 /* Return true if BB ends with a call, possibly followed by some
7981 instructions that must stay with the call. Return false,
7982 otherwise. */
7984 static bool
7985 gimple_block_ends_with_call_p (basic_block bb)
7987 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7988 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7992 /* Return true if BB ends with a conditional branch. Return false,
7993 otherwise. */
7995 static bool
7996 gimple_block_ends_with_condjump_p (const_basic_block bb)
7998 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7999 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */

bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A known nothrow builtin that cannot return twice is trusted not
     to terminate the block abnormally.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Non-looping const/pure calls cannot terminate abnormally.  */
      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call still terminates abnormally if its block has
	 any non-fake outgoing edge.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile or input asm statements may do anything, including not
     falling through.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Only the fixed ENTRY/EXIT blocks exist: nothing to do.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces the potentially-terminating statement
		 into its own block, away from the edge to EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's statements backwards so that splitting the
	 block does not disturb the statements still to be visited.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Removing a path inside a non-root loop may change loop ownership
     of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF := successors of the removed blocks that are not themselves
	 removed, i.e. the dominance frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8320 /* Purge dead EH edges from basic block BB. */
8322 bool
8323 gimple_purge_dead_eh_edges (basic_block bb)
8325 bool changed = false;
8326 edge e;
8327 edge_iterator ei;
8328 gimple *stmt = last_stmt (bb);
8330 if (stmt && stmt_can_throw_internal (stmt))
8331 return false;
8333 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8335 if (e->flags & EDGE_EH)
8337 remove_edge_and_dominated_blocks (e);
8338 changed = true;
8340 else
8341 ei_next (&ei);
8344 return changed;
8347 /* Purge dead EH edges from basic block listed in BLOCKS. */
8349 bool
8350 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8352 bool changed = false;
8353 unsigned i;
8354 bitmap_iterator bi;
8356 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8358 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8360 /* Earlier gimple_purge_dead_eh_edges could have removed
8361 this basic block already. */
8362 gcc_assert (bb || changed);
8363 if (bb != NULL)
8364 changed |= gimple_purge_dead_eh_edges (bb);
8367 return changed;
8370 /* Purge dead abnormal call edges from basic block BB. */
8372 bool
8373 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8375 bool changed = false;
8376 edge e;
8377 edge_iterator ei;
8378 gimple *stmt = last_stmt (bb);
8380 if (!cfun->has_nonlocal_label
8381 && !cfun->calls_setjmp)
8382 return false;
8384 if (stmt && stmt_can_make_abnormal_goto (stmt))
8385 return false;
8387 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8389 if (e->flags & EDGE_ABNORMAL)
8391 if (e->flags & EDGE_FALLTHRU)
8392 e->flags &= ~EDGE_ABNORMAL;
8393 else
8394 remove_edge_and_dominated_blocks (e);
8395 changed = true;
8397 else
8398 ei_next (&ei);
8401 return changed;
8404 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8406 bool
8407 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8409 bool changed = false;
8410 unsigned i;
8411 bitmap_iterator bi;
8413 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8415 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8417 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8418 this basic block already. */
8419 gcc_assert (bb || changed);
8420 if (bb != NULL)
8421 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8424 return changed;
8427 /* This function is called whenever a new edge is created or
8428 redirected. */
8430 static void
8431 gimple_execute_on_growing_pred (edge e)
8433 basic_block bb = e->dest;
8435 if (!gimple_seq_empty_p (phi_nodes (bb)))
8436 reserve_phi_args_for_new_edge (bb);
8439 /* This function is called immediately before edge E is removed from
8440 the edge vector E->dest->preds. */
8442 static void
8443 gimple_execute_on_shrinking_pred (edge e)
8445 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8446 remove_phi_args (e);
8449 /*---------------------------------------------------------------------------
8450 Helper functions for Loop versioning
8451 ---------------------------------------------------------------------------*/
8453 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8454 of 'first'. Both of them are dominated by 'new_head' basic block. When
8455 'new_head' was created by 'second's incoming edge it received phi arguments
8456 on the edge by split_edge(). Later, additional edge 'e' was created to
8457 connect 'new_head' and 'first'. Now this routine adds phi args on this
8458 additional edge 'e' that new_head to second edge received as part of edge
8459 splitting. */
8461 static void
8462 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8463 basic_block new_head, edge e)
8465 gphi *phi1, *phi2;
8466 gphi_iterator psi1, psi2;
8467 tree def;
8468 edge e2 = find_edge (new_head, second);
8470 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8471 edge, we should always have an edge from NEW_HEAD to SECOND. */
8472 gcc_assert (e2 != NULL);
8474 /* Browse all 'second' basic block phi nodes and add phi args to
8475 edge 'e' for 'first' head. PHI args are always in correct order. */
8477 for (psi2 = gsi_start_phis (second),
8478 psi1 = gsi_start_phis (first);
8479 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8480 gsi_next (&psi2), gsi_next (&psi1))
8482 phi1 = psi1.phi ();
8483 phi2 = psi2.phi ();
8484 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8485 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8490 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8491 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8492 the destination of the ELSE part. */
8494 static void
8495 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8496 basic_block second_head ATTRIBUTE_UNUSED,
8497 basic_block cond_bb, void *cond_e)
8499 gimple_stmt_iterator gsi;
8500 gimple *new_cond_expr;
8501 tree cond_expr = (tree) cond_e;
8502 edge e0;
8504 /* Build new conditional expr */
8505 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8506 NULL_TREE, NULL_TREE);
8508 /* Add new cond in cond_bb. */
8509 gsi = gsi_last_bb (cond_bb);
8510 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8512 /* Adjust edges appropriately to connect new head with first head
8513 as well as second head. */
8514 e0 = single_succ_edge (cond_bb);
8515 e0->flags &= ~EDGE_FALLTHRU;
8516 e0->flags |= EDGE_FALSE_VALUE;
8520 /* Do book-keeping of basic block BB for the profile consistency checker.
8521 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8522 then do post-pass accounting. Store the counting in RECORD. */
8523 static void
8524 gimple_account_profile_record (basic_block bb, int after_pass,
8525 struct profile_record *record)
8527 gimple_stmt_iterator i;
8528 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8530 record->size[after_pass]
8531 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8532 if (profile_status_for_fn (cfun) == PROFILE_READ)
8533 record->time[after_pass]
8534 += estimate_num_insns (gsi_stmt (i),
8535 &eni_time_weights) * bb->count;
8536 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8537 record->time[after_pass]
8538 += estimate_num_insns (gsi_stmt (i),
8539 &eni_time_weights) * bb->frequency;
8543 struct cfg_hooks gimple_cfg_hooks = {
8544 "gimple",
8545 gimple_verify_flow_info,
8546 gimple_dump_bb, /* dump_bb */
8547 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8548 create_bb, /* create_basic_block */
8549 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8550 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8551 gimple_can_remove_branch_p, /* can_remove_branch_p */
8552 remove_bb, /* delete_basic_block */
8553 gimple_split_block, /* split_block */
8554 gimple_move_block_after, /* move_block_after */
8555 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8556 gimple_merge_blocks, /* merge_blocks */
8557 gimple_predict_edge, /* predict_edge */
8558 gimple_predicted_by_p, /* predicted_by_p */
8559 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8560 gimple_duplicate_bb, /* duplicate_block */
8561 gimple_split_edge, /* split_edge */
8562 gimple_make_forwarder_block, /* make_forward_block */
8563 NULL, /* tidy_fallthru_edge */
8564 NULL, /* force_nonfallthru */
8565 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8566 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8567 gimple_flow_call_edges_add, /* flow_call_edges_add */
8568 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8569 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8570 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8571 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8572 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8573 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8574 flush_pending_stmts, /* flush_pending_stmts */
8575 gimple_empty_block_p, /* block_empty_p */
8576 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8577 gimple_account_profile_record,
8581 /* Split all critical edges. */
8583 unsigned int
8584 split_critical_edges (void)
8586 basic_block bb;
8587 edge e;
8588 edge_iterator ei;
8590 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8591 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8592 mappings around the calls to split_edge. */
8593 start_recording_case_labels ();
8594 FOR_ALL_BB_FN (bb, cfun)
8596 FOR_EACH_EDGE (e, ei, bb->succs)
8598 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8599 split_edge (e);
8600 /* PRE inserts statements to edges and expects that
8601 since split_critical_edges was done beforehand, committing edge
8602 insertions will not split more edges. In addition to critical
8603 edges we must split edges that have multiple successors and
8604 end by control flow statements, such as RESX.
8605 Go ahead and split them too. This matches the logic in
8606 gimple_find_edge_insert_loc. */
8607 else if ((!single_pred_p (e->dest)
8608 || !gimple_seq_empty_p (phi_nodes (e->dest))
8609 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8610 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8611 && !(e->flags & EDGE_ABNORMAL))
8613 gimple_stmt_iterator gsi;
8615 gsi = gsi_last_bb (e->src);
8616 if (!gsi_end_p (gsi)
8617 && stmt_ends_bb_p (gsi_stmt (gsi))
8618 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8619 && !gimple_call_builtin_p (gsi_stmt (gsi),
8620 BUILT_IN_RETURN)))
8621 split_edge (e);
8625 end_recording_case_labels ();
8626 return 0;
8629 namespace {
8631 const pass_data pass_data_split_crit_edges =
8633 GIMPLE_PASS, /* type */
8634 "crited", /* name */
8635 OPTGROUP_NONE, /* optinfo_flags */
8636 TV_TREE_SPLIT_EDGES, /* tv_id */
8637 PROP_cfg, /* properties_required */
8638 PROP_no_crit_edges, /* properties_provided */
8639 0, /* properties_destroyed */
8640 0, /* todo_flags_start */
8641 0, /* todo_flags_finish */
8644 class pass_split_crit_edges : public gimple_opt_pass
8646 public:
8647 pass_split_crit_edges (gcc::context *ctxt)
8648 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8651 /* opt_pass methods: */
8652 virtual unsigned int execute (function *) { return split_critical_edges (); }
8654 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8655 }; // class pass_split_crit_edges
8657 } // anon namespace
8659 gimple_opt_pass *
8660 make_pass_split_crit_edges (gcc::context *ctxt)
8662 return new pass_split_crit_edges (ctxt);
8666 /* Insert COND expression which is GIMPLE_COND after STMT
8667 in basic block BB with appropriate basic block split
8668 and creation of a new conditionally executed basic block.
8669 Return created basic block. */
8670 basic_block
8671 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8673 edge fall = split_block (bb, stmt);
8674 gimple_stmt_iterator iter = gsi_last_bb (bb);
8675 basic_block new_bb;
8677 /* Insert cond statement. */
8678 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8679 if (gsi_end_p (iter))
8680 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8681 else
8682 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8684 /* Create conditionally executed block. */
8685 new_bb = create_empty_bb (bb);
8686 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8687 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8689 /* Fix edge for split bb. */
8690 fall->flags = EDGE_FALSE_VALUE;
8692 /* Update dominance info. */
8693 if (dom_info_available_p (CDI_DOMINATORS))
8695 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8696 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8699 /* Update loop info. */
8700 if (current_loops)
8701 add_bb_to_loop (new_bb, bb->loop_father);
8703 return new_bb;
8706 /* Build a ternary operation and gimplify it. Emit code before GSI.
8707 Return the gimple_val holding the result. */
8709 tree
8710 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8711 tree type, tree a, tree b, tree c)
8713 tree ret;
8714 location_t loc = gimple_location (gsi_stmt (*gsi));
8716 ret = fold_build3_loc (loc, code, type, a, b, c);
8717 STRIP_NOPS (ret);
8719 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8720 GSI_SAME_STMT);
8723 /* Build a binary operation and gimplify it. Emit code before GSI.
8724 Return the gimple_val holding the result. */
8726 tree
8727 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8728 tree type, tree a, tree b)
8730 tree ret;
8732 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8733 STRIP_NOPS (ret);
8735 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8736 GSI_SAME_STMT);
8739 /* Build a unary operation and gimplify it. Emit code before GSI.
8740 Return the gimple_val holding the result. */
8742 tree
8743 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8744 tree a)
8746 tree ret;
8748 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8749 STRIP_NOPS (ret);
8751 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8752 GSI_SAME_STMT);
8757 /* Given a basic block B which ends with a conditional and has
8758 precisely two successors, determine which of the edges is taken if
8759 the conditional is true and which is taken if the conditional is
8760 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8762 void
8763 extract_true_false_edges_from_block (basic_block b,
8764 edge *true_edge,
8765 edge *false_edge)
8767 edge e = EDGE_SUCC (b, 0);
8769 if (e->flags & EDGE_TRUE_VALUE)
8771 *true_edge = e;
8772 *false_edge = EDGE_SUCC (b, 1);
8774 else
8776 *false_edge = e;
8777 *true_edge = EDGE_SUCC (b, 1);
8782 /* From a controlling predicate in the immediate dominator DOM of
8783 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8784 predicate evaluates to true and false and store them to
8785 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8786 they are non-NULL. Returns true if the edges can be determined,
8787 else return false. */
8789 bool
8790 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8791 edge *true_controlled_edge,
8792 edge *false_controlled_edge)
8794 basic_block bb = phiblock;
8795 edge true_edge, false_edge, tem;
8796 edge e0 = NULL, e1 = NULL;
8798 /* We have to verify that one edge into the PHI node is dominated
8799 by the true edge of the predicate block and the other edge
8800 dominated by the false edge. This ensures that the PHI argument
8801 we are going to take is completely determined by the path we
8802 take from the predicate block.
8803 We can only use BB dominance checks below if the destination of
8804 the true/false edges are dominated by their edge, thus only
8805 have a single predecessor. */
8806 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8807 tem = EDGE_PRED (bb, 0);
8808 if (tem == true_edge
8809 || (single_pred_p (true_edge->dest)
8810 && (tem->src == true_edge->dest
8811 || dominated_by_p (CDI_DOMINATORS,
8812 tem->src, true_edge->dest))))
8813 e0 = tem;
8814 else if (tem == false_edge
8815 || (single_pred_p (false_edge->dest)
8816 && (tem->src == false_edge->dest
8817 || dominated_by_p (CDI_DOMINATORS,
8818 tem->src, false_edge->dest))))
8819 e1 = tem;
8820 else
8821 return false;
8822 tem = EDGE_PRED (bb, 1);
8823 if (tem == true_edge
8824 || (single_pred_p (true_edge->dest)
8825 && (tem->src == true_edge->dest
8826 || dominated_by_p (CDI_DOMINATORS,
8827 tem->src, true_edge->dest))))
8828 e0 = tem;
8829 else if (tem == false_edge
8830 || (single_pred_p (false_edge->dest)
8831 && (tem->src == false_edge->dest
8832 || dominated_by_p (CDI_DOMINATORS,
8833 tem->src, false_edge->dest))))
8834 e1 = tem;
8835 else
8836 return false;
8837 if (!e0 || !e1)
8838 return false;
8840 if (true_controlled_edge)
8841 *true_controlled_edge = e0;
8842 if (false_controlled_edge)
8843 *false_controlled_edge = e1;
8845 return true;
8850 /* Emit return warnings. */
8852 namespace {
8854 const pass_data pass_data_warn_function_return =
8856 GIMPLE_PASS, /* type */
8857 "*warn_function_return", /* name */
8858 OPTGROUP_NONE, /* optinfo_flags */
8859 TV_NONE, /* tv_id */
8860 PROP_cfg, /* properties_required */
8861 0, /* properties_provided */
8862 0, /* properties_destroyed */
8863 0, /* todo_flags_start */
8864 0, /* todo_flags_finish */
8867 class pass_warn_function_return : public gimple_opt_pass
8869 public:
8870 pass_warn_function_return (gcc::context *ctxt)
8871 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8874 /* opt_pass methods: */
8875 virtual unsigned int execute (function *);
8877 }; // class pass_warn_function_return
8879 unsigned int
8880 pass_warn_function_return::execute (function *fun)
8882 source_location location;
8883 gimple *last;
8884 edge e;
8885 edge_iterator ei;
8887 if (!targetm.warn_func_return (fun->decl))
8888 return 0;
8890 /* If we have a path to EXIT, then we do return. */
8891 if (TREE_THIS_VOLATILE (fun->decl)
8892 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8894 location = UNKNOWN_LOCATION;
8895 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8897 last = last_stmt (e->src);
8898 if ((gimple_code (last) == GIMPLE_RETURN
8899 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8900 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8901 break;
8903 if (location == UNKNOWN_LOCATION)
8904 location = cfun->function_end_locus;
8905 warning_at (location, 0, "%<noreturn%> function does return");
8908 /* If we see "return;" in some basic block, then we do reach the end
8909 without returning a value. */
8910 else if (warn_return_type
8911 && !TREE_NO_WARNING (fun->decl)
8912 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8913 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8915 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8917 gimple *last = last_stmt (e->src);
8918 greturn *return_stmt = dyn_cast <greturn *> (last);
8919 if (return_stmt
8920 && gimple_return_retval (return_stmt) == NULL
8921 && !gimple_no_warning_p (last))
8923 location = gimple_location (last);
8924 if (location == UNKNOWN_LOCATION)
8925 location = fun->function_end_locus;
8926 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8927 TREE_NO_WARNING (fun->decl) = 1;
8928 break;
8932 return 0;
8935 } // anon namespace
8937 gimple_opt_pass *
8938 make_pass_warn_function_return (gcc::context *ctxt)
8940 return new pass_warn_function_return (ctxt);
8943 /* Walk a gimplified function and warn for functions whose return value is
8944 ignored and attribute((warn_unused_result)) is set. This is done before
8945 inlining, so we don't have to worry about that. */
8947 static void
8948 do_warn_unused_result (gimple_seq seq)
8950 tree fdecl, ftype;
8951 gimple_stmt_iterator i;
8953 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8955 gimple *g = gsi_stmt (i);
8957 switch (gimple_code (g))
8959 case GIMPLE_BIND:
8960 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8961 break;
8962 case GIMPLE_TRY:
8963 do_warn_unused_result (gimple_try_eval (g));
8964 do_warn_unused_result (gimple_try_cleanup (g));
8965 break;
8966 case GIMPLE_CATCH:
8967 do_warn_unused_result (gimple_catch_handler (
8968 as_a <gcatch *> (g)));
8969 break;
8970 case GIMPLE_EH_FILTER:
8971 do_warn_unused_result (gimple_eh_filter_failure (g));
8972 break;
8974 case GIMPLE_CALL:
8975 if (gimple_call_lhs (g))
8976 break;
8977 if (gimple_call_internal_p (g))
8978 break;
8980 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8981 LHS. All calls whose value is ignored should be
8982 represented like this. Look for the attribute. */
8983 fdecl = gimple_call_fndecl (g);
8984 ftype = gimple_call_fntype (g);
8986 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8988 location_t loc = gimple_location (g);
8990 if (fdecl)
8991 warning_at (loc, OPT_Wunused_result,
8992 "ignoring return value of %qD, "
8993 "declared with attribute warn_unused_result",
8994 fdecl);
8995 else
8996 warning_at (loc, OPT_Wunused_result,
8997 "ignoring return value of function "
8998 "declared with attribute warn_unused_result");
9000 break;
9002 default:
9003 /* Not a container, not a call, or a call whose value is used. */
9004 break;
9009 namespace {
9011 const pass_data pass_data_warn_unused_result =
9013 GIMPLE_PASS, /* type */
9014 "*warn_unused_result", /* name */
9015 OPTGROUP_NONE, /* optinfo_flags */
9016 TV_NONE, /* tv_id */
9017 PROP_gimple_any, /* properties_required */
9018 0, /* properties_provided */
9019 0, /* properties_destroyed */
9020 0, /* todo_flags_start */
9021 0, /* todo_flags_finish */
9024 class pass_warn_unused_result : public gimple_opt_pass
9026 public:
9027 pass_warn_unused_result (gcc::context *ctxt)
9028 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9031 /* opt_pass methods: */
9032 virtual bool gate (function *) { return flag_warn_unused_result; }
9033 virtual unsigned int execute (function *)
9035 do_warn_unused_result (gimple_body (current_function_decl));
9036 return 0;
9039 }; // class pass_warn_unused_result
9041 } // anon namespace
9043 gimple_opt_pass *
9044 make_pass_warn_unused_result (gcc::context *ctxt)
9046 return new pass_warn_unused_result (ctxt);
9049 /* IPA passes, compilation of earlier functions or inlining
9050 might have changed some properties, such as marked functions nothrow,
9051 pure, const or noreturn.
9052 Remove redundant edges and basic blocks, and create new ones if necessary.
9054 This pass can't be executed as stand alone pass from pass manager, because
9055 in between inlining and this fixup the verify_flow_info would fail. */
9057 unsigned int
9058 execute_fixup_cfg (void)
9060 basic_block bb;
9061 gimple_stmt_iterator gsi;
9062 int todo = 0;
9063 gcov_type count_scale;
9064 edge e;
9065 edge_iterator ei;
9066 cgraph_node *node = cgraph_node::get (current_function_decl);
9068 count_scale
9069 = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
9071 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9072 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9073 = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
9075 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9076 e->count = apply_scale (e->count, count_scale);
9078 FOR_EACH_BB_FN (bb, cfun)
9080 bb->count = apply_scale (bb->count, count_scale);
9081 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9083 gimple *stmt = gsi_stmt (gsi);
9084 tree decl = is_gimple_call (stmt)
9085 ? gimple_call_fndecl (stmt)
9086 : NULL;
9087 if (decl)
9089 int flags = gimple_call_flags (stmt);
9090 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9092 if (gimple_purge_dead_abnormal_call_edges (bb))
9093 todo |= TODO_cleanup_cfg;
9095 if (gimple_in_ssa_p (cfun))
9097 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9098 update_stmt (stmt);
9102 if (flags & ECF_NORETURN
9103 && fixup_noreturn_call (stmt))
9104 todo |= TODO_cleanup_cfg;
9107 /* Remove stores to variables we marked write-only.
9108 Keep access when store has side effect, i.e. in case when source
9109 is volatile. */
9110 if (gimple_store_p (stmt)
9111 && !gimple_has_side_effects (stmt))
9113 tree lhs = get_base_address (gimple_get_lhs (stmt));
9115 if (VAR_P (lhs)
9116 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9117 && varpool_node::get (lhs)->writeonly)
9119 unlink_stmt_vdef (stmt);
9120 gsi_remove (&gsi, true);
9121 release_defs (stmt);
9122 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9123 continue;
9126 /* For calls we can simply remove LHS when it is known
9127 to be write-only. */
9128 if (is_gimple_call (stmt)
9129 && gimple_get_lhs (stmt))
9131 tree lhs = get_base_address (gimple_get_lhs (stmt));
9133 if (VAR_P (lhs)
9134 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9135 && varpool_node::get (lhs)->writeonly)
9137 gimple_call_set_lhs (stmt, NULL);
9138 update_stmt (stmt);
9139 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9143 if (maybe_clean_eh_stmt (stmt)
9144 && gimple_purge_dead_eh_edges (bb))
9145 todo |= TODO_cleanup_cfg;
9146 gsi_next (&gsi);
9149 FOR_EACH_EDGE (e, ei, bb->succs)
9150 e->count = apply_scale (e->count, count_scale);
9152 /* If we have a basic block with no successors that does not
9153 end with a control statement or a noreturn call end it with
9154 a call to __builtin_unreachable. This situation can occur
9155 when inlining a noreturn call that does in fact return. */
9156 if (EDGE_COUNT (bb->succs) == 0)
9158 gimple *stmt = last_stmt (bb);
9159 if (!stmt
9160 || (!is_ctrl_stmt (stmt)
9161 && (!is_gimple_call (stmt)
9162 || !gimple_call_noreturn_p (stmt))))
9164 if (stmt && is_gimple_call (stmt))
9165 gimple_call_set_ctrl_altering (stmt, false);
9166 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9167 stmt = gimple_build_call (fndecl, 0);
9168 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9169 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9170 if (!cfun->after_inlining)
9172 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9173 int freq
9174 = compute_call_stmt_bb_frequency (current_function_decl,
9175 bb);
9176 node->create_edge (cgraph_node::get_create (fndecl),
9177 call_stmt, bb->count, freq);
9182 if (count_scale != REG_BR_PROB_BASE)
9183 compute_function_frequency ();
9185 if (current_loops
9186 && (todo & TODO_cleanup_cfg))
9187 loops_state_set (LOOPS_NEED_FIXUP);
9189 return todo;
9192 namespace {
9194 const pass_data pass_data_fixup_cfg =
9196 GIMPLE_PASS, /* type */
9197 "fixup_cfg", /* name */
9198 OPTGROUP_NONE, /* optinfo_flags */
9199 TV_NONE, /* tv_id */
9200 PROP_cfg, /* properties_required */
9201 0, /* properties_provided */
9202 0, /* properties_destroyed */
9203 0, /* todo_flags_start */
9204 0, /* todo_flags_finish */
9207 class pass_fixup_cfg : public gimple_opt_pass
9209 public:
9210 pass_fixup_cfg (gcc::context *ctxt)
9211 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9214 /* opt_pass methods: */
9215 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9216 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9218 }; // class pass_fixup_cfg
9220 } // anon namespace
9222 gimple_opt_pass *
9223 make_pass_fixup_cfg (gcc::context *ctxt)
9225 return new pass_fixup_cfg (ctxt);
9228 /* Garbage collection support for edge_def. */
9230 extern void gt_ggc_mx (tree&);
9231 extern void gt_ggc_mx (gimple *&);
9232 extern void gt_ggc_mx (rtx&);
9233 extern void gt_ggc_mx (basic_block&);
9235 static void
9236 gt_ggc_mx (rtx_insn *& x)
9238 if (x)
9239 gt_ggc_mx_rtx_def ((void *) x);
9242 void
9243 gt_ggc_mx (edge_def *e)
9245 tree block = LOCATION_BLOCK (e->goto_locus);
9246 gt_ggc_mx (e->src);
9247 gt_ggc_mx (e->dest);
9248 if (current_ir_type () == IR_GIMPLE)
9249 gt_ggc_mx (e->insns.g);
9250 else
9251 gt_ggc_mx (e->insns.r);
9252 gt_ggc_mx (block);
9255 /* PCH support for edge_def. */
9257 extern void gt_pch_nx (tree&);
9258 extern void gt_pch_nx (gimple *&);
9259 extern void gt_pch_nx (rtx&);
9260 extern void gt_pch_nx (basic_block&);
9262 static void
9263 gt_pch_nx (rtx_insn *& x)
9265 if (x)
9266 gt_pch_nx_rtx_def ((void *) x);
9269 void
9270 gt_pch_nx (edge_def *e)
9272 tree block = LOCATION_BLOCK (e->goto_locus);
9273 gt_pch_nx (e->src);
9274 gt_pch_nx (e->dest);
9275 if (current_ir_type () == IR_GIMPLE)
9276 gt_pch_nx (e->insns.g);
9277 else
9278 gt_pch_nx (e->insns.r);
9279 gt_pch_nx (block);
9282 void
9283 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9285 tree block = LOCATION_BLOCK (e->goto_locus);
9286 op (&(e->src), cookie);
9287 op (&(e->dest), cookie);
9288 if (current_ir_type () == IR_GIMPLE)
9289 op (&(e->insns.g), cookie);
9290 else
9291 op (&(e->insns.r), cookie);
9292 op (&(block), cookie);
9295 #if CHECKING_P
9297 namespace selftest {
9299 /* Helper function for CFG selftests: create a dummy function decl
9300 and push it as cfun. */
9302 static tree
9303 push_fndecl (const char *name)
9305 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9306 /* FIXME: this uses input_location: */
9307 tree fndecl = build_fn_decl (name, fn_type);
9308 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9309 NULL_TREE, integer_type_node);
9310 DECL_RESULT (fndecl) = retval;
9311 push_struct_function (fndecl);
9312 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9313 ASSERT_TRUE (fun != NULL);
9314 init_empty_tree_cfg_for_function (fun);
9315 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9316 ASSERT_EQ (0, n_edges_for_fn (fun));
9317 return fndecl;
9320 /* These tests directly create CFGs.
9321 Compare with the static fns within tree-cfg.c:
9322 - build_gimple_cfg
9323 - make_blocks: calls create_basic_block (seq, bb);
9324 - make_edges. */
9326 /* Verify a simple cfg of the form:
9327 ENTRY -> A -> B -> C -> EXIT. */
9329 static void
9330 test_linear_chain ()
9332 gimple_register_cfg_hooks ();
9334 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9335 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9337 /* Create some empty blocks. */
9338 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9339 basic_block bb_b = create_empty_bb (bb_a);
9340 basic_block bb_c = create_empty_bb (bb_b);
9342 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9343 ASSERT_EQ (0, n_edges_for_fn (fun));
9345 /* Create some edges: a simple linear chain of BBs. */
9346 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9347 make_edge (bb_a, bb_b, 0);
9348 make_edge (bb_b, bb_c, 0);
9349 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9351 /* Verify the edges. */
9352 ASSERT_EQ (4, n_edges_for_fn (fun));
9353 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9354 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9355 ASSERT_EQ (1, bb_a->preds->length ());
9356 ASSERT_EQ (1, bb_a->succs->length ());
9357 ASSERT_EQ (1, bb_b->preds->length ());
9358 ASSERT_EQ (1, bb_b->succs->length ());
9359 ASSERT_EQ (1, bb_c->preds->length ());
9360 ASSERT_EQ (1, bb_c->succs->length ());
9361 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9362 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9364 /* Verify the dominance information
9365 Each BB in our simple chain should be dominated by the one before
9366 it. */
9367 calculate_dominance_info (CDI_DOMINATORS);
9368 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9369 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9370 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9371 ASSERT_EQ (1, dom_by_b.length ());
9372 ASSERT_EQ (bb_c, dom_by_b[0]);
9373 free_dominance_info (CDI_DOMINATORS);
9374 dom_by_b.release ();
9376 /* Similarly for post-dominance: each BB in our chain is post-dominated
9377 by the one after it. */
9378 calculate_dominance_info (CDI_POST_DOMINATORS);
9379 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9380 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9381 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9382 ASSERT_EQ (1, postdom_by_b.length ());
9383 ASSERT_EQ (bb_a, postdom_by_b[0]);
9384 free_dominance_info (CDI_POST_DOMINATORS);
9385 postdom_by_b.release ();
9387 pop_cfun ();
9390 /* Verify a simple CFG of the form:
9391 ENTRY
9395 /t \f
9401 EXIT. */
9403 static void
9404 test_diamond ()
9406 gimple_register_cfg_hooks ();
9408 tree fndecl = push_fndecl ("cfg_test_diamond");
9409 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9411 /* Create some empty blocks. */
9412 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9413 basic_block bb_b = create_empty_bb (bb_a);
9414 basic_block bb_c = create_empty_bb (bb_a);
9415 basic_block bb_d = create_empty_bb (bb_b);
9417 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9418 ASSERT_EQ (0, n_edges_for_fn (fun));
9420 /* Create the edges. */
9421 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9422 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9423 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9424 make_edge (bb_b, bb_d, 0);
9425 make_edge (bb_c, bb_d, 0);
9426 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9428 /* Verify the edges. */
9429 ASSERT_EQ (6, n_edges_for_fn (fun));
9430 ASSERT_EQ (1, bb_a->preds->length ());
9431 ASSERT_EQ (2, bb_a->succs->length ());
9432 ASSERT_EQ (1, bb_b->preds->length ());
9433 ASSERT_EQ (1, bb_b->succs->length ());
9434 ASSERT_EQ (1, bb_c->preds->length ());
9435 ASSERT_EQ (1, bb_c->succs->length ());
9436 ASSERT_EQ (2, bb_d->preds->length ());
9437 ASSERT_EQ (1, bb_d->succs->length ());
9439 /* Verify the dominance information. */
9440 calculate_dominance_info (CDI_DOMINATORS);
9441 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9442 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9443 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9444 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9445 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9446 dom_by_a.release ();
9447 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9448 ASSERT_EQ (0, dom_by_b.length ());
9449 dom_by_b.release ();
9450 free_dominance_info (CDI_DOMINATORS);
9452 /* Similarly for post-dominance. */
9453 calculate_dominance_info (CDI_POST_DOMINATORS);
9454 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9455 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9456 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9457 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9458 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9459 postdom_by_d.release ();
9460 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9461 ASSERT_EQ (0, postdom_by_b.length ());
9462 postdom_by_b.release ();
9463 free_dominance_info (CDI_POST_DOMINATORS);
9465 pop_cfun ();
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/
9487 static void
9488 test_fully_connected ()
9490 gimple_register_cfg_hooks ();
9492 tree fndecl = push_fndecl ("cfg_fully_connected");
9493 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9495 const int n = 4;
9497 /* Create some empty blocks. */
9498 auto_vec <basic_block> subgraph_nodes;
9499 for (int i = 0; i < n; i++)
9500 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9502 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9503 ASSERT_EQ (0, n_edges_for_fn (fun));
9505 /* Create the edges. */
9506 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9507 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9508 for (int i = 0; i < n; i++)
9509 for (int j = 0; j < n; j++)
9510 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9512 /* Verify the edges. */
9513 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9514 /* The first one is linked to ENTRY/EXIT as well as itself and
9515 everything else. */
9516 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9517 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9518 /* The other ones in the subgraph are linked to everything in
9519 the subgraph (including themselves). */
9520 for (int i = 1; i < n; i++)
9522 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9523 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9526 /* Verify the dominance information. */
9527 calculate_dominance_info (CDI_DOMINATORS);
9528 /* The initial block in the subgraph should be dominated by ENTRY. */
9529 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9530 get_immediate_dominator (CDI_DOMINATORS,
9531 subgraph_nodes[0]));
9532 /* Every other block in the subgraph should be dominated by the
9533 initial block. */
9534 for (int i = 1; i < n; i++)
9535 ASSERT_EQ (subgraph_nodes[0],
9536 get_immediate_dominator (CDI_DOMINATORS,
9537 subgraph_nodes[i]));
9538 free_dominance_info (CDI_DOMINATORS);
9540 /* Similarly for post-dominance. */
9541 calculate_dominance_info (CDI_POST_DOMINATORS);
9542 /* The initial block in the subgraph should be postdominated by EXIT. */
9543 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9544 get_immediate_dominator (CDI_POST_DOMINATORS,
9545 subgraph_nodes[0]));
9546 /* Every other block in the subgraph should be postdominated by the
9547 initial block, since that leads to EXIT. */
9548 for (int i = 1; i < n; i++)
9549 ASSERT_EQ (subgraph_nodes[0],
9550 get_immediate_dominator (CDI_POST_DOMINATORS,
9551 subgraph_nodes[i]));
9552 free_dominance_info (CDI_POST_DOMINATORS);
9554 pop_cfun ();
9557 /* Run all of the selftests within this file. */
9559 void
9560 tree_cfg_c_tests ()
9562 test_linear_chain ();
9563 test_diamond ();
9564 test_fully_connected ();
9567 } // namespace selftest
9569 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9570 - loop
9571 - nested loops
9572 - switch statement (a block with many out-edges)
9573 - something that jumps to itself
9574 - etc */
9576 #endif /* CHECKING_P */