/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
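
/* For example, when an edge E out of a GIMPLE_SWITCH block is redirected,
   get_cases_for_edge (E, SWITCH) below hands back every CASE_LABEL_EXPR
   that targets E's destination, so the case vector can be updated without
   rescanning all of its entries.  */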
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
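
/* A loop annotation such as "#pragma GCC ivdep" reaches this point as an
   internal call of roughly this shape (the kind argument is shown
   symbolically; in dumps it is an integer constant):

       _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
       if (_2 != 0) ...

   The switch below consumes the kind, and the call is then rewritten
   into the plain copy "_2 = _1;".  */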
static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */
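
/* The GIMPLE frontend writes a PHI as a call, e.g.

       a_1 = __PHI (__BB2: b_2, __BB3: c_3);

   which by this point is an IFN_PHI internal call whose arguments
   alternate between predecessor labels and the corresponding values;
   the loop below turns each such call into a real PHI node.  */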
static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
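  /* Concretely, in the factored form each original "goto *x_N;" becomes

	 gotovar = x_N;
	 goto <dispatcher>;

     with the dispatcher block holding the single remaining
     "goto *gotovar;" (see handle_abnormal_edges above).  */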
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
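
/* E.g. for "if (p) x = 1; else x = 2;" written on a single source line,
   both arms share that line's locus; giving one of the blocks a nonzero
   discriminator lets a sample-based profiler attribute its samples to
   the right arm.  */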
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
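
/* For instance, if an artificial label <D.2000> and a user label "out:"
   both start the same block, "out" is chosen as the leading label in
   step 1, any "goto <D.2000>;" is retargeted to "out" in step 2, and the
   now unreferenced artificial label is deleted in step 3.  */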
void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case
	 or if their destination block is unreachable.  */
      if (base_bb == default_bb
	  || (EDGE_COUNT (base_bb->succs) == 0
	      && gimple_seq_unreachable_p (bb_seq (base_bb))))
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1929 /* Merge block B into block A. */
1931 static void
1932 gimple_merge_blocks (basic_block a, basic_block b)
1934 gimple_stmt_iterator last, gsi;
1935 gphi_iterator psi;
1937 if (dump_file)
1938 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1940 /* Remove all single-valued PHI nodes from block B of the form
1941 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1942 gsi = gsi_last_bb (a);
1943 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1945 gimple *phi = gsi_stmt (psi);
1946 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1947 gimple *copy;
1948 bool may_replace_uses = (virtual_operand_p (def)
1949 || may_propagate_copy (def, use));
1951 /* In case we maintain loop closed ssa form, do not propagate arguments
1952 of loop exit phi nodes. */
1953 if (current_loops
1954 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1955 && !virtual_operand_p (def)
1956 && TREE_CODE (use) == SSA_NAME
1957 && a->loop_father != b->loop_father)
1958 may_replace_uses = false;
1960 if (!may_replace_uses)
1962 gcc_assert (!virtual_operand_p (def));
1964 /* Note that just emitting the copies is fine -- there is no problem
1965 with ordering of phi nodes. This is because A is the single
1966 predecessor of B, therefore results of the phi nodes cannot
1967 appear as arguments of the phi nodes. */
1968 copy = gimple_build_assign (def, use);
1969 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1970 remove_phi_node (&psi, false);
1972 else
1974 /* If we deal with a PHI for virtual operands, we can simply
1975 propagate these without fussing with folding or updating
1976 the stmt. */
1977 if (virtual_operand_p (def))
1979 imm_use_iterator iter;
1980 use_operand_p use_p;
1981 gimple *stmt;
1983 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1984 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1985 SET_USE (use_p, use);
1987 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1988 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1990 else
1991 replace_uses_by (def, use);
1993 remove_phi_node (&psi, true);
1997 /* Ensure that B follows A. */
1998 move_block_after (b, a);
2000 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2001 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2003 /* Remove labels from B and set gimple_bb to A for other statements. */
2004 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2006 gimple *stmt = gsi_stmt (gsi);
2007 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2009 tree label = gimple_label_label (label_stmt);
2010 int lp_nr;
2012 gsi_remove (&gsi, false);
2014 /* Now that we can thread computed gotos, we might have
2015 a situation where we have a forced label in block B.
2016 However, the label at the start of block B might still be
2017 used in other ways (think about the runtime checking for
2018 Fortran assigned gotos). So we cannot just delete the
2019 label. Instead we move the label to the start of block A. */
2020 if (FORCED_LABEL (label))
2022 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2023 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2025 /* Other user labels are kept around in the form of a debug stmt. */
2026 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2028 gimple *dbg = gimple_build_debug_bind (label,
2029 integer_zero_node,
2030 stmt);
2031 gimple_debug_bind_reset_value (dbg);
2032 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2035 lp_nr = EH_LANDING_PAD_NR (label);
2036 if (lp_nr)
2038 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2039 lp->post_landing_pad = NULL;
2042 else
2044 gimple_set_bb (stmt, a);
2045 gsi_next (&gsi);
2049 /* When merging two BBs, if their counts are different, the larger count
2050 is selected as the new bb count. This is to handle inconsistent
2051 profiles. */
2052 if (a->loop_father == b->loop_father)
2054 a->count = MAX (a->count, b->count);
2055 a->frequency = MAX (a->frequency, b->frequency);
2058 /* Merge the sequences. */
2059 last = gsi_last_bb (a);
2060 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2061 set_bb_seq (b, NULL);
2063 if (cfgcleanup_altered_bbs)
2064 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
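/* Illustrative sketch, not part of this file: passes do not call
   gimple_merge_blocks directly.  They go through the merge_blocks
   CFG hook, typically guarded by can_merge_blocks_p.  The helper
   name below is hypothetical and the block is fenced out of the
   build.  */
#if 0
static void
maybe_merge_with_successor (basic_block a)
{
  /* Only merge when A has a unique, mergeable successor;
     merge_blocks dispatches to gimple_merge_blocks via cfghooks.  */
  if (single_succ_p (a)
      && can_merge_blocks_p (a, single_succ (a)))
    merge_blocks (a, single_succ (a));
}
#endif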
2068 /* Of the two successors of BB, return the one that is not reachable by a
2069 complex edge, if there is one; else, return BB. We use
2070 this in optimizations that use post-dominators for their heuristics,
2071 to catch the cases in C++ where function calls are involved. */
2073 basic_block
2074 single_noncomplex_succ (basic_block bb)
2076 edge e0, e1;
2077 if (EDGE_COUNT (bb->succs) != 2)
2078 return bb;
2080 e0 = EDGE_SUCC (bb, 0);
2081 e1 = EDGE_SUCC (bb, 1);
2082 if (e0->flags & EDGE_COMPLEX)
2083 return e1->dest;
2084 if (e1->flags & EDGE_COMPLEX)
2085 return e0->dest;
2087 return bb;
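/* For example, if BB ends in a call that can throw, it has a normal
   fallthru edge and an EH edge; the EH edge is EDGE_COMPLEX, so the
   fallthru destination is returned as the "interesting" successor.  */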
2090 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2092 void
2093 notice_special_calls (gcall *call)
2095 int flags = gimple_call_flags (call);
2097 if (flags & ECF_MAY_BE_ALLOCA)
2098 cfun->calls_alloca = true;
2099 if (flags & ECF_RETURNS_TWICE)
2100 cfun->calls_setjmp = true;
2104 /* Clear flags set by notice_special_calls. Used by dead code removal
2105 to update the flags. */
2107 void
2108 clear_special_calls (void)
2110 cfun->calls_alloca = false;
2111 cfun->calls_setjmp = false;
2114 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2116 static void
2117 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2119 /* Since this block is no longer reachable, we can just delete all
2120 of its PHI nodes. */
2121 remove_phi_nodes (bb);
2123 /* Remove edges to BB's successors. */
2124 while (EDGE_COUNT (bb->succs) > 0)
2125 remove_edge (EDGE_SUCC (bb, 0));
2129 /* Remove statements of basic block BB. */
2131 static void
2132 remove_bb (basic_block bb)
2134 gimple_stmt_iterator i;
2136 if (dump_file)
2138 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2139 if (dump_flags & TDF_DETAILS)
2141 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2142 fprintf (dump_file, "\n");
2146 if (current_loops)
2148 struct loop *loop = bb->loop_father;
2150 /* If a loop gets removed, clean up the information associated
2151 with it. */
2152 if (loop->latch == bb
2153 || loop->header == bb)
2154 free_numbers_of_iterations_estimates_loop (loop);
2157 /* Remove all the instructions in the block. */
2158 if (bb_seq (bb) != NULL)
2160 /* Walk backwards so as to get a chance to substitute all
2161 released DEFs into debug stmts. See
2162 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2163 details. */
2164 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2166 gimple *stmt = gsi_stmt (i);
2167 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2168 if (label_stmt
2169 && (FORCED_LABEL (gimple_label_label (label_stmt))
2170 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2172 basic_block new_bb;
2173 gimple_stmt_iterator new_gsi;
2175 /* A non-reachable non-local label may still be referenced.
2176 But it no longer needs to carry the extra semantics of
2177 non-locality. */
2178 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2180 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2181 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2184 new_bb = bb->prev_bb;
2185 new_gsi = gsi_start_bb (new_bb);
2186 gsi_remove (&i, false);
2187 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2189 else
2191 /* Release SSA definitions. */
2192 release_defs (stmt);
2193 gsi_remove (&i, true);
2196 if (gsi_end_p (i))
2197 i = gsi_last_bb (bb);
2198 else
2199 gsi_prev (&i);
2203 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2204 bb->il.gimple.seq = NULL;
2205 bb->il.gimple.phi_nodes = NULL;
2209 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2210 predicate VAL, return the edge that will be taken out of the block.
2211 If VAL does not match a unique edge, NULL is returned. */
2213 edge
2214 find_taken_edge (basic_block bb, tree val)
2216 gimple *stmt;
2218 stmt = last_stmt (bb);
2220 gcc_assert (stmt);
2221 gcc_assert (is_ctrl_stmt (stmt));
2223 if (val == NULL)
2224 return NULL;
2226 if (!is_gimple_min_invariant (val))
2227 return NULL;
2229 if (gimple_code (stmt) == GIMPLE_COND)
2230 return find_taken_edge_cond_expr (bb, val);
2232 if (gimple_code (stmt) == GIMPLE_SWITCH)
2233 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2235 if (computed_goto_p (stmt))
2237 /* Only optimize if the argument is a label; if the argument is
2238 not a label, then we cannot construct a proper CFG.
2240 It may be the case that we only need to allow the LABEL_REF to
2241 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2242 appear inside a LABEL_EXPR just to be safe. */
2243 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2244 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2245 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2246 return NULL;
2249 gcc_unreachable ();
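/* Illustrative sketch, not part of this file: once the controlling
   value of BB has been folded to a constant, a cleanup pass could use
   find_taken_edge to prune the edges that can no longer be taken.
   The helper name is hypothetical and the block is fenced out of the
   build.  */
#if 0
static bool
prune_untaken_edges (basic_block bb, tree cst_val)
{
  edge taken = find_taken_edge (bb, cst_val);
  edge e;
  edge_iterator ei;

  if (!taken)
    return false;

  /* Delete every successor edge except the one that is known to be
     taken; do not advance the iterator after a removal.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    if (e != taken)
      remove_edge (e);
    else
      ei_next (&ei);
  return true;
}
#endif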
2252 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2253 statement, determine which of the outgoing edges will be taken out of the
2254 block. Return NULL if any edge may be taken.
2256 static edge
2257 find_taken_edge_computed_goto (basic_block bb, tree val)
2259 basic_block dest;
2260 edge e = NULL;
2262 dest = label_to_block (val);
2263 if (dest)
2265 e = find_edge (bb, dest);
2266 gcc_assert (e != NULL);
2269 return e;
2272 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2273 statement, determine which of the two edges will be taken out of the
2274 block. Return NULL if either edge may be taken. */
2276 static edge
2277 find_taken_edge_cond_expr (basic_block bb, tree val)
2279 edge true_edge, false_edge;
2281 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2283 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2284 return (integer_zerop (val) ? false_edge : true_edge);
2287 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2288 statement, determine which edge will be taken out of the block. Return
2289 NULL if any edge may be taken. */
2291 static edge
2292 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2293 tree val)
2295 basic_block dest_bb;
2296 edge e;
2297 tree taken_case;
2299 taken_case = find_case_label_for_value (switch_stmt, val);
2300 dest_bb = label_to_block (CASE_LABEL (taken_case));
2302 e = find_edge (bb, dest_bb);
2303 gcc_assert (e);
2304 return e;
2308 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2309 We can make optimal use here of the fact that the case labels are
2310 sorted: We can do a binary search for a case matching VAL. */
2312 static tree
2313 find_case_label_for_value (gswitch *switch_stmt, tree val)
2315 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2316 tree default_case = gimple_switch_default_label (switch_stmt);
2318 for (low = 0, high = n; high - low > 1; )
2320 size_t i = (high + low) / 2;
2321 tree t = gimple_switch_label (switch_stmt, i);
2322 int cmp;
2324 /* Cache the result of comparing CASE_LOW and val. */
2325 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2327 if (cmp > 0)
2328 high = i;
2329 else
2330 low = i;
2332 if (CASE_HIGH (t) == NULL)
2334 /* A single-valued case label. */
2335 if (cmp == 0)
2336 return t;
2338 else
2340 /* A case range. We can only handle integer ranges. */
2341 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2342 return t;
2346 return default_case;
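/* Worked example: for a switch whose sorted labels are
   { case 1:  case 5 ... 9:  case 12: }, looking up VAL == 7 narrows
   the binary search to the range label and returns it because
   5 <= 7 and 7 <= 9; looking up VAL == 3 matches no label, so the
   default case is returned.  */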
2350 /* Dump a basic block on stderr. */
2352 void
2353 gimple_debug_bb (basic_block bb)
2355 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2359 /* Dump basic block with index N on stderr. */
2361 basic_block
2362 gimple_debug_bb_n (int n)
2364 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2365 return BASIC_BLOCK_FOR_FN (cfun, n);
2369 /* Dump the CFG on stderr.
2371 FLAGS are the same used by the tree dumping functions
2372 (see TDF_* in dumpfile.h). */
2374 void
2375 gimple_debug_cfg (int flags)
2377 gimple_dump_cfg (stderr, flags);
2381 /* Dump the program showing basic block boundaries on the given FILE.
2383 FLAGS are the same used by the tree dumping functions (see TDF_* in
2384 tree.h). */
2386 void
2387 gimple_dump_cfg (FILE *file, int flags)
2389 if (flags & TDF_DETAILS)
2391 dump_function_header (file, current_function_decl, flags);
2392 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2393 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2394 last_basic_block_for_fn (cfun));
2396 brief_dump_cfg (file, flags | TDF_COMMENT);
2397 fprintf (file, "\n");
2400 if (flags & TDF_STATS)
2401 dump_cfg_stats (file);
2403 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2407 /* Dump CFG statistics on FILE. */
2409 void
2410 dump_cfg_stats (FILE *file)
2412 static long max_num_merged_labels = 0;
2413 unsigned long size, total = 0;
2414 long num_edges;
2415 basic_block bb;
2416 const char * const fmt_str = "%-30s%-13s%12s\n";
2417 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2418 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2419 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2420 const char *funcname = current_function_name ();
2422 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2424 fprintf (file, "---------------------------------------------------------\n");
2425 fprintf (file, fmt_str, "", " Number of ", "Memory");
2426 fprintf (file, fmt_str, "", " instances ", "used ");
2427 fprintf (file, "---------------------------------------------------------\n");
2429 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2430 total += size;
2431 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2432 SCALE (size), LABEL (size));
2434 num_edges = 0;
2435 FOR_EACH_BB_FN (bb, cfun)
2436 num_edges += EDGE_COUNT (bb->succs);
2437 size = num_edges * sizeof (struct edge_def);
2438 total += size;
2439 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2441 fprintf (file, "---------------------------------------------------------\n");
2442 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2443 LABEL (total));
2444 fprintf (file, "---------------------------------------------------------\n");
2445 fprintf (file, "\n");
2447 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2448 max_num_merged_labels = cfg_stats.num_merged_labels;
2450 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2451 cfg_stats.num_merged_labels, max_num_merged_labels);
2453 fprintf (file, "\n");
2457 /* Dump CFG statistics on stderr. Keep extern so that it's always
2458 linked in the final executable. */
2460 DEBUG_FUNCTION void
2461 debug_cfg_stats (void)
2463 dump_cfg_stats (stderr);
2466 /*---------------------------------------------------------------------------
2467 Miscellaneous helpers
2468 ---------------------------------------------------------------------------*/
2470 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2471 flow. Transfers of control flow associated with EH are excluded. */
2473 static bool
2474 call_can_make_abnormal_goto (gimple *t)
2476 /* If the function has no non-local labels, then a call cannot make an
2477 abnormal transfer of control. */
2478 if (!cfun->has_nonlocal_label
2479 && !cfun->calls_setjmp)
2480 return false;
2482 /* Likewise if the call has no side effects. */
2483 if (!gimple_has_side_effects (t))
2484 return false;
2486 /* Likewise if the called function is leaf. */
2487 if (gimple_call_flags (t) & ECF_LEAF)
2488 return false;
2490 return true;
2494 /* Return true if T can make an abnormal transfer of control flow.
2495 Transfers of control flow associated with EH are excluded. */
2497 bool
2498 stmt_can_make_abnormal_goto (gimple *t)
2500 if (computed_goto_p (t))
2501 return true;
2502 if (is_gimple_call (t))
2503 return call_can_make_abnormal_goto (t);
2504 return false;
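/* For example, in a function containing a nonlocal label, a computed
   goto or any side-effecting call to a non-leaf function may transfer
   control abnormally, whereas a call without side effects or to an
   ECF_LEAF function may not.  */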
2508 /* Return true if T represents a stmt that always transfers control. */
2510 bool
2511 is_ctrl_stmt (gimple *t)
2513 switch (gimple_code (t))
2515 case GIMPLE_COND:
2516 case GIMPLE_SWITCH:
2517 case GIMPLE_GOTO:
2518 case GIMPLE_RETURN:
2519 case GIMPLE_RESX:
2520 return true;
2521 default:
2522 return false;
2527 /* Return true if T is a statement that may alter the flow of control
2528 (e.g., a call to a non-returning function). */
2530 bool
2531 is_ctrl_altering_stmt (gimple *t)
2533 gcc_assert (t);
2535 switch (gimple_code (t))
2537 case GIMPLE_CALL:
2538 /* The per-stmt call flag indicates whether the call could alter
2539 control flow. */
2540 if (gimple_call_ctrl_altering_p (t))
2541 return true;
2542 break;
2544 case GIMPLE_EH_DISPATCH:
2545 /* EH_DISPATCH branches to the individual catch handlers at
2546 this level of a try or allowed-exceptions region. It can
2547 fallthru to the next statement as well. */
2548 return true;
2550 case GIMPLE_ASM:
2551 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2552 return true;
2553 break;
2555 CASE_GIMPLE_OMP:
2556 /* OpenMP directives alter control flow. */
2557 return true;
2559 case GIMPLE_TRANSACTION:
2560 /* A transaction start alters control flow. */
2561 return true;
2563 default:
2564 break;
2567 /* If a statement can throw, it alters control flow. */
2568 return stmt_can_throw_internal (t);
2572 /* Return true if T is a simple local goto. */
2574 bool
2575 simple_goto_p (gimple *t)
2577 return (gimple_code (t) == GIMPLE_GOTO
2578 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2582 /* Return true if STMT should start a new basic block. PREV_STMT is
2583 the statement preceding STMT. It is used when STMT is a label or a
2584 case label. Labels should only start a new basic block if their
2585 previous statement wasn't a label. Otherwise, a sequence of labels
2586 would generate unnecessary basic blocks that only contain a single
2587 label. */
2589 static inline bool
2590 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2592 if (stmt == NULL)
2593 return false;
2595 /* Labels start a new basic block only if the preceding statement
2596 wasn't a label of the same type. This prevents the creation of
2597 consecutive blocks that have nothing but a single label. */
2598 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2600 /* Nonlocal and computed GOTO targets always start a new block. */
2601 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2602 || FORCED_LABEL (gimple_label_label (label_stmt)))
2603 return true;
2605 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2607 if (DECL_NONLOCAL (gimple_label_label (
2608 as_a <glabel *> (prev_stmt))))
2609 return true;
2611 cfg_stats.num_merged_labels++;
2612 return false;
2614 else
2615 return true;
2617 else if (gimple_code (stmt) == GIMPLE_CALL)
2619 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2620 /* setjmp acts similar to a nonlocal GOTO target and thus should
2621 start a new block. */
2622 return true;
2623 if (gimple_call_internal_p (stmt, IFN_PHI)
2624 && prev_stmt
2625 && gimple_code (prev_stmt) != GIMPLE_LABEL
2626 && (gimple_code (prev_stmt) != GIMPLE_CALL
2627 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2628 /* PHI nodes start a new block unless preceded by a label
2629 or another PHI. */
2630 return true;
2633 return false;
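/* For example, in

     L1:
     L2:
       x_1 = 1;

   L2 does not start a new block; both labels are coalesced into one
   block (counted in cfg_stats.num_merged_labels).  A nonlocal or
   forced label, by contrast, always starts its own block.  */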
2637 /* Return true if T should end a basic block. */
2639 bool
2640 stmt_ends_bb_p (gimple *t)
2642 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2645 /* Remove block annotations and other data structures. */
2647 void
2648 delete_tree_cfg_annotations (struct function *fn)
2650 vec_free (label_to_block_map_for_fn (fn));
2653 /* Return the virtual phi in BB. */
2655 gphi *
2656 get_virtual_phi (basic_block bb)
2658 for (gphi_iterator gsi = gsi_start_phis (bb);
2659 !gsi_end_p (gsi);
2660 gsi_next (&gsi))
2662 gphi *phi = gsi.phi ();
2664 if (virtual_operand_p (PHI_RESULT (phi)))
2665 return phi;
2668 return NULL;
2671 /* Return the first statement in basic block BB. */
2673 gimple *
2674 first_stmt (basic_block bb)
2676 gimple_stmt_iterator i = gsi_start_bb (bb);
2677 gimple *stmt = NULL;
2679 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2681 gsi_next (&i);
2682 stmt = NULL;
2684 return stmt;
2687 /* Return the first non-label statement in basic block BB. */
2689 static gimple *
2690 first_non_label_stmt (basic_block bb)
2692 gimple_stmt_iterator i = gsi_start_bb (bb);
2693 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2694 gsi_next (&i);
2695 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2698 /* Return the last statement in basic block BB. */
2700 gimple *
2701 last_stmt (basic_block bb)
2703 gimple_stmt_iterator i = gsi_last_bb (bb);
2704 gimple *stmt = NULL;
2706 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2708 gsi_prev (&i);
2709 stmt = NULL;
2711 return stmt;
2714 /* Return the last statement of an otherwise empty block. Return NULL
2715 if the block is totally empty, or if it contains more than one
2716 statement. */
2718 gimple *
2719 last_and_only_stmt (basic_block bb)
2721 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2722 gimple *last, *prev;
2724 if (gsi_end_p (i))
2725 return NULL;
2727 last = gsi_stmt (i);
2728 gsi_prev_nondebug (&i);
2729 if (gsi_end_p (i))
2730 return last;
2732 /* Empty statements should no longer appear in the instruction stream.
2733 Everything that might have appeared before should be deleted by
2734 remove_useless_stmts, and the optimizers should just gsi_remove
2735 instead of smashing with build_empty_stmt.
2737 Thus the only thing that should appear here in a block containing
2738 one executable statement is a label. */
2739 prev = gsi_stmt (i);
2740 if (gimple_code (prev) == GIMPLE_LABEL)
2741 return last;
2742 else
2743 return NULL;
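/* Illustrative sketch, not part of this file: last_and_only_stmt lets
   a caller test for a block that does nothing but return.  The helper
   name is hypothetical and the block is fenced out of the build.  */
#if 0
static bool
bb_is_just_return_p (basic_block bb)
{
  gimple *stmt = last_and_only_stmt (bb);
  return stmt && gimple_code (stmt) == GIMPLE_RETURN;
}
#endif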
2746 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2748 static void
2749 reinstall_phi_args (edge new_edge, edge old_edge)
2751 edge_var_map *vm;
2752 int i;
2753 gphi_iterator phis;
2755 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2756 if (!v)
2757 return;
2759 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2760 v->iterate (i, &vm) && !gsi_end_p (phis);
2761 i++, gsi_next (&phis))
2763 gphi *phi = phis.phi ();
2764 tree result = redirect_edge_var_map_result (vm);
2765 tree arg = redirect_edge_var_map_def (vm);
2767 gcc_assert (result == gimple_phi_result (phi));
2769 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2772 redirect_edge_var_map_clear (old_edge);
2775 /* Returns the basic block after which the new basic block created
2776 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2777 near its "logical" location. This is of most help to humans looking
2778 at debugging dumps. */
2780 basic_block
2781 split_edge_bb_loc (edge edge_in)
2783 basic_block dest = edge_in->dest;
2784 basic_block dest_prev = dest->prev_bb;
2786 if (dest_prev)
2788 edge e = find_edge (dest_prev, dest);
2789 if (e && !(e->flags & EDGE_COMPLEX))
2790 return edge_in->src;
2792 return dest_prev;
2795 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2796 Abort on abnormal edges. */
2798 static basic_block
2799 gimple_split_edge (edge edge_in)
2801 basic_block new_bb, after_bb, dest;
2802 edge new_edge, e;
2804 /* Abnormal edges cannot be split. */
2805 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2807 dest = edge_in->dest;
2809 after_bb = split_edge_bb_loc (edge_in);
2811 new_bb = create_empty_bb (after_bb);
2812 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2813 new_bb->count = edge_in->count;
2814 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2815 new_edge->probability = REG_BR_PROB_BASE;
2816 new_edge->count = edge_in->count;
2818 e = redirect_edge_and_branch (edge_in, new_bb);
2819 gcc_assert (e == edge_in);
2820 reinstall_phi_args (new_edge, e);
2822 return new_bb;
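/* Illustrative sketch, not part of this file: callers reach
   gimple_split_edge through the split_edge CFG hook and then add
   statements to the new block.  The helper name is hypothetical and
   the block is fenced out of the build.  */
#if 0
static basic_block
split_edge_and_append (edge e, gimple *stmt)
{
  /* split_edge dispatches to gimple_split_edge for GIMPLE bodies.  */
  basic_block new_bb = split_edge (e);
  gimple_stmt_iterator gsi = gsi_last_bb (new_bb);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return new_bb;
}
#endif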
2826 /* Verify properties of the address expression T with base object BASE. */
2828 static tree
2829 verify_address (tree t, tree base)
2831 bool old_constant;
2832 bool old_side_effects;
2833 bool new_constant;
2834 bool new_side_effects;
2836 old_constant = TREE_CONSTANT (t);
2837 old_side_effects = TREE_SIDE_EFFECTS (t);
2839 recompute_tree_invariant_for_addr_expr (t);
2840 new_side_effects = TREE_SIDE_EFFECTS (t);
2841 new_constant = TREE_CONSTANT (t);
2843 if (old_constant != new_constant)
2845 error ("constant not recomputed when ADDR_EXPR changed");
2846 return t;
2848 if (old_side_effects != new_side_effects)
2850 error ("side effects not recomputed when ADDR_EXPR changed");
2851 return t;
2854 if (!(VAR_P (base)
2855 || TREE_CODE (base) == PARM_DECL
2856 || TREE_CODE (base) == RESULT_DECL))
2857 return NULL_TREE;
2859 if (DECL_GIMPLE_REG_P (base))
2861 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2862 return base;
2865 return NULL_TREE;
2868 /* Callback for walk_tree, check that all elements with address taken are
2869 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2870 inside a PHI node. */
2872 static tree
2873 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2875 tree t = *tp, x;
2877 if (TYPE_P (t))
2878 *walk_subtrees = 0;
2880 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2881 #define CHECK_OP(N, MSG) \
2882 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2883 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2885 switch (TREE_CODE (t))
2887 case SSA_NAME:
2888 if (SSA_NAME_IN_FREE_LIST (t))
2890 error ("SSA name in freelist but still referenced");
2891 return *tp;
2893 break;
2895 case PARM_DECL:
2896 case VAR_DECL:
2897 case RESULT_DECL:
2899 tree context = decl_function_context (t);
2900 if (context != cfun->decl
2901 && !SCOPE_FILE_SCOPE_P (context)
2902 && !TREE_STATIC (t)
2903 && !DECL_EXTERNAL (t))
2905 error ("Local declaration from a different function");
2906 return t;
2909 break;
2911 case INDIRECT_REF:
2912 error ("INDIRECT_REF in gimple IL");
2913 return t;
2915 case MEM_REF:
2916 x = TREE_OPERAND (t, 0);
2917 if (!POINTER_TYPE_P (TREE_TYPE (x))
2918 || !is_gimple_mem_ref_addr (x))
2920 error ("invalid first operand of MEM_REF");
2921 return x;
2923 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2924 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2926 error ("invalid offset operand of MEM_REF");
2927 return TREE_OPERAND (t, 1);
2929 if (TREE_CODE (x) == ADDR_EXPR)
2931 tree va = verify_address (x, TREE_OPERAND (x, 0));
2932 if (va)
2933 return va;
2934 x = TREE_OPERAND (x, 0);
2936 walk_tree (&x, verify_expr, data, NULL);
2937 *walk_subtrees = 0;
2938 break;
2940 case ASSERT_EXPR:
2941 x = fold (ASSERT_EXPR_COND (t));
2942 if (x == boolean_false_node)
2944 error ("ASSERT_EXPR with an always-false condition");
2945 return *tp;
2947 break;
2949 case MODIFY_EXPR:
2950 error ("MODIFY_EXPR not expected while having tuples");
2951 return *tp;
2953 case ADDR_EXPR:
2955 tree tem;
2957 gcc_assert (is_gimple_address (t));
2959 /* Skip any references (they will be checked when we recurse down the
2960 tree) and ensure that any variable used as a prefix is marked
2961 addressable. */
2962 for (x = TREE_OPERAND (t, 0);
2963 handled_component_p (x);
2964 x = TREE_OPERAND (x, 0))
2967 if ((tem = verify_address (t, x)))
2968 return tem;
2970 if (!(VAR_P (x)
2971 || TREE_CODE (x) == PARM_DECL
2972 || TREE_CODE (x) == RESULT_DECL))
2973 return NULL;
2975 if (!TREE_ADDRESSABLE (x))
2977 error ("address taken, but ADDRESSABLE bit not set");
2978 return x;
2981 break;
2984 case COND_EXPR:
2985 x = COND_EXPR_COND (t);
2986 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2988 error ("non-integral used in condition");
2989 return x;
2991 if (!is_gimple_condexpr (x))
2993 error ("invalid conditional operand");
2994 return x;
2996 break;
2998 case NON_LVALUE_EXPR:
2999 case TRUTH_NOT_EXPR:
3000 gcc_unreachable ();
3002 CASE_CONVERT:
3003 case FIX_TRUNC_EXPR:
3004 case FLOAT_EXPR:
3005 case NEGATE_EXPR:
3006 case ABS_EXPR:
3007 case BIT_NOT_EXPR:
3008 CHECK_OP (0, "invalid operand to unary operator");
3009 break;
3011 case REALPART_EXPR:
3012 case IMAGPART_EXPR:
3013 case BIT_FIELD_REF:
3014 if (!is_gimple_reg_type (TREE_TYPE (t)))
3016 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3017 return t;
3020 if (TREE_CODE (t) == BIT_FIELD_REF)
3022 tree t0 = TREE_OPERAND (t, 0);
3023 tree t1 = TREE_OPERAND (t, 1);
3024 tree t2 = TREE_OPERAND (t, 2);
3025 if (!tree_fits_uhwi_p (t1)
3026 || !tree_fits_uhwi_p (t2))
3028 error ("invalid position or size operand to BIT_FIELD_REF");
3029 return t;
3031 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3032 && (TYPE_PRECISION (TREE_TYPE (t))
3033 != tree_to_uhwi (t1)))
3035 error ("integral result type precision does not match "
3036 "field size of BIT_FIELD_REF");
3037 return t;
3039 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3040 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3041 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3042 != tree_to_uhwi (t1)))
3044 error ("mode size of non-integral result does not "
3045 "match field size of BIT_FIELD_REF");
3046 return t;
3048 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3049 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3050 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3052 error ("position plus size exceeds size of referenced object in "
3053 "BIT_FIELD_REF");
3054 return t;
3057 t = TREE_OPERAND (t, 0);
3059 /* Fall-through. */
3060 case COMPONENT_REF:
3061 case ARRAY_REF:
3062 case ARRAY_RANGE_REF:
3063 case VIEW_CONVERT_EXPR:
3064 /* We have a nest of references. Verify that each of the operands
3065 that determine where to reference is either a constant or a variable,
3066 verify that the base is valid, and then show we've already checked
3067 the subtrees. */
3068 while (handled_component_p (t))
3070 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3071 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3072 else if (TREE_CODE (t) == ARRAY_REF
3073 || TREE_CODE (t) == ARRAY_RANGE_REF)
3075 CHECK_OP (1, "invalid array index");
3076 if (TREE_OPERAND (t, 2))
3077 CHECK_OP (2, "invalid array lower bound");
3078 if (TREE_OPERAND (t, 3))
3079 CHECK_OP (3, "invalid array stride");
3081 else if (TREE_CODE (t) == BIT_FIELD_REF
3082 || TREE_CODE (t) == REALPART_EXPR
3083 || TREE_CODE (t) == IMAGPART_EXPR)
3085 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3086 "REALPART_EXPR");
3087 return t;
3090 t = TREE_OPERAND (t, 0);
3093 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3095 error ("invalid reference prefix");
3096 return t;
3098 walk_tree (&t, verify_expr, data, NULL);
3099 *walk_subtrees = 0;
3100 break;
3101 case PLUS_EXPR:
3102 case MINUS_EXPR:
3103 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3104 should be done using POINTER_PLUS_EXPR. */
3105 if (POINTER_TYPE_P (TREE_TYPE (t)))
3107 error ("invalid operand to plus/minus, type is a pointer");
3108 return t;
3110 CHECK_OP (0, "invalid operand to binary operator");
3111 CHECK_OP (1, "invalid operand to binary operator");
3112 break;
3114 case POINTER_PLUS_EXPR:
3115 /* Check to make sure the first operand is a pointer or reference type. */
3116 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3118 error ("invalid operand to pointer plus, first operand is not a pointer");
3119 return t;
3121 /* Check to make sure the second operand is a ptrofftype. */
3122 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3124 error ("invalid operand to pointer plus, second operand is not an "
3125 "integer type of appropriate width");
3126 return t;
3128 /* FALLTHROUGH */
3129 case LT_EXPR:
3130 case LE_EXPR:
3131 case GT_EXPR:
3132 case GE_EXPR:
3133 case EQ_EXPR:
3134 case NE_EXPR:
3135 case UNORDERED_EXPR:
3136 case ORDERED_EXPR:
3137 case UNLT_EXPR:
3138 case UNLE_EXPR:
3139 case UNGT_EXPR:
3140 case UNGE_EXPR:
3141 case UNEQ_EXPR:
3142 case LTGT_EXPR:
3143 case MULT_EXPR:
3144 case TRUNC_DIV_EXPR:
3145 case CEIL_DIV_EXPR:
3146 case FLOOR_DIV_EXPR:
3147 case ROUND_DIV_EXPR:
3148 case TRUNC_MOD_EXPR:
3149 case CEIL_MOD_EXPR:
3150 case FLOOR_MOD_EXPR:
3151 case ROUND_MOD_EXPR:
3152 case RDIV_EXPR:
3153 case EXACT_DIV_EXPR:
3154 case MIN_EXPR:
3155 case MAX_EXPR:
3156 case LSHIFT_EXPR:
3157 case RSHIFT_EXPR:
3158 case LROTATE_EXPR:
3159 case RROTATE_EXPR:
3160 case BIT_IOR_EXPR:
3161 case BIT_XOR_EXPR:
3162 case BIT_AND_EXPR:
3163 CHECK_OP (0, "invalid operand to binary operator");
3164 CHECK_OP (1, "invalid operand to binary operator");
3165 break;
3167 case CONSTRUCTOR:
3168 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3169 *walk_subtrees = 0;
3170 break;
3172 case CASE_LABEL_EXPR:
3173 if (CASE_CHAIN (t))
3175 error ("invalid CASE_CHAIN");
3176 return t;
3178 break;
3180 default:
3181 break;
3183 return NULL;
3185 #undef CHECK_OP
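/* Illustrative sketch, not part of this file: verify_expr is a
   walk_tree callback; a non-NULL return value is the offending
   subtree.  The helper name is hypothetical and the block is fenced
   out of the build.  */
#if 0
static bool
operand_verifies_p (tree op)
{
  return walk_tree (&op, verify_expr, NULL, NULL) == NULL_TREE;
}
#endif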
3189 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3190 Returns true if there is an error, otherwise false. */
3192 static bool
3193 verify_types_in_gimple_min_lval (tree expr)
3195 tree op;
3197 if (is_gimple_id (expr))
3198 return false;
3200 if (TREE_CODE (expr) != TARGET_MEM_REF
3201 && TREE_CODE (expr) != MEM_REF)
3203 error ("invalid expression for min lvalue");
3204 return true;
3207 /* TARGET_MEM_REFs are strange beasts. */
3208 if (TREE_CODE (expr) == TARGET_MEM_REF)
3209 return false;
3211 op = TREE_OPERAND (expr, 0);
3212 if (!is_gimple_val (op))
3214 error ("invalid operand in indirect reference");
3215 debug_generic_stmt (op);
3216 return true;
3218 /* Memory references now generally can involve a value conversion. */
3220 return false;
3223 /* Verify if EXPR is a valid GIMPLE reference expression. If
3224 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3225 if there is an error, otherwise false. */
3227 static bool
3228 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3230 while (handled_component_p (expr))
3232 tree op = TREE_OPERAND (expr, 0);
3234 if (TREE_CODE (expr) == ARRAY_REF
3235 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3237 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3238 || (TREE_OPERAND (expr, 2)
3239 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3240 || (TREE_OPERAND (expr, 3)
3241 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3243 error ("invalid operands to array reference");
3244 debug_generic_stmt (expr);
3245 return true;
3249 /* Verify if the reference array element types are compatible. */
3250 if (TREE_CODE (expr) == ARRAY_REF
3251 && !useless_type_conversion_p (TREE_TYPE (expr),
3252 TREE_TYPE (TREE_TYPE (op))))
3254 error ("type mismatch in array reference");
3255 debug_generic_stmt (TREE_TYPE (expr));
3256 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3257 return true;
3259 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3260 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3261 TREE_TYPE (TREE_TYPE (op))))
3263 error ("type mismatch in array range reference");
3264 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3265 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3266 return true;
3269 if ((TREE_CODE (expr) == REALPART_EXPR
3270 || TREE_CODE (expr) == IMAGPART_EXPR)
3271 && !useless_type_conversion_p (TREE_TYPE (expr),
3272 TREE_TYPE (TREE_TYPE (op))))
3274 error ("type mismatch in real/imagpart reference");
3275 debug_generic_stmt (TREE_TYPE (expr));
3276 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3277 return true;
3280 if (TREE_CODE (expr) == COMPONENT_REF
3281 && !useless_type_conversion_p (TREE_TYPE (expr),
3282 TREE_TYPE (TREE_OPERAND (expr, 1))))
3284 error ("type mismatch in component reference");
3285 debug_generic_stmt (TREE_TYPE (expr));
3286 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3287 return true;
3290 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3292 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3293 that their operand is not an SSA name or an invariant when
3294 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3295 bug). Otherwise there is nothing to verify, gross mismatches at
3296 most invoke undefined behavior. */
3297 if (require_lvalue
3298 && (TREE_CODE (op) == SSA_NAME
3299 || is_gimple_min_invariant (op)))
3301 error ("conversion of an SSA_NAME on the left hand side");
3302 debug_generic_stmt (expr);
3303 return true;
3305 else if (TREE_CODE (op) == SSA_NAME
3306 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3308 error ("conversion of register to a different size");
3309 debug_generic_stmt (expr);
3310 return true;
3312 else if (!handled_component_p (op))
3313 return false;
3316 expr = op;
3319 if (TREE_CODE (expr) == MEM_REF)
3321 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3323 error ("invalid address operand in MEM_REF");
3324 debug_generic_stmt (expr);
3325 return true;
3327 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3328 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3330 error ("invalid offset operand in MEM_REF");
3331 debug_generic_stmt (expr);
3332 return true;
3335 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3337 if (!TMR_BASE (expr)
3338 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3340 error ("invalid address operand in TARGET_MEM_REF");
3341 return true;
3343 if (!TMR_OFFSET (expr)
3344 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3345 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3347 error ("invalid offset operand in TARGET_MEM_REF");
3348 debug_generic_stmt (expr);
3349 return true;
3353 return ((require_lvalue || !is_gimple_min_invariant (expr))
3354 && verify_types_in_gimple_min_lval (expr));
3357 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3358 list of pointer-to types that is trivially convertible to DEST. */
3360 static bool
3361 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3363 tree src;
3365 if (!TYPE_POINTER_TO (src_obj))
3366 return true;
3368 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3369 if (useless_type_conversion_p (dest, src))
3370 return true;
3372 return false;
3375 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3376 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3378 static bool
3379 valid_fixed_convert_types_p (tree type1, tree type2)
3381 return (FIXED_POINT_TYPE_P (type1)
3382 && (INTEGRAL_TYPE_P (type2)
3383 || SCALAR_FLOAT_TYPE_P (type2)
3384 || FIXED_POINT_TYPE_P (type2)));
3387 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3388 is a problem, otherwise false. */
3390 static bool
3391 verify_gimple_call (gcall *stmt)
3393 tree fn = gimple_call_fn (stmt);
3394 tree fntype, fndecl;
3395 unsigned i;
3397 if (gimple_call_internal_p (stmt))
3399 if (fn)
3401 error ("gimple call has two targets");
3402 debug_generic_stmt (fn);
3403 return true;
3405 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3406 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3408 return false;
3411 else
3413 if (!fn)
3415 error ("gimple call has no target");
3416 return true;
3420 if (fn && !is_gimple_call_addr (fn))
3422 error ("invalid function in gimple call");
3423 debug_generic_stmt (fn);
3424 return true;
3427 if (fn
3428 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3429 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3430 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3432 error ("non-function in gimple call");
3433 return true;
3436 fndecl = gimple_call_fndecl (stmt);
3437 if (fndecl
3438 && TREE_CODE (fndecl) == FUNCTION_DECL
3439 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3440 && !DECL_PURE_P (fndecl)
3441 && !TREE_READONLY (fndecl))
3443 error ("invalid pure const state for function");
3444 return true;
3447 tree lhs = gimple_call_lhs (stmt);
3448 if (lhs
3449 && (!is_gimple_lvalue (lhs)
3450 || verify_types_in_gimple_reference (lhs, true)))
3452 error ("invalid LHS in gimple call");
3453 return true;
3456 if (gimple_call_ctrl_altering_p (stmt)
3457 && gimple_call_noreturn_p (stmt)
3458 && should_remove_lhs_p (lhs))
3460 error ("LHS in noreturn call");
3461 return true;
3464 fntype = gimple_call_fntype (stmt);
3465 if (fntype
3466 && lhs
3467 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3468 /* ??? At least C++ misses conversions at assignments from
3469 void * call results.
3470 ??? Java is completely off. Especially with functions
3471 returning java.lang.Object.
3472 For now simply allow arbitrary pointer type conversions. */
3473 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3474 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3476 error ("invalid conversion in gimple call");
3477 debug_generic_stmt (TREE_TYPE (lhs));
3478 debug_generic_stmt (TREE_TYPE (fntype));
3479 return true;
3482 if (gimple_call_chain (stmt)
3483 && !is_gimple_val (gimple_call_chain (stmt)))
3485 error ("invalid static chain in gimple call");
3486 debug_generic_stmt (gimple_call_chain (stmt));
3487 return true;
3490 /* If there is a static chain argument, the call should either be
3491 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3492 if (gimple_call_chain (stmt)
3493 && fndecl
3494 && !DECL_STATIC_CHAIN (fndecl))
3496 error ("static chain with function that doesn%'t use one");
3497 return true;
3500 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3502 switch (DECL_FUNCTION_CODE (fndecl))
3504 case BUILT_IN_UNREACHABLE:
3505 case BUILT_IN_TRAP:
3506 if (gimple_call_num_args (stmt) > 0)
3508 /* Built-in unreachable with parameters might not be caught by
3509 undefined behavior sanitizer. Front ends do check that users do not
3510 call them that way, but we also produce calls to
3511 __builtin_unreachable internally, for example when IPA figures
3512 out a call cannot happen in a legal program. In such cases,
3513 we must make sure arguments are stripped off. */
3514 error ("__builtin_unreachable or __builtin_trap call with "
3515 "arguments");
3516 return true;
3518 break;
3519 default:
3520 break;
3524 /* ??? The C frontend passes unpromoted arguments in case it
3525 didn't see a function declaration before the call. So for now
3526 leave the call arguments mostly unverified. Once we gimplify
3527 unit-at-a-time we have a chance to fix this. */
3529 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3531 tree arg = gimple_call_arg (stmt, i);
3532 if ((is_gimple_reg_type (TREE_TYPE (arg))
3533 && !is_gimple_val (arg))
3534 || (!is_gimple_reg_type (TREE_TYPE (arg))
3535 && !is_gimple_lvalue (arg)))
3537 error ("invalid argument to gimple call");
3538 debug_generic_expr (arg);
3539 return true;
3543 return false;
3546 /* Verify the gimple comparison with result type TYPE and
3547 operands OP0 and OP1; the comparison code is CODE. */
3549 static bool
3550 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3552 tree op0_type = TREE_TYPE (op0);
3553 tree op1_type = TREE_TYPE (op1);
3555 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3557 error ("invalid operands in gimple comparison");
3558 return true;
3561 /* For comparisons we do not have the operation type as the
3562 effective type the comparison is carried out in. Instead
3563 we require that either the first operand is trivially
3564 convertible into the second, or the other way around.
3565 Because we special-case pointers to void we allow
3566 comparisons of pointers with the same mode as well. */
3567 if (!useless_type_conversion_p (op0_type, op1_type)
3568 && !useless_type_conversion_p (op1_type, op0_type)
3569 && (!POINTER_TYPE_P (op0_type)
3570 || !POINTER_TYPE_P (op1_type)
3571 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3573 error ("mismatching comparison operand types");
3574 debug_generic_expr (op0_type);
3575 debug_generic_expr (op1_type);
3576 return true;
3579 /* The resulting type of a comparison may be an effective boolean type. */
3580 if (INTEGRAL_TYPE_P (type)
3581 && (TREE_CODE (type) == BOOLEAN_TYPE
3582 || TYPE_PRECISION (type) == 1))
3584 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3585 || TREE_CODE (op1_type) == VECTOR_TYPE)
3586 && code != EQ_EXPR && code != NE_EXPR
3587 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3588 && !VECTOR_INTEGER_TYPE_P (op0_type))
3590 error ("unsupported operation or type for vector comparison"
3591 " returning a boolean");
3592 debug_generic_expr (op0_type);
3593 debug_generic_expr (op1_type);
3594 return true;
3597 /* Or a boolean vector type with the same element count
3598 as the comparison operand types. */
3599 else if (TREE_CODE (type) == VECTOR_TYPE
3600 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3602 if (TREE_CODE (op0_type) != VECTOR_TYPE
3603 || TREE_CODE (op1_type) != VECTOR_TYPE)
3605 error ("non-vector operands in vector comparison");
3606 debug_generic_expr (op0_type);
3607 debug_generic_expr (op1_type);
3608 return true;
3611 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3613 error ("invalid vector comparison resulting type");
3614 debug_generic_expr (type);
3615 return true;
3618 else
3620 error ("bogus comparison result type");
3621 debug_generic_expr (type);
3622 return true;
3625 return false;
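/* For example, a comparison of two four-element integer vectors may
   produce either an effective boolean (BOOLEAN_TYPE or a 1-bit
   integral type) or a boolean vector of exactly four elements; a
   boolean vector with a different element count is rejected above.  */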
3628 /* Verify a gimple assignment statement STMT with an unary rhs.
3629 Returns true if anything is wrong. */
3631 static bool
3632 verify_gimple_assign_unary (gassign *stmt)
3634 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3635 tree lhs = gimple_assign_lhs (stmt);
3636 tree lhs_type = TREE_TYPE (lhs);
3637 tree rhs1 = gimple_assign_rhs1 (stmt);
3638 tree rhs1_type = TREE_TYPE (rhs1);
3640 if (!is_gimple_reg (lhs))
3642 error ("non-register as LHS of unary operation");
3643 return true;
3646 if (!is_gimple_val (rhs1))
3648 error ("invalid operand in unary operation");
3649 return true;
3652 /* First handle conversions. */
3653 switch (rhs_code)
3655 CASE_CONVERT:
3657 /* Allow conversions from pointer type to integral type only if
3658 there is no sign or zero extension involved.
3659 For targets where the precision of ptrofftype doesn't match that
3660 of pointers we need to allow arbitrary conversions to ptrofftype. */
3661 if ((POINTER_TYPE_P (lhs_type)
3662 && INTEGRAL_TYPE_P (rhs1_type))
3663 || (POINTER_TYPE_P (rhs1_type)
3664 && INTEGRAL_TYPE_P (lhs_type)
3665 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3666 || ptrofftype_p (sizetype))))
3667 return false;
3669 /* Allow conversion from integral to offset type and vice versa. */
3670 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3671 && INTEGRAL_TYPE_P (rhs1_type))
3672 || (INTEGRAL_TYPE_P (lhs_type)
3673 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3674 return false;
3676 /* Otherwise assert we are converting between types of the
3677 same kind. */
3678 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3680 error ("invalid types in nop conversion");
3681 debug_generic_expr (lhs_type);
3682 debug_generic_expr (rhs1_type);
3683 return true;
3686 return false;
3689 case ADDR_SPACE_CONVERT_EXPR:
3691 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3692 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3693 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3695 error ("invalid types in address space conversion");
3696 debug_generic_expr (lhs_type);
3697 debug_generic_expr (rhs1_type);
3698 return true;
3701 return false;
3704 case FIXED_CONVERT_EXPR:
3706 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3707 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3709 error ("invalid types in fixed-point conversion");
3710 debug_generic_expr (lhs_type);
3711 debug_generic_expr (rhs1_type);
3712 return true;
3715 return false;
3718 case FLOAT_EXPR:
3720 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3721 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3722 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3724 error ("invalid types in conversion to floating point");
3725 debug_generic_expr (lhs_type);
3726 debug_generic_expr (rhs1_type);
3727 return true;
3730 return false;
3733 case FIX_TRUNC_EXPR:
3735 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3736 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3737 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3739 error ("invalid types in conversion to integer");
3740 debug_generic_expr (lhs_type);
3741 debug_generic_expr (rhs1_type);
3742 return true;
3745 return false;
3747 case REDUC_MAX_EXPR:
3748 case REDUC_MIN_EXPR:
3749 case REDUC_PLUS_EXPR:
3750 if (!VECTOR_TYPE_P (rhs1_type)
3751 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3753 error ("reduction should convert from vector to element type");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 return true;
3758 return false;
3760 case VEC_UNPACK_HI_EXPR:
3761 case VEC_UNPACK_LO_EXPR:
3762 case VEC_UNPACK_FLOAT_HI_EXPR:
3763 case VEC_UNPACK_FLOAT_LO_EXPR:
3764 /* FIXME. */
3765 return false;
3767 case NEGATE_EXPR:
3768 case ABS_EXPR:
3769 case BIT_NOT_EXPR:
3770 case PAREN_EXPR:
3771 case CONJ_EXPR:
3772 break;
3774 default:
3775 gcc_unreachable ();
3778 /* For the remaining codes assert there is no conversion involved. */
3779 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3781 error ("non-trivial conversion in unary operation");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 return true;
3787 return false;
3790 /* Verify a gimple assignment statement STMT with a binary rhs.
3791 Returns true if anything is wrong. */
3793 static bool
3794 verify_gimple_assign_binary (gassign *stmt)
3796 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3797 tree lhs = gimple_assign_lhs (stmt);
3798 tree lhs_type = TREE_TYPE (lhs);
3799 tree rhs1 = gimple_assign_rhs1 (stmt);
3800 tree rhs1_type = TREE_TYPE (rhs1);
3801 tree rhs2 = gimple_assign_rhs2 (stmt);
3802 tree rhs2_type = TREE_TYPE (rhs2);
3804 if (!is_gimple_reg (lhs))
3806 error ("non-register as LHS of binary operation");
3807 return true;
3810 if (!is_gimple_val (rhs1)
3811 || !is_gimple_val (rhs2))
3813 error ("invalid operands in binary operation");
3814 return true;
3817 /* First handle operations that involve different types. */
3818 switch (rhs_code)
3820 case COMPLEX_EXPR:
3822 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3823 || !(INTEGRAL_TYPE_P (rhs1_type)
3824 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3825 || !(INTEGRAL_TYPE_P (rhs2_type)
3826 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3828 error ("type mismatch in complex expression");
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 debug_generic_expr (rhs2_type);
3832 return true;
3835 return false;
3838 case LSHIFT_EXPR:
3839 case RSHIFT_EXPR:
3840 case LROTATE_EXPR:
3841 case RROTATE_EXPR:
3843 /* Shifts and rotates are ok on integral types, fixed point
3844 types and integer vector types. */
3845 if ((!INTEGRAL_TYPE_P (rhs1_type)
3846 && !FIXED_POINT_TYPE_P (rhs1_type)
3847 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3848 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3849 || (!INTEGRAL_TYPE_P (rhs2_type)
3850 /* Vector shifts of vectors are also ok. */
3851 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3852 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3853 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3854 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3855 || !useless_type_conversion_p (lhs_type, rhs1_type))
3857 error ("type mismatch in shift expression");
3858 debug_generic_expr (lhs_type);
3859 debug_generic_expr (rhs1_type);
3860 debug_generic_expr (rhs2_type);
3861 return true;
3864 return false;
3867 case WIDEN_LSHIFT_EXPR:
3869 if (!INTEGRAL_TYPE_P (lhs_type)
3870 || !INTEGRAL_TYPE_P (rhs1_type)
3871 || TREE_CODE (rhs2) != INTEGER_CST
3872 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3874 error ("type mismatch in widening vector shift expression");
3875 debug_generic_expr (lhs_type);
3876 debug_generic_expr (rhs1_type);
3877 debug_generic_expr (rhs2_type);
3878 return true;
3881 return false;
3884 case VEC_WIDEN_LSHIFT_HI_EXPR:
3885 case VEC_WIDEN_LSHIFT_LO_EXPR:
3887 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3888 || TREE_CODE (lhs_type) != VECTOR_TYPE
3889 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3890 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3891 || TREE_CODE (rhs2) != INTEGER_CST
3892 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3893 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3895 error ("type mismatch in widening vector shift expression");
3896 debug_generic_expr (lhs_type);
3897 debug_generic_expr (rhs1_type);
3898 debug_generic_expr (rhs2_type);
3899 return true;
3902 return false;
3905 case PLUS_EXPR:
3906 case MINUS_EXPR:
3908 tree lhs_etype = lhs_type;
3909 tree rhs1_etype = rhs1_type;
3910 tree rhs2_etype = rhs2_type;
3911 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3913 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3914 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3916 error ("invalid non-vector operands to vector valued plus");
3917 return true;
3919 lhs_etype = TREE_TYPE (lhs_type);
3920 rhs1_etype = TREE_TYPE (rhs1_type);
3921 rhs2_etype = TREE_TYPE (rhs2_type);
3923 if (POINTER_TYPE_P (lhs_etype)
3924 || POINTER_TYPE_P (rhs1_etype)
3925 || POINTER_TYPE_P (rhs2_etype))
3927 error ("invalid (pointer) operands to plus/minus");
3928 return true;
3931 /* Continue with generic binary expression handling. */
3932 break;
3935 case POINTER_PLUS_EXPR:
3937 if (!POINTER_TYPE_P (rhs1_type)
3938 || !useless_type_conversion_p (lhs_type, rhs1_type)
3939 || !ptrofftype_p (rhs2_type))
3941 error ("type mismatch in pointer plus expression");
3942 debug_generic_stmt (lhs_type);
3943 debug_generic_stmt (rhs1_type);
3944 debug_generic_stmt (rhs2_type);
3945 return true;
3948 return false;
3951 case TRUTH_ANDIF_EXPR:
3952 case TRUTH_ORIF_EXPR:
3953 case TRUTH_AND_EXPR:
3954 case TRUTH_OR_EXPR:
3955 case TRUTH_XOR_EXPR:
3957 gcc_unreachable ();
3959 case LT_EXPR:
3960 case LE_EXPR:
3961 case GT_EXPR:
3962 case GE_EXPR:
3963 case EQ_EXPR:
3964 case NE_EXPR:
3965 case UNORDERED_EXPR:
3966 case ORDERED_EXPR:
3967 case UNLT_EXPR:
3968 case UNLE_EXPR:
3969 case UNGT_EXPR:
3970 case UNGE_EXPR:
3971 case UNEQ_EXPR:
3972 case LTGT_EXPR:
3973 /* Comparisons are also binary, but the result type is not
3974 connected to the operand types. */
3975 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3977 case WIDEN_MULT_EXPR:
3978 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3979 return true;
3980 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3981 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3983 case WIDEN_SUM_EXPR:
3984 case VEC_WIDEN_MULT_HI_EXPR:
3985 case VEC_WIDEN_MULT_LO_EXPR:
3986 case VEC_WIDEN_MULT_EVEN_EXPR:
3987 case VEC_WIDEN_MULT_ODD_EXPR:
3988 case VEC_PACK_TRUNC_EXPR:
3989 case VEC_PACK_SAT_EXPR:
3990 case VEC_PACK_FIX_TRUNC_EXPR:
3991 /* FIXME. */
3992 return false;
3994 case MULT_EXPR:
3995 case MULT_HIGHPART_EXPR:
3996 case TRUNC_DIV_EXPR:
3997 case CEIL_DIV_EXPR:
3998 case FLOOR_DIV_EXPR:
3999 case ROUND_DIV_EXPR:
4000 case TRUNC_MOD_EXPR:
4001 case CEIL_MOD_EXPR:
4002 case FLOOR_MOD_EXPR:
4003 case ROUND_MOD_EXPR:
4004 case RDIV_EXPR:
4005 case EXACT_DIV_EXPR:
4006 case MIN_EXPR:
4007 case MAX_EXPR:
4008 case BIT_IOR_EXPR:
4009 case BIT_XOR_EXPR:
4010 case BIT_AND_EXPR:
4011 /* Continue with generic binary expression handling. */
4012 break;
4014 default:
4015 gcc_unreachable ();
4018 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4019 || !useless_type_conversion_p (lhs_type, rhs2_type))
4021 error ("type mismatch in binary expression");
4022 debug_generic_stmt (lhs_type);
4023 debug_generic_stmt (rhs1_type);
4024 debug_generic_stmt (rhs2_type);
4025 return true;
4028 return false;
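/* For example, advancing a char *p_1 by four bytes must be written

     p.2_3 = p_1 + 4;    <-- POINTER_PLUS_EXPR with a sizetype offset

   whereas a PLUS_EXPR or MINUS_EXPR with a pointer-typed operand is
   rejected above.  */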
4031 /* Verify a gimple assignment statement STMT with a ternary rhs.
4032 Returns true if anything is wrong. */
4034 static bool
4035 verify_gimple_assign_ternary (gassign *stmt)
4037 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4038 tree lhs = gimple_assign_lhs (stmt);
4039 tree lhs_type = TREE_TYPE (lhs);
4040 tree rhs1 = gimple_assign_rhs1 (stmt);
4041 tree rhs1_type = TREE_TYPE (rhs1);
4042 tree rhs2 = gimple_assign_rhs2 (stmt);
4043 tree rhs2_type = TREE_TYPE (rhs2);
4044 tree rhs3 = gimple_assign_rhs3 (stmt);
4045 tree rhs3_type = TREE_TYPE (rhs3);
4047 if (!is_gimple_reg (lhs))
4049 error ("non-register as LHS of ternary operation");
4050 return true;
4053 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4054 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4055 || !is_gimple_val (rhs2)
4056 || !is_gimple_val (rhs3))
4058 error ("invalid operands in ternary operation");
4059 return true;
4062 /* First handle operations that involve different types. */
4063 switch (rhs_code)
4065 case WIDEN_MULT_PLUS_EXPR:
4066 case WIDEN_MULT_MINUS_EXPR:
4067 if ((!INTEGRAL_TYPE_P (rhs1_type)
4068 && !FIXED_POINT_TYPE_P (rhs1_type))
4069 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4070 || !useless_type_conversion_p (lhs_type, rhs3_type)
4071 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4072 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4074 error ("type mismatch in widening multiply-accumulate expression");
4075 debug_generic_expr (lhs_type);
4076 debug_generic_expr (rhs1_type);
4077 debug_generic_expr (rhs2_type);
4078 debug_generic_expr (rhs3_type);
4079 return true;
4081 break;
4083 case FMA_EXPR:
4084 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4085 || !useless_type_conversion_p (lhs_type, rhs2_type)
4086 || !useless_type_conversion_p (lhs_type, rhs3_type))
4088 error ("type mismatch in fused multiply-add expression");
4089 debug_generic_expr (lhs_type);
4090 debug_generic_expr (rhs1_type);
4091 debug_generic_expr (rhs2_type);
4092 debug_generic_expr (rhs3_type);
4093 return true;
4095 break;
4097 case VEC_COND_EXPR:
4098 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4099 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4100 != TYPE_VECTOR_SUBPARTS (lhs_type))
4102 error ("the first argument of a VEC_COND_EXPR must be of a "
4103 "boolean vector type of the same number of elements "
4104 "as the result");
4105 debug_generic_expr (lhs_type);
4106 debug_generic_expr (rhs1_type);
4107 return true;
4109 /* Fallthrough. */
4110 case COND_EXPR:
4111 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4112 || !useless_type_conversion_p (lhs_type, rhs3_type))
4114 error ("type mismatch in conditional expression");
4115 debug_generic_expr (lhs_type);
4116 debug_generic_expr (rhs2_type);
4117 debug_generic_expr (rhs3_type);
4118 return true;
4120 break;
4122 case VEC_PERM_EXPR:
4123 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4124 || !useless_type_conversion_p (lhs_type, rhs2_type))
4126 error ("type mismatch in vector permute expression");
4127 debug_generic_expr (lhs_type);
4128 debug_generic_expr (rhs1_type);
4129 debug_generic_expr (rhs2_type);
4130 debug_generic_expr (rhs3_type);
4131 return true;
4134 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4135 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4136 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4138 error ("vector types expected in vector permute expression");
4139 debug_generic_expr (lhs_type);
4140 debug_generic_expr (rhs1_type);
4141 debug_generic_expr (rhs2_type);
4142 debug_generic_expr (rhs3_type);
4143 return true;
4146 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4147 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4148 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4149 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4150 != TYPE_VECTOR_SUBPARTS (lhs_type))
4152 error ("vectors with different element number found "
4153 "in vector permute expression");
4154 debug_generic_expr (lhs_type);
4155 debug_generic_expr (rhs1_type);
4156 debug_generic_expr (rhs2_type);
4157 debug_generic_expr (rhs3_type);
4158 return true;
4161 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4162 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4163 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4165 error ("invalid mask type in vector permute expression");
4166 debug_generic_expr (lhs_type);
4167 debug_generic_expr (rhs1_type);
4168 debug_generic_expr (rhs2_type);
4169 debug_generic_expr (rhs3_type);
4170 return true;
4173 return false;
4175 case SAD_EXPR:
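/* For instance (illustrative names):
     acc_4 = SAD_EXPR <v1_1, v2_2, acc_3>;
   sums the absolute differences of the elements of v1_1 and v2_2
   into the wider accumulator acc_3, hence the unit-size check below.  */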
4176 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4177 || !useless_type_conversion_p (lhs_type, rhs3_type)
4178 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4179 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4181 error ("type mismatch in sad expression");
4182 debug_generic_expr (lhs_type);
4183 debug_generic_expr (rhs1_type);
4184 debug_generic_expr (rhs2_type);
4185 debug_generic_expr (rhs3_type);
4186 return true;
4189 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4190 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4191 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4193 error ("vector types expected in sad expression");
4194 debug_generic_expr (lhs_type);
4195 debug_generic_expr (rhs1_type);
4196 debug_generic_expr (rhs2_type);
4197 debug_generic_expr (rhs3_type);
4198 return true;
4201 return false;
4203 case BIT_INSERT_EXPR:
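/* For instance, replacing element 2 of a V4SI vector (illustrative):
     v_2 = BIT_INSERT_EXPR <v_1, s_3, 64>;
   where the constant 64 is the bit position, which for vectors must
   fall on an element boundary as verified below.  */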
4204 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4206 error ("type mismatch in BIT_INSERT_EXPR");
4207 debug_generic_expr (lhs_type);
4208 debug_generic_expr (rhs1_type);
4209 return true;
4211 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4212 && INTEGRAL_TYPE_P (rhs2_type))
4213 || (VECTOR_TYPE_P (rhs1_type)
4214 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4216 error ("not allowed type combination in BIT_INSERT_EXPR");
4217 debug_generic_expr (rhs1_type);
4218 debug_generic_expr (rhs2_type);
4219 return true;
4221 if (! tree_fits_uhwi_p (rhs3)
4222 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4224 error ("invalid position or size in BIT_INSERT_EXPR");
4225 return true;
4227 if (INTEGRAL_TYPE_P (rhs1_type))
4229 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4230 if (bitpos >= TYPE_PRECISION (rhs1_type)
4231 || (bitpos + TYPE_PRECISION (rhs2_type)
4232 > TYPE_PRECISION (rhs1_type)))
4234 error ("insertion out of range in BIT_INSERT_EXPR");
4235 return true;
4238 else if (VECTOR_TYPE_P (rhs1_type))
4240 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4241 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4242 if (bitpos % bitsize != 0)
4244 error ("vector insertion not at element boundary");
4245 return true;
4248 return false;
4250 case DOT_PROD_EXPR:
4251 case REALIGN_LOAD_EXPR:
4252 /* FIXME. */
4253 return false;
4255 default:
4256 gcc_unreachable ();
4258 return false;
4261 /* Verify a gimple assignment statement STMT with a single rhs.
4262 Returns true if anything is wrong. */
4264 static bool
4265 verify_gimple_assign_single (gassign *stmt)
4267 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4268 tree lhs = gimple_assign_lhs (stmt);
4269 tree lhs_type = TREE_TYPE (lhs);
4270 tree rhs1 = gimple_assign_rhs1 (stmt);
4271 tree rhs1_type = TREE_TYPE (rhs1);
4272 bool res = false;
4274 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4276 error ("non-trivial conversion at assignment");
4277 debug_generic_expr (lhs_type);
4278 debug_generic_expr (rhs1_type);
4279 return true;
4282 if (gimple_clobber_p (stmt)
4283 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4285 error ("non-decl/MEM_REF LHS in clobber statement");
4286 debug_generic_expr (lhs);
4287 return true;
4290 if (handled_component_p (lhs)
4291 || TREE_CODE (lhs) == MEM_REF
4292 || TREE_CODE (lhs) == TARGET_MEM_REF)
4293 res |= verify_types_in_gimple_reference (lhs, true);
4295 /* Special codes we cannot handle via their class. */
4296 switch (rhs_code)
4298 case ADDR_EXPR:
4300 tree op = TREE_OPERAND (rhs1, 0);
4301 if (!is_gimple_addressable (op))
4303 error ("invalid operand in unary expression");
4304 return true;
4307 /* Technically there is no longer a need for matching types, but
4308 gimple hygiene asks for this check. In LTO we can end up
4309 combining incompatible units and thus with addresses
4310 of globals that change their type to a common one. */
4311 if (!in_lto_p
4312 && !types_compatible_p (TREE_TYPE (op),
4313 TREE_TYPE (TREE_TYPE (rhs1)))
4314 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4315 TREE_TYPE (op)))
4317 error ("type mismatch in address expression");
4318 debug_generic_stmt (TREE_TYPE (rhs1));
4319 debug_generic_stmt (TREE_TYPE (op));
4320 return true;
4323 return verify_types_in_gimple_reference (op, true);
4326 /* tcc_reference */
4327 case INDIRECT_REF:
4328 error ("INDIRECT_REF in gimple IL");
4329 return true;
4331 case COMPONENT_REF:
4332 case BIT_FIELD_REF:
4333 case ARRAY_REF:
4334 case ARRAY_RANGE_REF:
4335 case VIEW_CONVERT_EXPR:
4336 case REALPART_EXPR:
4337 case IMAGPART_EXPR:
4338 case TARGET_MEM_REF:
4339 case MEM_REF:
4340 if (!is_gimple_reg (lhs)
4341 && is_gimple_reg_type (TREE_TYPE (lhs)))
4343 error ("invalid rhs for gimple memory store");
4344 debug_generic_stmt (lhs);
4345 debug_generic_stmt (rhs1);
4346 return true;
4348 return res || verify_types_in_gimple_reference (rhs1, false);
4350 /* tcc_constant */
4351 case SSA_NAME:
4352 case INTEGER_CST:
4353 case REAL_CST:
4354 case FIXED_CST:
4355 case COMPLEX_CST:
4356 case VECTOR_CST:
4357 case STRING_CST:
4358 return res;
4360 /* tcc_declaration */
4361 case CONST_DECL:
4362 return res;
4363 case VAR_DECL:
4364 case PARM_DECL:
4365 if (!is_gimple_reg (lhs)
4366 && !is_gimple_reg (rhs1)
4367 && is_gimple_reg_type (TREE_TYPE (lhs)))
4369 error ("invalid rhs for gimple memory store");
4370 debug_generic_stmt (lhs);
4371 debug_generic_stmt (rhs1);
4372 return true;
4374 return res;
4376 case CONSTRUCTOR:
4377 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4379 unsigned int i;
4380 tree elt_i, elt_v, elt_t = NULL_TREE;
4382 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4383 return res;
4384 /* For vector CONSTRUCTORs we require that either it is an empty
4385 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4386 (in which case the element count must be correct to cover the whole
4387 outer vector and the index must be NULL on all elements), or it is
4388 a CONSTRUCTOR of scalar elements, where as an exception we allow a
4389 smaller number of elements (assuming zero filling) and
4390 consecutive indexes as compared to NULL indexes (such
4391 CONSTRUCTORs can appear in the IL from FEs). */
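/* Forms accepted here for a V4SI left-hand side, for instance:
     _1 = {};                            (empty CONSTRUCTOR)
     _2 = { a_1, b_2, c_3, d_4 };        (scalar elements)
     _3 = { v_1, v_2 };                  (two V2SI elements)
   (illustrative; the operand names are made up).  */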
4392 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4394 if (elt_t == NULL_TREE)
4396 elt_t = TREE_TYPE (elt_v);
4397 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4399 tree elt_t = TREE_TYPE (elt_v);
4400 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4401 TREE_TYPE (elt_t)))
4403 error ("incorrect type of vector CONSTRUCTOR"
4404 " elements");
4405 debug_generic_stmt (rhs1);
4406 return true;
4408 else if (CONSTRUCTOR_NELTS (rhs1)
4409 * TYPE_VECTOR_SUBPARTS (elt_t)
4410 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4412 error ("incorrect number of vector CONSTRUCTOR"
4413 " elements");
4414 debug_generic_stmt (rhs1);
4415 return true;
4418 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4419 elt_t))
4421 error ("incorrect type of vector CONSTRUCTOR elements");
4422 debug_generic_stmt (rhs1);
4423 return true;
4425 else if (CONSTRUCTOR_NELTS (rhs1)
4426 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4428 error ("incorrect number of vector CONSTRUCTOR elements");
4429 debug_generic_stmt (rhs1);
4430 return true;
4433 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4435 error ("incorrect type of vector CONSTRUCTOR elements");
4436 debug_generic_stmt (rhs1);
4437 return true;
4439 if (elt_i != NULL_TREE
4440 && (TREE_CODE (elt_t) == VECTOR_TYPE
4441 || TREE_CODE (elt_i) != INTEGER_CST
4442 || compare_tree_int (elt_i, i) != 0))
4444 error ("vector CONSTRUCTOR with non-NULL element index");
4445 debug_generic_stmt (rhs1);
4446 return true;
4448 if (!is_gimple_val (elt_v))
4450 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4451 debug_generic_stmt (rhs1);
4452 return true;
4456 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4458 error ("non-vector CONSTRUCTOR with elements");
4459 debug_generic_stmt (rhs1);
4460 return true;
4462 return res;
4463 case OBJ_TYPE_REF:
4464 case ASSERT_EXPR:
4465 case WITH_SIZE_EXPR:
4466 /* FIXME. */
4467 return res;
4469 default:;
4472 return res;
4475 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4476 is a problem, otherwise false. */
4478 static bool
4479 verify_gimple_assign (gassign *stmt)
4481 switch (gimple_assign_rhs_class (stmt))
4483 case GIMPLE_SINGLE_RHS:
4484 return verify_gimple_assign_single (stmt);
4486 case GIMPLE_UNARY_RHS:
4487 return verify_gimple_assign_unary (stmt);
4489 case GIMPLE_BINARY_RHS:
4490 return verify_gimple_assign_binary (stmt);
4492 case GIMPLE_TERNARY_RHS:
4493 return verify_gimple_assign_ternary (stmt);
4495 default:
4496 gcc_unreachable ();
4500 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4501 is a problem, otherwise false. */
4503 static bool
4504 verify_gimple_return (greturn *stmt)
4506 tree op = gimple_return_retval (stmt);
4507 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4509 /* We cannot test for present return values as we do not fix up missing
4510 return values from the original source. */
4511 if (op == NULL)
4512 return false;
4514 if (!is_gimple_val (op)
4515 && TREE_CODE (op) != RESULT_DECL)
4517 error ("invalid operand in return statement");
4518 debug_generic_stmt (op);
4519 return true;
4522 if ((TREE_CODE (op) == RESULT_DECL
4523 && DECL_BY_REFERENCE (op))
4524 || (TREE_CODE (op) == SSA_NAME
4525 && SSA_NAME_VAR (op)
4526 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4527 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4528 op = TREE_TYPE (op);
4530 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4532 error ("invalid conversion in return statement");
4533 debug_generic_stmt (restype);
4534 debug_generic_stmt (TREE_TYPE (op));
4535 return true;
4538 return false;
4542 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4543 is a problem, otherwise false. */
4545 static bool
4546 verify_gimple_goto (ggoto *stmt)
4548 tree dest = gimple_goto_dest (stmt);
4550 /* ??? We have two canonical forms of direct goto destinations, a
4551 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
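/* E.g. a computed  goto p_1;  where p_1 was assigned  &lab  earlier
   reaches this check with a pointer-typed destination (illustrative).  */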
4552 if (TREE_CODE (dest) != LABEL_DECL
4553 && (!is_gimple_val (dest)
4554 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4556 error ("goto destination is neither a label nor a pointer");
4557 return true;
4560 return false;
4563 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4564 is a problem, otherwise false. */
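/* A well-formed case vector, e.g. (illustrative)
     switch (i_1) <default: L0, case 1: L1, case 3 ... 5: L2>
   has the default label first and the remaining labels sorted,
   non-overlapping, and of one common type.  */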
4566 static bool
4567 verify_gimple_switch (gswitch *stmt)
4569 unsigned int i, n;
4570 tree elt, prev_upper_bound = NULL_TREE;
4571 tree index_type, elt_type = NULL_TREE;
4573 if (!is_gimple_val (gimple_switch_index (stmt)))
4575 error ("invalid operand to switch statement");
4576 debug_generic_stmt (gimple_switch_index (stmt));
4577 return true;
4580 index_type = TREE_TYPE (gimple_switch_index (stmt));
4581 if (! INTEGRAL_TYPE_P (index_type))
4583 error ("non-integral type switch statement");
4584 debug_generic_expr (index_type);
4585 return true;
4588 elt = gimple_switch_label (stmt, 0);
4589 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4591 error ("invalid default case label in switch statement");
4592 debug_generic_expr (elt);
4593 return true;
4596 n = gimple_switch_num_labels (stmt);
4597 for (i = 1; i < n; i++)
4599 elt = gimple_switch_label (stmt, i);
4601 if (! CASE_LOW (elt))
4603 error ("invalid case label in switch statement");
4604 debug_generic_expr (elt);
4605 return true;
4607 if (CASE_HIGH (elt)
4608 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4610 error ("invalid case range in switch statement");
4611 debug_generic_expr (elt);
4612 return true;
4615 if (elt_type)
4617 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4618 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4620 error ("type mismatch for case label in switch statement");
4621 debug_generic_expr (elt);
4622 return true;
4625 else
4627 elt_type = TREE_TYPE (CASE_LOW (elt));
4628 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4630 error ("type precision mismatch in switch statement");
4631 return true;
4635 if (prev_upper_bound)
4637 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4639 error ("case labels not sorted in switch statement");
4640 return true;
4644 prev_upper_bound = CASE_HIGH (elt);
4645 if (! prev_upper_bound)
4646 prev_upper_bound = CASE_LOW (elt);
4649 return false;
4652 /* Verify a gimple debug statement STMT.
4653 Returns true if anything is wrong. */
4655 static bool
4656 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4658 /* There isn't much that could be wrong in a gimple debug stmt. A
4659 gimple debug bind stmt, for example, maps a tree (usually
4660 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4661 component or member of an aggregate type) to another tree that
4662 can be an arbitrary expression. These stmts expand into debug
4663 insns, and are converted to debug notes by var-tracking.c. */
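/* E.g. (illustrative)  # DEBUG x => a_1 + 1  binds the user variable
   x to the value of a_1 + 1 without generating any executable code.  */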
4664 return false;
4667 /* Verify a gimple label statement STMT.
4668 Returns true if anything is wrong. */
4670 static bool
4671 verify_gimple_label (glabel *stmt)
4673 tree decl = gimple_label_label (stmt);
4674 int uid;
4675 bool err = false;
4677 if (TREE_CODE (decl) != LABEL_DECL)
4678 return true;
4679 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4680 && DECL_CONTEXT (decl) != current_function_decl)
4682 error ("label's context is not the current function decl");
4683 err |= true;
4686 uid = LABEL_DECL_UID (decl);
4687 if (cfun->cfg
4688 && (uid == -1
4689 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4691 error ("incorrect entry in label_to_block_map");
4692 err |= true;
4695 uid = EH_LANDING_PAD_NR (decl);
4696 if (uid)
4698 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4699 if (decl != lp->post_landing_pad)
4701 error ("incorrect setting of landing pad number");
4702 err |= true;
4706 return err;
4709 /* Verify a gimple cond statement STMT.
4710 Returns true if anything is wrong. */
4712 static bool
4713 verify_gimple_cond (gcond *stmt)
4715 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4717 error ("invalid comparison code in gimple cond");
4718 return true;
4720 if (!(!gimple_cond_true_label (stmt)
4721 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4722 || !(!gimple_cond_false_label (stmt)
4723 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4725 error ("invalid labels in gimple cond");
4726 return true;
4729 return verify_gimple_comparison (boolean_type_node,
4730 gimple_cond_lhs (stmt),
4731 gimple_cond_rhs (stmt),
4732 gimple_cond_code (stmt));
4735 /* Verify the GIMPLE statement STMT. Returns true if there is an
4736 error, otherwise false. */
4738 static bool
4739 verify_gimple_stmt (gimple *stmt)
4741 switch (gimple_code (stmt))
4743 case GIMPLE_ASSIGN:
4744 return verify_gimple_assign (as_a <gassign *> (stmt));
4746 case GIMPLE_LABEL:
4747 return verify_gimple_label (as_a <glabel *> (stmt));
4749 case GIMPLE_CALL:
4750 return verify_gimple_call (as_a <gcall *> (stmt));
4752 case GIMPLE_COND:
4753 return verify_gimple_cond (as_a <gcond *> (stmt));
4755 case GIMPLE_GOTO:
4756 return verify_gimple_goto (as_a <ggoto *> (stmt));
4758 case GIMPLE_SWITCH:
4759 return verify_gimple_switch (as_a <gswitch *> (stmt));
4761 case GIMPLE_RETURN:
4762 return verify_gimple_return (as_a <greturn *> (stmt));
4764 case GIMPLE_ASM:
4765 return false;
4767 case GIMPLE_TRANSACTION:
4768 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4770 /* Tuples that do not have tree operands. */
4771 case GIMPLE_NOP:
4772 case GIMPLE_PREDICT:
4773 case GIMPLE_RESX:
4774 case GIMPLE_EH_DISPATCH:
4775 case GIMPLE_EH_MUST_NOT_THROW:
4776 return false;
4778 CASE_GIMPLE_OMP:
4779 /* OpenMP directives are validated by the FE and never operated
4780 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4781 non-gimple expressions when the main index variable has had
4782 its address taken. This does not affect the loop itself
4783 because the header of a GIMPLE_OMP_FOR is merely used to determine
4784 how to set up the parallel iteration. */
4785 return false;
4787 case GIMPLE_DEBUG:
4788 return verify_gimple_debug (stmt);
4790 default:
4791 gcc_unreachable ();
4795 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4796 and false otherwise. */
4798 static bool
4799 verify_gimple_phi (gimple *phi)
4801 bool err = false;
4802 unsigned i;
4803 tree phi_result = gimple_phi_result (phi);
4804 bool virtual_p;
4806 if (!phi_result)
4808 error ("invalid PHI result");
4809 return true;
4812 virtual_p = virtual_operand_p (phi_result);
4813 if (TREE_CODE (phi_result) != SSA_NAME
4814 || (virtual_p
4815 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4817 error ("invalid PHI result");
4818 err = true;
4821 for (i = 0; i < gimple_phi_num_args (phi); i++)
4823 tree t = gimple_phi_arg_def (phi, i);
4825 if (!t)
4827 error ("missing PHI def");
4828 err |= true;
4829 continue;
4831 /* Addressable variables do have SSA_NAMEs but they
4832 are not considered gimple values. */
4833 else if ((TREE_CODE (t) == SSA_NAME
4834 && virtual_p != virtual_operand_p (t))
4835 || (virtual_p
4836 && (TREE_CODE (t) != SSA_NAME
4837 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4838 || (!virtual_p
4839 && !is_gimple_val (t)))
4841 error ("invalid PHI argument");
4842 debug_generic_expr (t);
4843 err |= true;
4845 #ifdef ENABLE_TYPES_CHECKING
4846 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4848 error ("incompatible types in PHI argument %u", i);
4849 debug_generic_stmt (TREE_TYPE (phi_result));
4850 debug_generic_stmt (TREE_TYPE (t));
4851 err |= true;
4853 #endif
4856 return err;
4859 /* Verify the GIMPLE statements inside the sequence STMTS. */
4861 static bool
4862 verify_gimple_in_seq_2 (gimple_seq stmts)
4864 gimple_stmt_iterator ittr;
4865 bool err = false;
4867 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4869 gimple *stmt = gsi_stmt (ittr);
4871 switch (gimple_code (stmt))
4873 case GIMPLE_BIND:
4874 err |= verify_gimple_in_seq_2 (
4875 gimple_bind_body (as_a <gbind *> (stmt)));
4876 break;
4878 case GIMPLE_TRY:
4879 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4880 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4881 break;
4883 case GIMPLE_EH_FILTER:
4884 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4885 break;
4887 case GIMPLE_EH_ELSE:
4889 geh_else *eh_else = as_a <geh_else *> (stmt);
4890 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4891 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4893 break;
4895 case GIMPLE_CATCH:
4896 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4897 as_a <gcatch *> (stmt)));
4898 break;
4900 case GIMPLE_TRANSACTION:
4901 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4902 break;
4904 default:
4906 bool err2 = verify_gimple_stmt (stmt);
4907 if (err2)
4908 debug_gimple_stmt (stmt);
4909 err |= err2;
4914 return err;
4917 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4918 is a problem, otherwise false. */
4920 static bool
4921 verify_gimple_transaction (gtransaction *stmt)
4923 tree lab;
4925 lab = gimple_transaction_label_norm (stmt);
4926 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4927 return true;
4928 lab = gimple_transaction_label_uninst (stmt);
4929 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4930 return true;
4931 lab = gimple_transaction_label_over (stmt);
4932 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4933 return true;
4935 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4939 /* Verify the GIMPLE statements inside the statement list STMTS. */
4941 DEBUG_FUNCTION void
4942 verify_gimple_in_seq (gimple_seq stmts)
4944 timevar_push (TV_TREE_STMT_VERIFY);
4945 if (verify_gimple_in_seq_2 (stmts))
4946 internal_error ("verify_gimple failed");
4947 timevar_pop (TV_TREE_STMT_VERIFY);
4950 /* Return true when the tree node T can be shared. */
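/* For instance, the SSA_NAME a_1 or the constant 42 may safely appear
   as an operand of many statements, whereas a COMPONENT_REF tree must
   stay unshared so that rewriting it in one statement cannot silently
   change another (illustrative).  */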
4952 static bool
4953 tree_node_can_be_shared (tree t)
4955 if (IS_TYPE_OR_DECL_P (t)
4956 || is_gimple_min_invariant (t)
4957 || TREE_CODE (t) == SSA_NAME
4958 || t == error_mark_node
4959 || TREE_CODE (t) == IDENTIFIER_NODE)
4960 return true;
4962 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4963 return true;
4965 if (DECL_P (t))
4966 return true;
4968 return false;
4971 /* Called via walk_tree. Verify tree sharing. */
4973 static tree
4974 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4976 hash_set<void *> *visited = (hash_set<void *> *) data;
4978 if (tree_node_can_be_shared (*tp))
4980 *walk_subtrees = false;
4981 return NULL;
4984 if (visited->add (*tp))
4985 return *tp;
4987 return NULL;
4990 /* Called via walk_gimple_stmt. Verify tree sharing. */
4992 static tree
4993 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4995 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4996 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4999 static bool eh_error_found;
5000 bool
5001 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5002 hash_set<gimple *> *visited)
5004 if (!visited->contains (stmt))
5006 error ("dead STMT in EH table");
5007 debug_gimple_stmt (stmt);
5008 eh_error_found = true;
5010 return true;
5013 /* Verify that the block of location LOC is in BLOCKS. */
5015 static bool
5016 verify_location (hash_set<tree> *blocks, location_t loc)
5018 tree block = LOCATION_BLOCK (loc);
5019 if (block != NULL_TREE
5020 && !blocks->contains (block))
5022 error ("location references block not in block tree");
5023 return true;
5025 if (block != NULL_TREE)
5026 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5027 return false;
5030 /* Called via walk_tree. Verify that expressions have no blocks. */
5032 static tree
5033 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5035 if (!EXPR_P (*tp))
5037 *walk_subtrees = false;
5038 return NULL;
5041 location_t loc = EXPR_LOCATION (*tp);
5042 if (LOCATION_BLOCK (loc) != NULL)
5043 return *tp;
5045 return NULL;
5048 /* Called via walk_tree. Verify locations of expressions. */
5050 static tree
5051 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5053 hash_set<tree> *blocks = (hash_set<tree> *) data;
5055 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5057 tree t = DECL_DEBUG_EXPR (*tp);
5058 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5059 if (addr)
5060 return addr;
5062 if ((VAR_P (*tp)
5063 || TREE_CODE (*tp) == PARM_DECL
5064 || TREE_CODE (*tp) == RESULT_DECL)
5065 && DECL_HAS_VALUE_EXPR_P (*tp))
5067 tree t = DECL_VALUE_EXPR (*tp);
5068 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5069 if (addr)
5070 return addr;
5073 if (!EXPR_P (*tp))
5075 *walk_subtrees = false;
5076 return NULL;
5079 location_t loc = EXPR_LOCATION (*tp);
5080 if (verify_location (blocks, loc))
5081 return *tp;
5083 return NULL;
5086 /* Called via walk_gimple_op. Verify locations of expressions. */
5088 static tree
5089 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5091 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5092 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5095 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5097 static void
5098 collect_subblocks (hash_set<tree> *blocks, tree block)
5100 tree t;
5101 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5103 blocks->add (t);
5104 collect_subblocks (blocks, t);
5108 /* Verify the GIMPLE statements in the CFG of FN. */
5110 DEBUG_FUNCTION void
5111 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5113 basic_block bb;
5114 bool err = false;
5116 timevar_push (TV_TREE_STMT_VERIFY);
5117 hash_set<void *> visited;
5118 hash_set<gimple *> visited_stmts;
5120 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5121 hash_set<tree> blocks;
5122 if (DECL_INITIAL (fn->decl))
5124 blocks.add (DECL_INITIAL (fn->decl));
5125 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5128 FOR_EACH_BB_FN (bb, fn)
5130 gimple_stmt_iterator gsi;
5132 for (gphi_iterator gpi = gsi_start_phis (bb);
5133 !gsi_end_p (gpi);
5134 gsi_next (&gpi))
5136 gphi *phi = gpi.phi ();
5137 bool err2 = false;
5138 unsigned i;
5140 visited_stmts.add (phi);
5142 if (gimple_bb (phi) != bb)
5144 error ("gimple_bb (phi) is set to a wrong basic block");
5145 err2 = true;
5148 err2 |= verify_gimple_phi (phi);
5150 /* Only PHI arguments have locations. */
5151 if (gimple_location (phi) != UNKNOWN_LOCATION)
5153 error ("PHI node with location");
5154 err2 = true;
5157 for (i = 0; i < gimple_phi_num_args (phi); i++)
5159 tree arg = gimple_phi_arg_def (phi, i);
5160 tree addr = walk_tree (&arg, verify_node_sharing_1,
5161 &visited, NULL);
5162 if (addr)
5164 error ("incorrect sharing of tree nodes");
5165 debug_generic_expr (addr);
5166 err2 |= true;
5168 location_t loc = gimple_phi_arg_location (phi, i);
5169 if (virtual_operand_p (gimple_phi_result (phi))
5170 && loc != UNKNOWN_LOCATION)
5172 error ("virtual PHI with argument locations");
5173 err2 = true;
5175 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5176 if (addr)
5178 debug_generic_expr (addr);
5179 err2 = true;
5181 err2 |= verify_location (&blocks, loc);
5184 if (err2)
5185 debug_gimple_stmt (phi);
5186 err |= err2;
5189 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5191 gimple *stmt = gsi_stmt (gsi);
5192 bool err2 = false;
5193 struct walk_stmt_info wi;
5194 tree addr;
5195 int lp_nr;
5197 visited_stmts.add (stmt);
5199 if (gimple_bb (stmt) != bb)
5201 error ("gimple_bb (stmt) is set to a wrong basic block");
5202 err2 = true;
5205 err2 |= verify_gimple_stmt (stmt);
5206 err2 |= verify_location (&blocks, gimple_location (stmt));
5208 memset (&wi, 0, sizeof (wi));
5209 wi.info = (void *) &visited;
5210 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5211 if (addr)
5213 error ("incorrect sharing of tree nodes");
5214 debug_generic_expr (addr);
5215 err2 |= true;
5218 memset (&wi, 0, sizeof (wi));
5219 wi.info = (void *) &blocks;
5220 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5221 if (addr)
5223 debug_generic_expr (addr);
5224 err2 |= true;
5227 /* ??? Instead of not checking these stmts at all the walker
5228 should know its context via wi. */
5229 if (!is_gimple_debug (stmt)
5230 && !is_gimple_omp (stmt))
5232 memset (&wi, 0, sizeof (wi));
5233 addr = walk_gimple_op (stmt, verify_expr, &wi);
5234 if (addr)
5236 debug_generic_expr (addr);
5237 inform (gimple_location (stmt), "in statement");
5238 err2 |= true;
5242 /* If the statement is marked as part of an EH region, then it is
5243 expected that the statement could throw. Verify that when we
5244 have optimizations that simplify statements such that we prove
5245 that they cannot throw, that we update other data structures
5246 to match. */
5247 lp_nr = lookup_stmt_eh_lp (stmt);
5248 if (lp_nr > 0)
5250 if (!stmt_could_throw_p (stmt))
5252 if (verify_nothrow)
5254 error ("statement marked for throw, but doesn%'t");
5255 err2 |= true;
5258 else if (!gsi_one_before_end_p (gsi))
5260 error ("statement marked for throw in middle of block");
5261 err2 |= true;
5265 if (err2)
5266 debug_gimple_stmt (stmt);
5267 err |= err2;
5271 eh_error_found = false;
5272 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5273 if (eh_table)
5274 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5275 (&visited_stmts);
5277 if (err || eh_error_found)
5278 internal_error ("verify_gimple failed");
5280 verify_histograms ();
5281 timevar_pop (TV_TREE_STMT_VERIFY);
5285 /* Verifies that the flow information is OK. */
5287 static int
5288 gimple_verify_flow_info (void)
5290 int err = 0;
5291 basic_block bb;
5292 gimple_stmt_iterator gsi;
5293 gimple *stmt;
5294 edge e;
5295 edge_iterator ei;
5297 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5298 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5300 error ("ENTRY_BLOCK has IL associated with it");
5301 err = 1;
5304 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5305 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5307 error ("EXIT_BLOCK has IL associated with it");
5308 err = 1;
5311 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5312 if (e->flags & EDGE_FALLTHRU)
5314 error ("fallthru to exit from bb %d", e->src->index);
5315 err = 1;
5318 FOR_EACH_BB_FN (bb, cfun)
5320 bool found_ctrl_stmt = false;
5322 stmt = NULL;
5324 /* Skip labels on the start of basic block. */
5325 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5327 tree label;
5328 gimple *prev_stmt = stmt;
5330 stmt = gsi_stmt (gsi);
5332 if (gimple_code (stmt) != GIMPLE_LABEL)
5333 break;
5335 label = gimple_label_label (as_a <glabel *> (stmt));
5336 if (prev_stmt && DECL_NONLOCAL (label))
5338 error ("nonlocal label ");
5339 print_generic_expr (stderr, label, 0);
5340 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5341 bb->index);
5342 err = 1;
5345 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5347 error ("EH landing pad label ");
5348 print_generic_expr (stderr, label, 0);
5349 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5350 bb->index);
5351 err = 1;
5354 if (label_to_block (label) != bb)
5356 error ("label ");
5357 print_generic_expr (stderr, label, 0);
5358 fprintf (stderr, " to block does not match in bb %d",
5359 bb->index);
5360 err = 1;
5363 if (decl_function_context (label) != current_function_decl)
5365 error ("label ");
5366 print_generic_expr (stderr, label, 0);
5367 fprintf (stderr, " has incorrect context in bb %d",
5368 bb->index);
5369 err = 1;
5373 /* Verify that body of basic block BB is free of control flow. */
5374 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5376 gimple *stmt = gsi_stmt (gsi);
5378 if (found_ctrl_stmt)
5380 error ("control flow in the middle of basic block %d",
5381 bb->index);
5382 err = 1;
5385 if (stmt_ends_bb_p (stmt))
5386 found_ctrl_stmt = true;
5388 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5390 error ("label ");
5391 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5392 fprintf (stderr, " in the middle of basic block %d", bb->index);
5393 err = 1;
5397 gsi = gsi_last_bb (bb);
5398 if (gsi_end_p (gsi))
5399 continue;
5401 stmt = gsi_stmt (gsi);
5403 if (gimple_code (stmt) == GIMPLE_LABEL)
5404 continue;
5406 err |= verify_eh_edges (stmt);
5408 if (is_ctrl_stmt (stmt))
5410 FOR_EACH_EDGE (e, ei, bb->succs)
5411 if (e->flags & EDGE_FALLTHRU)
5413 error ("fallthru edge after a control statement in bb %d",
5414 bb->index);
5415 err = 1;
5419 if (gimple_code (stmt) != GIMPLE_COND)
5421 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5422 after anything other than a GIMPLE_COND statement. */
5423 FOR_EACH_EDGE (e, ei, bb->succs)
5424 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5426 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5427 bb->index);
5428 err = 1;
5432 switch (gimple_code (stmt))
5434 case GIMPLE_COND:
5436 edge true_edge;
5437 edge false_edge;
5439 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5441 if (!true_edge
5442 || !false_edge
5443 || !(true_edge->flags & EDGE_TRUE_VALUE)
5444 || !(false_edge->flags & EDGE_FALSE_VALUE)
5445 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5446 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5447 || EDGE_COUNT (bb->succs) >= 3)
5449 error ("wrong outgoing edge flags at end of bb %d",
5450 bb->index);
5451 err = 1;
5454 break;
5456 case GIMPLE_GOTO:
5457 if (simple_goto_p (stmt))
5459 error ("explicit goto at end of bb %d", bb->index);
5460 err = 1;
5462 else
5464 /* FIXME. We should double check that the labels in the
5465 destination blocks have their address taken. */
5466 FOR_EACH_EDGE (e, ei, bb->succs)
5467 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5468 | EDGE_FALSE_VALUE))
5469 || !(e->flags & EDGE_ABNORMAL))
5471 error ("wrong outgoing edge flags at end of bb %d",
5472 bb->index);
5473 err = 1;
5476 break;
5478 case GIMPLE_CALL:
5479 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5480 break;
5481 /* fallthru */
5482 case GIMPLE_RETURN:
5483 if (!single_succ_p (bb)
5484 || (single_succ_edge (bb)->flags
5485 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5486 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5488 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5489 err = 1;
5491 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5493 error ("return edge does not point to exit in bb %d",
5494 bb->index);
5495 err = 1;
5497 break;
5499 case GIMPLE_SWITCH:
5501 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5502 tree prev;
5503 edge e;
5504 size_t i, n;
5506 n = gimple_switch_num_labels (switch_stmt);
5508 /* Mark all the destination basic blocks. */
5509 for (i = 0; i < n; ++i)
5511 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5512 basic_block label_bb = label_to_block (lab);
5513 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5514 label_bb->aux = (void *)1;
5517 /* Verify that the case labels are sorted. */
5518 prev = gimple_switch_label (switch_stmt, 0);
5519 for (i = 1; i < n; ++i)
5521 tree c = gimple_switch_label (switch_stmt, i);
5522 if (!CASE_LOW (c))
5524 error ("found default case not at the start of "
5525 "case vector");
5526 err = 1;
5527 continue;
5529 if (CASE_LOW (prev)
5530 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5532 error ("case labels not sorted: ");
5533 print_generic_expr (stderr, prev, 0);
5534 fprintf (stderr," is greater than ");
5535 print_generic_expr (stderr, c, 0);
5536 fprintf (stderr," but comes before it.\n");
5537 err = 1;
5539 prev = c;
5541 /* VRP will remove the default case if it can prove it will
5542 never be executed. So do not verify there always exists
5543 a default case here. */
5545 FOR_EACH_EDGE (e, ei, bb->succs)
5547 if (!e->dest->aux)
5549 error ("extra outgoing edge %d->%d",
5550 bb->index, e->dest->index);
5551 err = 1;
5554 e->dest->aux = (void *)2;
5555 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5556 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5558 error ("wrong outgoing edge flags at end of bb %d",
5559 bb->index);
5560 err = 1;
5564 /* Check that we have all of them. */
5565 for (i = 0; i < n; ++i)
5567 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5568 basic_block label_bb = label_to_block (lab);
5570 if (label_bb->aux != (void *)2)
5572 error ("missing edge %i->%i", bb->index, label_bb->index);
5573 err = 1;
5577 FOR_EACH_EDGE (e, ei, bb->succs)
5578 e->dest->aux = (void *)0;
5580 break;
5582 case GIMPLE_EH_DISPATCH:
5583 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5584 break;
5586 default:
5587 break;
5591 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5592 verify_dominators (CDI_DOMINATORS);
5594 return err;
5598 /* Updates phi nodes after creating a forwarder block joined
5599 by edge FALLTHRU. */
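/* E.g. an original  x_1 = PHI <a_2(A), b_3(B)>  becomes (illustrative)
     x_4 = PHI <a_2(A), b_3(B)>       in the forwarder (dummy) block and
     x_1 = PHI <x_4(fallthru), ...>   at the start of BB.  */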
5601 static void
5602 gimple_make_forwarder_block (edge fallthru)
5604 edge e;
5605 edge_iterator ei;
5606 basic_block dummy, bb;
5607 tree var;
5608 gphi_iterator gsi;
5610 dummy = fallthru->src;
5611 bb = fallthru->dest;
5613 if (single_pred_p (bb))
5614 return;
5616 /* If we redirected a branch we must create new PHI nodes at the
5617 start of BB. */
5618 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5620 gphi *phi, *new_phi;
5622 phi = gsi.phi ();
5623 var = gimple_phi_result (phi);
5624 new_phi = create_phi_node (var, bb);
5625 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5626 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5627 UNKNOWN_LOCATION);
5630 /* Add the arguments we have stored on edges. */
5631 FOR_EACH_EDGE (e, ei, bb->preds)
5633 if (e == fallthru)
5634 continue;
5636 flush_pending_stmts (e);
5641 /* Return a non-special label at the head of basic block BB.
5642 Create one if it doesn't exist. */
5644 tree
5645 gimple_block_label (basic_block bb)
5647 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5648 bool first = true;
5649 tree label;
5650 glabel *stmt;
5652 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5654 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5655 if (!stmt)
5656 break;
5657 label = gimple_label_label (stmt);
5658 if (!DECL_NONLOCAL (label))
5660 if (!first)
5661 gsi_move_before (&i, &s);
5662 return label;
5666 label = create_artificial_label (UNKNOWN_LOCATION);
5667 stmt = gimple_build_label (label);
5668 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5669 return label;
5673 /* Attempt to perform edge redirection by replacing a possibly complex
5674 jump instruction by a goto or by removing the jump completely.
5675 This can apply only if all edges now point to the same block. The
5676 parameters and return values are equivalent to
5677 redirect_edge_and_branch. */
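/* E.g. if a block ends in  if (a_1 > 0) goto L1; else goto L2;  and
   both labels now denote TARGET, the GIMPLE_COND is dead: it is
   removed and E becomes a plain fallthru edge to TARGET
   (illustrative).  */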
5679 static edge
5680 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5682 basic_block src = e->src;
5683 gimple_stmt_iterator i;
5684 gimple *stmt;
5686 /* We can replace or remove a complex jump only when we have exactly
5687 two edges. */
5688 if (EDGE_COUNT (src->succs) != 2
5689 /* Verify that all targets will be TARGET. Specifically, the
5690 edge that is not E must also go to TARGET. */
5691 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5692 return NULL;
5694 i = gsi_last_bb (src);
5695 if (gsi_end_p (i))
5696 return NULL;
5698 stmt = gsi_stmt (i);
5700 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5702 gsi_remove (&i, true);
5703 e = ssa_redirect_edge (e, target);
5704 e->flags = EDGE_FALLTHRU;
5705 return e;
5708 return NULL;
5712 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5713 edge representing the redirected branch. */
5715 static edge
5716 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5718 basic_block bb = e->src;
5719 gimple_stmt_iterator gsi;
5720 edge ret;
5721 gimple *stmt;
5723 if (e->flags & EDGE_ABNORMAL)
5724 return NULL;
5726 if (e->dest == dest)
5727 return NULL;
5729 if (e->flags & EDGE_EH)
5730 return redirect_eh_edge (e, dest);
5732 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5734 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5735 if (ret)
5736 return ret;
5739 gsi = gsi_last_bb (bb);
5740 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5742 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5744 case GIMPLE_COND:
5745 /* For COND_EXPR, we only need to redirect the edge. */
5746 break;
5748 case GIMPLE_GOTO:
5749 /* No non-abnormal edges should lead from a non-simple goto, and
5750 simple ones should be represented implicitly. */
5751 gcc_unreachable ();
5753 case GIMPLE_SWITCH:
5755 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5756 tree label = gimple_block_label (dest);
5757 tree cases = get_cases_for_edge (e, switch_stmt);
5759 /* If we have a list of cases associated with E, then use it
5760 as it's a lot faster than walking the entire case vector. */
5761 if (cases)
5763 edge e2 = find_edge (e->src, dest);
5764 tree last, first;
5766 first = cases;
5767 while (cases)
5769 last = cases;
5770 CASE_LABEL (cases) = label;
5771 cases = CASE_CHAIN (cases);
5774 /* If there was already an edge in the CFG, then we need
5775 to move all the cases associated with E to E2. */
5776 if (e2)
5778 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5780 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5781 CASE_CHAIN (cases2) = first;
5783 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5785 else
5787 size_t i, n = gimple_switch_num_labels (switch_stmt);
5789 for (i = 0; i < n; i++)
5791 tree elt = gimple_switch_label (switch_stmt, i);
5792 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5793 CASE_LABEL (elt) = label;
5797 break;
5799 case GIMPLE_ASM:
5801 gasm *asm_stmt = as_a <gasm *> (stmt);
5802 int i, n = gimple_asm_nlabels (asm_stmt);
5803 tree label = NULL;
5805 for (i = 0; i < n; ++i)
5807 tree cons = gimple_asm_label_op (asm_stmt, i);
5808 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5810 if (!label)
5811 label = gimple_block_label (dest);
5812 TREE_VALUE (cons) = label;
5816 /* If we didn't find any label matching the former edge in the
5817 asm labels, we must be redirecting the fallthrough
5818 edge. */
5819 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5821 break;
5823 case GIMPLE_RETURN:
5824 gsi_remove (&gsi, true);
5825 e->flags |= EDGE_FALLTHRU;
5826 break;
5828 case GIMPLE_OMP_RETURN:
5829 case GIMPLE_OMP_CONTINUE:
5830 case GIMPLE_OMP_SECTIONS_SWITCH:
5831 case GIMPLE_OMP_FOR:
5832 /* The edges from OMP constructs can be simply redirected. */
5833 break;
5835 case GIMPLE_EH_DISPATCH:
5836 if (!(e->flags & EDGE_FALLTHRU))
5837 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5838 break;
5840 case GIMPLE_TRANSACTION:
5841 if (e->flags & EDGE_TM_ABORT)
5842 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5843 gimple_block_label (dest));
5844 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5845 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5846 gimple_block_label (dest));
5847 else
5848 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5849 gimple_block_label (dest));
5850 break;
5852 default:
5853 /* Otherwise it must be a fallthru edge, and we don't need to
5854 do anything besides redirecting it. */
5855 gcc_assert (e->flags & EDGE_FALLTHRU);
5856 break;
5859 /* Update/insert PHI nodes as necessary. */
5861 /* Now update the edges in the CFG. */
5862 e = ssa_redirect_edge (e, dest);
5864 return e;
5867 /* Returns true if it is possible to remove edge E by redirecting
5868 it to the destination of the other edge from E->src. */
5870 static bool
5871 gimple_can_remove_branch_p (const_edge e)
5873 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5874 return false;
5876 return true;
5879 /* Simple wrapper, as we can always redirect fallthru edges. */
5881 static basic_block
5882 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5884 e = gimple_redirect_edge_and_branch (e, dest);
5885 gcc_assert (e);
5887 return NULL;
5891 /* Splits basic block BB after statement STMT (but at least after the
5892 labels). If STMT is NULL, BB is split just after the labels. */
5894 static basic_block
5895 gimple_split_block (basic_block bb, void *stmt)
5897 gimple_stmt_iterator gsi;
5898 gimple_stmt_iterator gsi_tgt;
5899 gimple_seq list;
5900 basic_block new_bb;
5901 edge e;
5902 edge_iterator ei;
5904 new_bb = create_empty_bb (bb);
5906 /* Redirect the outgoing edges. */
5907 new_bb->succs = bb->succs;
5908 bb->succs = NULL;
5909 FOR_EACH_EDGE (e, ei, new_bb->succs)
5910 e->src = new_bb;
5912 /* Get a stmt iterator pointing to the first stmt to move. */
5913 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5914 gsi = gsi_after_labels (bb);
5915 else
5917 gsi = gsi_for_stmt ((gimple *) stmt);
5918 gsi_next (&gsi);
5921 /* Move everything from GSI to the new basic block. */
5922 if (gsi_end_p (gsi))
5923 return new_bb;
5925 /* Split the statement list - avoid re-creating new containers as this
5926 brings ugly quadratic memory consumption in the inliner.
5927 (We are still quadratic since we need to update stmt BB pointers,
5928 sadly.) */
5929 gsi_split_seq_before (&gsi, &list);
5930 set_bb_seq (new_bb, list);
5931 for (gsi_tgt = gsi_start (list);
5932 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5933 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5935 return new_bb;
5939 /* Moves basic block BB after block AFTER. */
5941 static bool
5942 gimple_move_block_after (basic_block bb, basic_block after)
5944 if (bb->prev_bb == after)
5945 return true;
5947 unlink_block (bb);
5948 link_block (bb, after);
5950 return true;
5954 /* Return TRUE if block BB has no executable statements, otherwise return
5955 FALSE. */
5957 static bool
5958 gimple_empty_block_p (basic_block bb)
5960 /* BB must have no executable statements. */
5961 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5962 if (phi_nodes (bb))
5963 return false;
5964 if (gsi_end_p (gsi))
5965 return true;
5966 if (is_gimple_debug (gsi_stmt (gsi)))
5967 gsi_next_nondebug (&gsi);
5968 return gsi_end_p (gsi);
5972 /* Split a basic block if it ends with a conditional branch and if the
5973 other part of the block is not empty. */
5975 static basic_block
5976 gimple_split_block_before_cond_jump (basic_block bb)
5978 gimple *last, *split_point;
5979 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5980 if (gsi_end_p (gsi))
5981 return NULL;
5982 last = gsi_stmt (gsi);
5983 if (gimple_code (last) != GIMPLE_COND
5984 && gimple_code (last) != GIMPLE_SWITCH)
5985 return NULL;
5986 gsi_prev (&gsi);
5987 split_point = gsi_stmt (gsi);
5988 return split_block (bb, split_point)->dest;
5992 /* Return true if basic block BB can be duplicated. */
5994 static bool
5995 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5997 return true;
6000 /* Create a duplicate of the basic block BB. NOTE: This does not
6001 preserve SSA form. */
6003 static basic_block
6004 gimple_duplicate_bb (basic_block bb)
6006 basic_block new_bb;
6007 gimple_stmt_iterator gsi_tgt;
6009 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6011 /* Copy the PHI nodes. We ignore PHI node arguments here because
6012 the incoming edges have not been set up yet. */
6013 for (gphi_iterator gpi = gsi_start_phis (bb);
6014 !gsi_end_p (gpi);
6015 gsi_next (&gpi))
6017 gphi *phi, *copy;
6018 phi = gpi.phi ();
6019 copy = create_phi_node (NULL_TREE, new_bb);
6020 create_new_def_for (gimple_phi_result (phi), copy,
6021 gimple_phi_result_ptr (copy));
6022 gimple_set_uid (copy, gimple_uid (phi));
6025 gsi_tgt = gsi_start_bb (new_bb);
6026 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6027 !gsi_end_p (gsi);
6028 gsi_next (&gsi))
6030 def_operand_p def_p;
6031 ssa_op_iter op_iter;
6032 tree lhs;
6033 gimple *stmt, *copy;
6035 stmt = gsi_stmt (gsi);
6036 if (gimple_code (stmt) == GIMPLE_LABEL)
6037 continue;
6039 /* Don't duplicate label debug stmts. */
6040 if (gimple_debug_bind_p (stmt)
6041 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6042 == LABEL_DECL)
6043 continue;
6045 /* Create a new copy of STMT and duplicate STMT's virtual
6046 operands. */
6047 copy = gimple_copy (stmt);
6048 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6050 maybe_duplicate_eh_stmt (copy, stmt);
6051 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6053 /* When copying around a stmt writing into a local non-user
6054 aggregate, make sure it won't share stack slot with other
6055 vars. */
6056 lhs = gimple_get_lhs (stmt);
6057 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6059 tree base = get_base_address (lhs);
6060 if (base
6061 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6062 && DECL_IGNORED_P (base)
6063 && !TREE_STATIC (base)
6064 && !DECL_EXTERNAL (base)
6065 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6066 DECL_NONSHAREABLE (base) = 1;
6069 /* Create new names for all the definitions created by COPY and
6070 add replacement mappings for each new name. */
6071 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6072 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6075 return new_bb;
6078 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6080 static void
6081 add_phi_args_after_copy_edge (edge e_copy)
6083 basic_block bb, bb_copy = e_copy->src, dest;
6084 edge e;
6085 edge_iterator ei;
6086 gphi *phi, *phi_copy;
6087 tree def;
6088 gphi_iterator psi, psi_copy;
6090 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6091 return;
6093 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6095 if (e_copy->dest->flags & BB_DUPLICATED)
6096 dest = get_bb_original (e_copy->dest);
6097 else
6098 dest = e_copy->dest;
6100 e = find_edge (bb, dest);
6101 if (!e)
6103 /* During loop unrolling the target of the latch edge is copied.
6104 In this case we are not looking for the edge to DEST, but for
6105 the edge to the duplicated block whose original was DEST. */
6106 FOR_EACH_EDGE (e, ei, bb->succs)
6108 if ((e->dest->flags & BB_DUPLICATED)
6109 && get_bb_original (e->dest) == dest)
6110 break;
6113 gcc_assert (e != NULL);
6116 for (psi = gsi_start_phis (e->dest),
6117 psi_copy = gsi_start_phis (e_copy->dest);
6118 !gsi_end_p (psi);
6119 gsi_next (&psi), gsi_next (&psi_copy))
6121 phi = psi.phi ();
6122 phi_copy = psi_copy.phi ();
6123 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6124 add_phi_arg (phi_copy, def, e_copy,
6125 gimple_phi_arg_location_from_edge (phi, e));
6130 /* Basic block BB_COPY was created by code duplication. Add phi node
6131 arguments for edges going out of BB_COPY. The blocks that were
6132 duplicated have BB_DUPLICATED set. */
6134 void
6135 add_phi_args_after_copy_bb (basic_block bb_copy)
6137 edge e_copy;
6138 edge_iterator ei;
6140 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6142 add_phi_args_after_copy_edge (e_copy);
6146 /* Blocks in REGION_COPY array of length N_REGION were created by
6147 duplication of basic blocks. Add phi node arguments for edges
6148 going from these blocks. If E_COPY is not NULL, also add
6149 phi node arguments for its destination. */
6151 void
6152 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6153 edge e_copy)
6155 unsigned i;
6157 for (i = 0; i < n_region; i++)
6158 region_copy[i]->flags |= BB_DUPLICATED;
6160 for (i = 0; i < n_region; i++)
6161 add_phi_args_after_copy_bb (region_copy[i]);
6162 if (e_copy)
6163 add_phi_args_after_copy_edge (e_copy);
6165 for (i = 0; i < n_region; i++)
6166 region_copy[i]->flags &= ~BB_DUPLICATED;
6169 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6170 important exit edge EXIT. By important we mean that no SSA name defined
6171 inside the region is live over the other exit edges of the region. All entry
6172 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6173 to the duplicate of the region. Dominance and loop information is
6174 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6175 UPDATE_DOMINANCE is false then we assume that the caller will update the
6176 dominance information after calling this function. The new basic
6177 blocks are stored to REGION_COPY in the same order as they appear in REGION,
6178 provided that REGION_COPY is not NULL.
6179 The function returns false if it is unable to copy the region,
6180 true otherwise. */
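/* The primary use, loop header copying, effectively rewrites

     while (cond) { body; }

   as

     if (cond) { do { body; } while (cond); }

   by duplicating the region consisting of the loop header
   (an illustrative sketch of the intent, not of the exact CFG).  */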
6182 bool
6183 gimple_duplicate_sese_region (edge entry, edge exit,
6184 basic_block *region, unsigned n_region,
6185 basic_block *region_copy,
6186 bool update_dominance)
6188 unsigned i;
6189 bool free_region_copy = false, copying_header = false;
6190 struct loop *loop = entry->dest->loop_father;
6191 edge exit_copy;
6192 vec<basic_block> doms;
6193 edge redirected;
6194 int total_freq = 0, entry_freq = 0;
6195 gcov_type total_count = 0, entry_count = 0;
6197 if (!can_copy_bbs_p (region, n_region))
6198 return false;
6200 /* Some sanity checking. Note that we do not check for all possible
6201 misuses of the functions. That is, if you ask to copy something weird,
6202 it will work, but the state of the structures probably will not be
6203 correct. */
6204 for (i = 0; i < n_region; i++)
6206 /* We do not handle subloops, i.e. all the blocks must belong to the
6207 same loop. */
6208 if (region[i]->loop_father != loop)
6209 return false;
6211 if (region[i] != entry->dest
6212 && region[i] == loop->header)
6213 return false;
6216 /* In case the function is used for loop header copying (which is the primary
6217 use), ensure that EXIT and its copy will be new latch and entry edges. */
6218 if (loop->header == entry->dest)
6220 copying_header = true;
6222 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6223 return false;
6225 for (i = 0; i < n_region; i++)
6226 if (region[i] != exit->src
6227 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6228 return false;
6231 initialize_original_copy_tables ();
6233 if (copying_header)
6234 set_loop_copy (loop, loop_outer (loop));
6235 else
6236 set_loop_copy (loop, loop);
6238 if (!region_copy)
6240 region_copy = XNEWVEC (basic_block, n_region);
6241 free_region_copy = true;
6244 /* Record blocks outside the region that are dominated by something
6245 inside. */
6246 if (update_dominance)
6248 doms.create (0);
6249 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6252 if (entry->dest->count)
6254 total_count = entry->dest->count;
6255 entry_count = entry->count;
6256 /* Fix up corner cases, to avoid division by zero or creation of negative
6257 frequencies. */
6258 if (entry_count > total_count)
6259 entry_count = total_count;
6261 else
6263 total_freq = entry->dest->frequency;
6264 entry_freq = EDGE_FREQUENCY (entry);
6265 /* Fix up corner cases, to avoid division by zero or creation of negative
6266 frequencies. */
6267 if (total_freq == 0)
6268 total_freq = 1;
6269 else if (entry_freq > total_freq)
6270 entry_freq = total_freq;
6273 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6274 split_edge_bb_loc (entry), update_dominance);
6275 if (total_count)
6277 scale_bbs_frequencies_gcov_type (region, n_region,
6278 total_count - entry_count,
6279 total_count);
6280 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6281 total_count);
6283 else
6285 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6286 total_freq);
6287 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6290 if (copying_header)
6292 loop->header = exit->dest;
6293 loop->latch = exit->src;
6296 /* Redirect the entry and add the phi node arguments. */
6297 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6298 gcc_assert (redirected != NULL);
6299 flush_pending_stmts (entry);
6301 /* Concerning updating of dominators: We must recount dominators
6302 for entry block and its copy. Anything that is outside of the
6303 region, but was dominated by something inside needs recounting as
6304 well. */
6305 if (update_dominance)
6307 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6308 doms.safe_push (get_bb_original (entry->dest));
6309 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6310 doms.release ();
6313 /* Add the other PHI node arguments. */
6314 add_phi_args_after_copy (region_copy, n_region, NULL);
6316 if (free_region_copy)
6317 free (region_copy);
6319 free_original_copy_tables ();
6320 return true;
6323 /* Checks if BB is part of the region defined by N_REGION BBS. */
6324 static bool
6325 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6327 unsigned int n;
6329 for (n = 0; n < n_region; n++)
6331 if (bb == bbs[n])
6332 return true;
6334 return false;
6337 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6338 are stored to REGION_COPY in the same order in which they appear
6339 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6340 the region, EXIT an exit from it. The condition guarding EXIT
6341 is moved to ENTRY. Returns true if duplication succeeds, false
6342 otherwise.
For example,

  some_code;
  if (cond)
    A;
  else
    B;

is transformed to

  if (cond)
    {
      some_code;
      A;
    }
  else
    {
      some_code;
      B;
    }
6366 bool
6367 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6368 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6369 basic_block *region_copy ATTRIBUTE_UNUSED)
6371 unsigned i;
6372 bool free_region_copy = false;
6373 struct loop *loop = exit->dest->loop_father;
6374 struct loop *orig_loop = entry->dest->loop_father;
6375 basic_block switch_bb, entry_bb, nentry_bb;
6376 vec<basic_block> doms;
6377 int total_freq = 0, exit_freq = 0;
6378 gcov_type total_count = 0, exit_count = 0;
6379 edge exits[2], nexits[2], e;
6380 gimple_stmt_iterator gsi;
6381 gimple *cond_stmt;
6382 edge sorig, snew;
6383 basic_block exit_bb;
6384 gphi_iterator psi;
6385 gphi *phi;
6386 tree def;
6387 struct loop *target, *aloop, *cloop;
6389 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6390 exits[0] = exit;
6391 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6393 if (!can_copy_bbs_p (region, n_region))
6394 return false;
6396 initialize_original_copy_tables ();
6397 set_loop_copy (orig_loop, loop);
6399 target = loop;
6400 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6402 if (bb_part_of_region_p (aloop->header, region, n_region))
6404 cloop = duplicate_loop (aloop, target);
6405 duplicate_subloops (aloop, cloop);
6409 if (!region_copy)
6411 region_copy = XNEWVEC (basic_block, n_region);
6412 free_region_copy = true;
6415 gcc_assert (!need_ssa_update_p (cfun));
6417 /* Record blocks outside the region that are dominated by something
6418 inside. */
6419 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6421 if (exit->src->count)
6423 total_count = exit->src->count;
6424 exit_count = exit->count;
6425 /* Fix up corner cases, to avoid division by zero or creation of negative
6426 frequencies. */
6427 if (exit_count > total_count)
6428 exit_count = total_count;
6430 else
6432 total_freq = exit->src->frequency;
6433 exit_freq = EDGE_FREQUENCY (exit);
6434 /* Fix up corner cases, to avoid division by zero or creation of negative
6435 frequencies. */
6436 if (total_freq == 0)
6437 total_freq = 1;
6438 if (exit_freq > total_freq)
6439 exit_freq = total_freq;
6442 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6443 split_edge_bb_loc (exit), true);
6444 if (total_count)
6446 scale_bbs_frequencies_gcov_type (region, n_region,
6447 total_count - exit_count,
6448 total_count);
6449 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6450 total_count);
6452 else
6454 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6455 total_freq);
6456 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6459 /* Create the switch block, and put the exit condition into it. */
6460 entry_bb = entry->dest;
6461 nentry_bb = get_bb_copy (entry_bb);
6462 if (!last_stmt (entry->src)
6463 || !stmt_ends_bb_p (last_stmt (entry->src)))
6464 switch_bb = entry->src;
6465 else
6466 switch_bb = split_edge (entry);
6467 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6469 gsi = gsi_last_bb (switch_bb);
6470 cond_stmt = last_stmt (exit->src);
6471 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6472 cond_stmt = gimple_copy (cond_stmt);
6474 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6476 sorig = single_succ_edge (switch_bb);
6477 sorig->flags = exits[1]->flags;
6478 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6480 /* Register the new edge from SWITCH_BB in loop exit lists. */
6481 rescan_loop_exit (snew, true, false);
6483 /* Add the PHI node arguments. */
6484 add_phi_args_after_copy (region_copy, n_region, snew);
6486 /* Get rid of now superfluous conditions and associated edges (and phi node
6487 arguments). */
6488 exit_bb = exit->dest;
6490 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6491 PENDING_STMT (e) = NULL;
6493 /* The latch of ORIG_LOOP was copied, and so was the backedge
6494 to the original header. We redirect this backedge to EXIT_BB. */
6495 for (i = 0; i < n_region; i++)
6496 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6498 gcc_assert (single_succ_edge (region_copy[i]));
6499 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6500 PENDING_STMT (e) = NULL;
6501 for (psi = gsi_start_phis (exit_bb);
6502 !gsi_end_p (psi);
6503 gsi_next (&psi))
6505 phi = psi.phi ();
6506 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6507 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6510 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6511 PENDING_STMT (e) = NULL;
6513 /* Anything outside of the region that was dominated by something
6514 inside needs its dominance info updated. */
6515 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6516 doms.release ();
6517 /* Update the SSA web. */
6518 update_ssa (TODO_update_ssa);
6520 if (free_region_copy)
6521 free (region_copy);
6523 free_original_copy_tables ();
6524 return true;
6527 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6528 adding blocks when the dominator traversal reaches EXIT. This
6529 function silently assumes that ENTRY strictly dominates EXIT. */
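/* For example (an illustration, not a case from this file): in a
   diamond ENTRY -> A, ENTRY -> B, A -> EXIT, B -> EXIT, the dominator
   sons of ENTRY are A, B and EXIT, so all three are pushed; the
   recursion stops at EXIT, leaving the blocks below it alone.  ENTRY
   itself is not pushed, so callers add it by hand.  */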
6531 void
6532 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6533 vec<basic_block> *bbs_p)
6535 basic_block son;
6537 for (son = first_dom_son (CDI_DOMINATORS, entry);
6538 son;
6539 son = next_dom_son (CDI_DOMINATORS, son))
6541 bbs_p->safe_push (son);
6542 if (son != exit)
6543 gather_blocks_in_sese_region (son, exit, bbs_p);
6547 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6548 The duplicates are recorded in VARS_MAP. */
6550 static void
6551 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6552 tree to_context)
6554 tree t = *tp, new_t;
6555 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6557 if (DECL_CONTEXT (t) == to_context)
6558 return;
6560 bool existed;
6561 tree &loc = vars_map->get_or_insert (t, &existed);
6563 if (!existed)
6565 if (SSA_VAR_P (t))
6567 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6568 add_local_decl (f, new_t);
6570 else
6572 gcc_assert (TREE_CODE (t) == CONST_DECL);
6573 new_t = copy_node (t);
6575 DECL_CONTEXT (new_t) = to_context;
6577 loc = new_t;
6579 else
6580 new_t = loc;
6582 *tp = new_t;
6586 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6587 VARS_MAP maps old ssa names and var_decls to the new ones. */
6589 static tree
6590 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6591 tree to_context)
6593 tree new_name;
6595 gcc_assert (!virtual_operand_p (name));
6597 tree *loc = vars_map->get (name);
6599 if (!loc)
6601 tree decl = SSA_NAME_VAR (name);
6602 if (decl)
6604 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6605 replace_by_duplicate_decl (&decl, vars_map, to_context);
6606 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6607 decl, SSA_NAME_DEF_STMT (name));
6609 else
6610 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6611 name, SSA_NAME_DEF_STMT (name));
6613 /* Now that we've used the def stmt to define new_name, make sure it
6614 doesn't define name anymore. */
6615 SSA_NAME_DEF_STMT (name) = NULL;
6617 vars_map->put (name, new_name);
6619 else
6620 new_name = *loc;
6622 return new_name;
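/* Bookkeeping shared by the move_stmt_op and move_stmt_r walkers
   below: the BLOCK rewrite (ORIG_BLOCK -> NEW_BLOCK), the source and
   destination function contexts, the decl/SSA-name replacement map,
   the label and EH-region maps, and whether local decls should be
   remapped at all.  */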
6625 struct move_stmt_d
6627 tree orig_block;
6628 tree new_block;
6629 tree from_context;
6630 tree to_context;
6631 hash_map<tree, tree> *vars_map;
6632 htab_t new_label_map;
6633 hash_map<void *, void *> *eh_map;
6634 bool remap_decls_p;
6637 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6638 contained in *TP whose block was previously ORIG_BLOCK, and change the
6639 DECL_CONTEXT of every local variable referenced in *TP. */
6641 static tree
6642 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6644 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6645 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6646 tree t = *tp;
6648 if (EXPR_P (t))
6650 tree block = TREE_BLOCK (t);
6651 if (block == NULL_TREE)
6653 else if (block == p->orig_block
6654 || p->orig_block == NULL_TREE)
6655 TREE_SET_BLOCK (t, p->new_block);
6656 else if (flag_checking)
6658 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6659 block = BLOCK_SUPERCONTEXT (block);
6660 gcc_assert (block == p->orig_block);
6663 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6665 if (TREE_CODE (t) == SSA_NAME)
6666 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6667 else if (TREE_CODE (t) == PARM_DECL
6668 && gimple_in_ssa_p (cfun))
6669 *tp = *(p->vars_map->get (t));
6670 else if (TREE_CODE (t) == LABEL_DECL)
6672 if (p->new_label_map)
6674 struct tree_map in, *out;
6675 in.base.from = t;
6676 out = (struct tree_map *)
6677 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6678 if (out)
6679 *tp = t = out->to;
6682 DECL_CONTEXT (t) = p->to_context;
6684 else if (p->remap_decls_p)
6686 /* Replace T with its duplicate. T should no longer appear in the
6687 parent function, so this looks wasteful; however, it may appear
6688 in referenced_vars, and more importantly, as virtual operands of
6689 statements, and in alias lists of other variables. It would be
6690 quite difficult to expunge it from all those places. ??? It might
6691 suffice to do this for addressable variables. */
6692 if ((VAR_P (t) && !is_global_var (t))
6693 || TREE_CODE (t) == CONST_DECL)
6694 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6696 *walk_subtrees = 0;
6698 else if (TYPE_P (t))
6699 *walk_subtrees = 0;
6701 return NULL_TREE;
6704 /* Helper for move_stmt_r. Given an EH region number for the source
6705 function, map that to the duplicate EH region number in the dest. */
6707 static int
6708 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6710 eh_region old_r, new_r;
6712 old_r = get_eh_region_from_number (old_nr);
6713 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6715 return new_r->index;
6718 /* Similar, but operate on INTEGER_CSTs. */
6720 static tree
6721 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6723 int old_nr, new_nr;
6725 old_nr = tree_to_shwi (old_t_nr);
6726 new_nr = move_stmt_eh_region_nr (old_nr, p);
6728 return build_int_cst (integer_type_node, new_nr);
6731 /* Like move_stmt_op, but for gimple statements.
6733 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6734 contained in the current statement in *GSI_P and change the
6735 DECL_CONTEXT of every local variable referenced in the current
6736 statement. */
6738 static tree
6739 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6740 struct walk_stmt_info *wi)
6742 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6743 gimple *stmt = gsi_stmt (*gsi_p);
6744 tree block = gimple_block (stmt);
6746 if (block == p->orig_block
6747 || (p->orig_block == NULL_TREE
6748 && block != NULL_TREE))
6749 gimple_set_block (stmt, p->new_block);
6751 switch (gimple_code (stmt))
6753 case GIMPLE_CALL:
6754 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6756 tree r, fndecl = gimple_call_fndecl (stmt);
6757 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6758 switch (DECL_FUNCTION_CODE (fndecl))
6760 case BUILT_IN_EH_COPY_VALUES:
6761 r = gimple_call_arg (stmt, 1);
6762 r = move_stmt_eh_region_tree_nr (r, p);
6763 gimple_call_set_arg (stmt, 1, r);
6764 /* FALLTHRU */
6766 case BUILT_IN_EH_POINTER:
6767 case BUILT_IN_EH_FILTER:
6768 r = gimple_call_arg (stmt, 0);
6769 r = move_stmt_eh_region_tree_nr (r, p);
6770 gimple_call_set_arg (stmt, 0, r);
6771 break;
6773 default:
6774 break;
6777 break;
6779 case GIMPLE_RESX:
6781 gresx *resx_stmt = as_a <gresx *> (stmt);
6782 int r = gimple_resx_region (resx_stmt);
6783 r = move_stmt_eh_region_nr (r, p);
6784 gimple_resx_set_region (resx_stmt, r);
6786 break;
6788 case GIMPLE_EH_DISPATCH:
6790 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6791 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6792 r = move_stmt_eh_region_nr (r, p);
6793 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6795 break;
6797 case GIMPLE_OMP_RETURN:
6798 case GIMPLE_OMP_CONTINUE:
6799 break;
6800 default:
6801 if (is_gimple_omp (stmt))
6803 /* Do not remap variables inside OMP directives. Variables
6804 referenced in clauses and directive header belong to the
6805 parent function and should not be moved into the child
6806 function. */
6807 bool save_remap_decls_p = p->remap_decls_p;
6808 p->remap_decls_p = false;
6809 *handled_ops_p = true;
6811 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6812 move_stmt_op, wi);
6814 p->remap_decls_p = save_remap_decls_p;
6816 break;
6819 return NULL_TREE;
6822 /* Move basic block BB from function CFUN to function DEST_FN. The
6823 block is moved out of the original linked list and placed after
6824 block AFTER in the new list. Also, the block is removed from the
6825 original array of blocks and placed in DEST_FN's array of blocks.
6826 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6827 updated to reflect the moved edges.
6829 The local variables are remapped to new instances, VARS_MAP is used
6830 to record the mapping. */
6832 static void
6833 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6834 basic_block after, bool update_edge_count_p,
6835 struct move_stmt_d *d)
6837 struct control_flow_graph *cfg;
6838 edge_iterator ei;
6839 edge e;
6840 gimple_stmt_iterator si;
6841 unsigned old_len, new_len;
6843 /* Remove BB from dominance structures. */
6844 delete_from_dominance_info (CDI_DOMINATORS, bb);
6846 /* Move BB from its current loop to the copy in the new function. */
6847 if (current_loops)
6849 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6850 if (new_loop)
6851 bb->loop_father = new_loop;
6854 /* Link BB to the new linked list. */
6855 move_block_after (bb, after);
6857 /* Update the edge count in the corresponding flowgraphs. */
6858 if (update_edge_count_p)
6859 FOR_EACH_EDGE (e, ei, bb->succs)
6861 cfun->cfg->x_n_edges--;
6862 dest_cfun->cfg->x_n_edges++;
6865 /* Remove BB from the original basic block array. */
6866 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6867 cfun->cfg->x_n_basic_blocks--;
6869 /* Grow DEST_CFUN's basic block array if needed. */
6870 cfg = dest_cfun->cfg;
6871 cfg->x_n_basic_blocks++;
6872 if (bb->index >= cfg->x_last_basic_block)
6873 cfg->x_last_basic_block = bb->index + 1;
6875 old_len = vec_safe_length (cfg->x_basic_block_info);
6876 if ((unsigned) cfg->x_last_basic_block >= old_len)
6878 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6879 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6882 (*cfg->x_basic_block_info)[bb->index] = bb;
6884 /* Remap the variables in phi nodes. */
6885 for (gphi_iterator psi = gsi_start_phis (bb);
6886 !gsi_end_p (psi); )
6888 gphi *phi = psi.phi ();
6889 use_operand_p use;
6890 tree op = PHI_RESULT (phi);
6891 ssa_op_iter oi;
6892 unsigned i;
6894 if (virtual_operand_p (op))
6896 /* Remove the phi nodes for virtual operands (alias analysis will be
6897 run for the new function, anyway). */
6898 remove_phi_node (&psi, true);
6899 continue;
6902 SET_PHI_RESULT (phi,
6903 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6904 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6906 op = USE_FROM_PTR (use);
6907 if (TREE_CODE (op) == SSA_NAME)
6908 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6911 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6913 location_t locus = gimple_phi_arg_location (phi, i);
6914 tree block = LOCATION_BLOCK (locus);
6916 if (locus == UNKNOWN_LOCATION)
6917 continue;
6918 if (d->orig_block == NULL_TREE || block == d->orig_block)
6920 locus = set_block (locus, d->new_block);
6921 gimple_phi_arg_set_location (phi, i, locus);
6925 gsi_next (&psi);
6928 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6930 gimple *stmt = gsi_stmt (si);
6931 struct walk_stmt_info wi;
6933 memset (&wi, 0, sizeof (wi));
6934 wi.info = d;
6935 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6937 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6939 tree label = gimple_label_label (label_stmt);
6940 int uid = LABEL_DECL_UID (label);
6942 gcc_assert (uid > -1);
6944 old_len = vec_safe_length (cfg->x_label_to_block_map);
6945 if (old_len <= (unsigned) uid)
6947 new_len = 3 * uid / 2 + 1;
6948 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6951 (*cfg->x_label_to_block_map)[uid] = bb;
6952 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6954 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6956 if (uid >= dest_cfun->cfg->last_label_uid)
6957 dest_cfun->cfg->last_label_uid = uid + 1;
6960 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6961 remove_stmt_from_eh_lp_fn (cfun, stmt);
6963 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6964 gimple_remove_stmt_histograms (cfun, stmt);
6966 /* We cannot leave any operands allocated from the operand caches of
6967 the current function. */
6968 free_stmt_operands (cfun, stmt);
6969 push_cfun (dest_cfun);
6970 update_stmt (stmt);
6971 pop_cfun ();
6974 FOR_EACH_EDGE (e, ei, bb->succs)
6975 if (e->goto_locus != UNKNOWN_LOCATION)
6977 tree block = LOCATION_BLOCK (e->goto_locus);
6978 if (d->orig_block == NULL_TREE
6979 || block == d->orig_block)
6980 e->goto_locus = set_block (e->goto_locus, d->new_block);
6984 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6985 the outermost EH region. Use REGION as the incoming base EH region. */
6987 static eh_region
6988 find_outermost_region_in_block (struct function *src_cfun,
6989 basic_block bb, eh_region region)
6991 gimple_stmt_iterator si;
6993 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6995 gimple *stmt = gsi_stmt (si);
6996 eh_region stmt_region;
6997 int lp_nr;
6999 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7000 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7001 if (stmt_region)
7003 if (region == NULL)
7004 region = stmt_region;
7005 else if (stmt_region != region)
7007 region = eh_region_outermost (src_cfun, stmt_region, region);
7008 gcc_assert (region != NULL);
7013 return region;
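/* Callback passed to duplicate_eh_regions when moving a region to a
   new function: create a fresh artificial label for LABEL_DECL DECL,
   record the mapping from DECL to the new label in the hash table
   DATA, and return the new label.  */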
7016 static tree
7017 new_label_mapper (tree decl, void *data)
7019 htab_t hash = (htab_t) data;
7020 struct tree_map *m;
7021 void **slot;
7023 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7025 m = XNEW (struct tree_map);
7026 m->hash = DECL_UID (decl);
7027 m->base.from = decl;
7028 m->to = create_artificial_label (UNKNOWN_LOCATION);
7029 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7030 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7031 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7033 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7034 gcc_assert (*slot == NULL);
7036 *slot = m;
7038 return m->to;
7041 /* Tree walker to replace the decls used inside value expressions by
7042 duplicates. */
7044 static tree
7045 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7047 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7049 switch (TREE_CODE (*tp))
7051 case VAR_DECL:
7052 case PARM_DECL:
7053 case RESULT_DECL:
7054 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7055 break;
7056 default:
7057 break;
7060 if (IS_TYPE_OR_DECL_P (*tp))
7061 *walk_subtrees = false;
7063 return NULL;
7066 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7067 subblocks. */
7069 static void
7070 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7071 tree to_context)
7073 tree *tp, t;
7075 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7077 t = *tp;
7078 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7079 continue;
7080 replace_by_duplicate_decl (&t, vars_map, to_context);
7081 if (t != *tp)
7083 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7085 tree x = DECL_VALUE_EXPR (*tp);
7086 struct replace_decls_d rd = { vars_map, to_context };
7087 x = unshare_expr (x);
7088 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7089 SET_DECL_VALUE_EXPR (t, x);
7090 DECL_HAS_VALUE_EXPR_P (t) = 1;
7092 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7093 *tp = t;
7097 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7098 replace_block_vars_by_duplicates (block, vars_map, to_context);
7101 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7102 from FN1 to FN2. */
7104 static void
7105 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7106 struct loop *loop)
7108 /* Discard it from the old loop array. */
7109 (*get_loops (fn1))[loop->num] = NULL;
7111 /* Place it in the new loop array, assigning it a new number. */
7112 loop->num = number_of_loops (fn2);
7113 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7115 /* Recurse to children. */
7116 for (loop = loop->inner; loop; loop = loop->next)
7117 fixup_loop_arrays_after_move (fn1, fn2, loop);
7120 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7121 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7123 DEBUG_FUNCTION void
7124 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7126 basic_block bb;
7127 edge_iterator ei;
7128 edge e;
7129 bitmap bbs = BITMAP_ALLOC (NULL);
7130 int i;
7132 gcc_assert (entry != NULL);
7133 gcc_assert (entry != exit);
7134 gcc_assert (bbs_p != NULL);
7136 gcc_assert (bbs_p->length () > 0);
7138 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7139 bitmap_set_bit (bbs, bb->index);
7141 gcc_assert (bitmap_bit_p (bbs, entry->index));
7142 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7144 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7146 if (bb == entry)
7148 gcc_assert (single_pred_p (entry));
7149 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7151 else
7152 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7154 e = ei_edge (ei);
7155 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7158 if (bb == exit)
7160 gcc_assert (single_succ_p (exit));
7161 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7163 else
7164 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7166 e = ei_edge (ei);
7167 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7171 BITMAP_FREE (bbs);
7174 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7176 bool
7177 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7179 bitmap release_names = (bitmap)data;
7181 if (TREE_CODE (from) != SSA_NAME)
7182 return true;
7184 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7185 return true;
7188 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7189 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7190 single basic block in the original CFG and the new basic block is
7191 returned. DEST_CFUN must not have a CFG yet.
7193 Note that the region need not be a pure SESE region. Blocks inside
7194 the region may contain calls to abort/exit. The only restriction
7195 is that ENTRY_BB should be the only entry point and it must
7196 dominate EXIT_BB.
7198 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7199 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7200 to the new function.
7202 All local variables referenced in the region are assumed to be in
7203 the corresponding BLOCK_VARS and unexpanded variable lists
7204 associated with DEST_CFUN.
7206 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7207 reimplement move_sese_region_to_fn by duplicating the region rather than
7208 moving it. */
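/* A minimal caller sketch (illustrative only; child_fndecl and the
   region bounds are assumptions, not taken from this file):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fndecl);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, region_entry, region_exit,
                                 NULL_TREE);

   On return, NEW_BB replaces the region in the original CFG and
   CHILD_CFUN owns the moved blocks; this is roughly the shape of use
   when OMP expansion outlines a parallel region.  */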
7210 basic_block
7211 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7212 basic_block exit_bb, tree orig_block)
7214 vec<basic_block> bbs, dom_bbs;
7215 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7216 basic_block after, bb, *entry_pred, *exit_succ, abb;
7217 struct function *saved_cfun = cfun;
7218 int *entry_flag, *exit_flag;
7219 unsigned *entry_prob, *exit_prob;
7220 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7221 edge e;
7222 edge_iterator ei;
7223 htab_t new_label_map;
7224 hash_map<void *, void *> *eh_map;
7225 struct loop *loop = entry_bb->loop_father;
7226 struct loop *loop0 = get_loop (saved_cfun, 0);
7227 struct move_stmt_d d;
7229 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7230 region. */
7231 gcc_assert (entry_bb != exit_bb
7232 && (!exit_bb
7233 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7235 /* Collect all the blocks in the region. Manually add ENTRY_BB
7236 because gather_blocks_in_sese_region will not add it. */
7237 bbs.create (0);
7238 bbs.safe_push (entry_bb);
7239 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7241 if (flag_checking)
7242 verify_sese (entry_bb, exit_bb, &bbs);
7244 /* The blocks that used to be dominated by something in BBS will now be
7245 dominated by the new block. */
7246 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7247 bbs.address (),
7248 bbs.length ());
7250 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7251 the predecessor edges to ENTRY_BB and the successor edges to
7252 EXIT_BB so that we can re-attach them to the new basic block that
7253 will replace the region. */
7254 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7255 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7256 entry_flag = XNEWVEC (int, num_entry_edges);
7257 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7258 i = 0;
7259 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7261 entry_prob[i] = e->probability;
7262 entry_flag[i] = e->flags;
7263 entry_pred[i++] = e->src;
7264 remove_edge (e);
7267 if (exit_bb)
7269 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7270 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7271 exit_flag = XNEWVEC (int, num_exit_edges);
7272 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7273 i = 0;
7274 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7276 exit_prob[i] = e->probability;
7277 exit_flag[i] = e->flags;
7278 exit_succ[i++] = e->dest;
7279 remove_edge (e);
7282 else
7284 num_exit_edges = 0;
7285 exit_succ = NULL;
7286 exit_flag = NULL;
7287 exit_prob = NULL;
7290 /* Switch context to the child function to initialize DEST_FN's CFG. */
7291 gcc_assert (dest_cfun->cfg == NULL);
7292 push_cfun (dest_cfun);
7294 init_empty_tree_cfg ();
7296 /* Initialize EH information for the new function. */
7297 eh_map = NULL;
7298 new_label_map = NULL;
7299 if (saved_cfun->eh)
7301 eh_region region = NULL;
7303 FOR_EACH_VEC_ELT (bbs, i, bb)
7304 region = find_outermost_region_in_block (saved_cfun, bb, region);
7306 init_eh_for_function ();
7307 if (region != NULL)
7309 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7310 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7311 new_label_mapper, new_label_map);
7315 /* Initialize an empty loop tree. */
7316 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7317 init_loops_structure (dest_cfun, loops, 1);
7318 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7319 set_loops_for_fn (dest_cfun, loops);
7321 /* Move the outlined loop tree part. */
7322 num_nodes = bbs.length ();
7323 FOR_EACH_VEC_ELT (bbs, i, bb)
7325 if (bb->loop_father->header == bb)
7327 struct loop *this_loop = bb->loop_father;
7328 struct loop *outer = loop_outer (this_loop);
7329 if (outer == loop
7330 /* If the SESE region contains some bbs ending with
7331 a noreturn call, those are considered to belong
7332 to the outermost loop in saved_cfun, rather than
7333 the entry_bb's loop_father. */
7334 || outer == loop0)
7336 if (outer != loop)
7337 num_nodes -= this_loop->num_nodes;
7338 flow_loop_tree_node_remove (bb->loop_father);
7339 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7340 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7343 else if (bb->loop_father == loop0 && loop0 != loop)
7344 num_nodes--;
7346 /* Remove loop exits from the outlined region. */
7347 if (loops_for_fn (saved_cfun)->exits)
7348 FOR_EACH_EDGE (e, ei, bb->succs)
7350 struct loops *l = loops_for_fn (saved_cfun);
7351 loop_exit **slot
7352 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7353 NO_INSERT);
7354 if (slot)
7355 l->exits->clear_slot (slot);
7360 /* Adjust the number of blocks in the tree root of the outlined part. */
7361 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7363 /* Set up a mapping to be used by move_block_to_fn. */
7364 loop->aux = current_loops->tree_root;
7365 loop0->aux = current_loops->tree_root;
7367 pop_cfun ();
7369 /* Move blocks from BBS into DEST_CFUN. */
7370 gcc_assert (bbs.length () >= 2);
7371 after = dest_cfun->cfg->x_entry_block_ptr;
7372 hash_map<tree, tree> vars_map;
7374 memset (&d, 0, sizeof (d));
7375 d.orig_block = orig_block;
7376 d.new_block = DECL_INITIAL (dest_cfun->decl);
7377 d.from_context = cfun->decl;
7378 d.to_context = dest_cfun->decl;
7379 d.vars_map = &vars_map;
7380 d.new_label_map = new_label_map;
7381 d.eh_map = eh_map;
7382 d.remap_decls_p = true;
7384 if (gimple_in_ssa_p (cfun))
7385 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7387 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7388 set_ssa_default_def (dest_cfun, arg, narg);
7389 vars_map.put (arg, narg);
7392 FOR_EACH_VEC_ELT (bbs, i, bb)
7394 /* No need to update edge counts on the last block. It has
7395 already been updated earlier when we detached the region from
7396 the original CFG. */
7397 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7398 after = bb;
7401 loop->aux = NULL;
7402 loop0->aux = NULL;
7403 /* Loop sizes are no longer correct, fix them up. */
7404 loop->num_nodes -= num_nodes;
7405 for (struct loop *outer = loop_outer (loop);
7406 outer; outer = loop_outer (outer))
7407 outer->num_nodes -= num_nodes;
7408 loop0->num_nodes -= bbs.length () - num_nodes;
7410 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7412 struct loop *aloop;
7413 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7414 if (aloop != NULL)
7416 if (aloop->simduid)
7418 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7419 d.to_context);
7420 dest_cfun->has_simduid_loops = true;
7422 if (aloop->force_vectorize)
7423 dest_cfun->has_force_vectorize_loops = true;
7427 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7428 if (orig_block)
7430 tree block;
7431 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7432 == NULL_TREE);
7433 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7434 = BLOCK_SUBBLOCKS (orig_block);
7435 for (block = BLOCK_SUBBLOCKS (orig_block);
7436 block; block = BLOCK_CHAIN (block))
7437 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7438 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7441 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7442 &vars_map, dest_cfun->decl);
7444 if (new_label_map)
7445 htab_delete (new_label_map);
7446 if (eh_map)
7447 delete eh_map;
7449 if (gimple_in_ssa_p (cfun))
7451 /* We need to release ssa-names in a defined order, so first find them,
7452 and then iterate in ascending version order. */
7453 bitmap release_names = BITMAP_ALLOC (NULL);
7454 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7455 bitmap_iterator bi;
7456 unsigned i;
7457 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7458 release_ssa_name (ssa_name (i));
7459 BITMAP_FREE (release_names);
7462 /* Rewire the entry and exit blocks. The successor to the entry
7463 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7464 the child function. Similarly, the predecessor of DEST_FN's
7465 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7466 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7467 various CFG manipulation functions get to the right CFG.
7469 FIXME, this is silly. The CFG ought to become a parameter to
7470 these helpers. */
7471 push_cfun (dest_cfun);
7472 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7473 if (exit_bb)
7474 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7475 pop_cfun ();
7477 /* Back in the original function, the SESE region has disappeared;
7478 create a new basic block in its place. */
7479 bb = create_empty_bb (entry_pred[0]);
7480 if (current_loops)
7481 add_bb_to_loop (bb, loop);
7482 for (i = 0; i < num_entry_edges; i++)
7484 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7485 e->probability = entry_prob[i];
7488 for (i = 0; i < num_exit_edges; i++)
7490 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7491 e->probability = exit_prob[i];
7494 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7495 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7496 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7497 dom_bbs.release ();
7499 if (exit_bb)
7501 free (exit_prob);
7502 free (exit_flag);
7503 free (exit_succ);
7505 free (entry_prob);
7506 free (entry_flag);
7507 free (entry_pred);
7508 bbs.release ();
7510 return bb;
7513 /* Dump default def DEF to file FILE using FLAGS and indentation
7514 SPC. */
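/* For instance (illustrative): for the default def of an int parameter
   n, the body below prints something like "  int n_1(D) = n;",
   preceded by whatever dump_ssaname_info_to_file emits for DEF.  */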
7516 static void
7517 dump_default_def (FILE *file, tree def, int spc, int flags)
7519 for (int i = 0; i < spc; ++i)
7520 fprintf (file, " ");
7521 dump_ssaname_info_to_file (file, def, spc);
7523 print_generic_expr (file, TREE_TYPE (def), flags);
7524 fprintf (file, " ");
7525 print_generic_expr (file, def, flags);
7526 fprintf (file, " = ");
7527 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7528 fprintf (file, ";\n");
7531 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7534 void
7535 dump_function_to_file (tree fndecl, FILE *file, int flags)
7537 tree arg, var, old_current_fndecl = current_function_decl;
7538 struct function *dsf;
7539 bool ignore_topmost_bind = false, any_var = false;
7540 basic_block bb;
7541 tree chain;
7542 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7543 && decl_is_tm_clone (fndecl));
7544 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7546 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7548 fprintf (file, "__attribute__((");
7550 bool first = true;
7551 tree chain;
7552 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7553 first = false, chain = TREE_CHAIN (chain))
7555 if (!first)
7556 fprintf (file, ", ");
7558 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7559 if (TREE_VALUE (chain) != NULL_TREE)
7561 fprintf (file, " (");
7562 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7563 fprintf (file, ")");
7567 fprintf (file, "))\n");
7570 current_function_decl = fndecl;
7571 if (flags & TDF_GIMPLE)
7573 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7574 dump_flags | TDF_SLIM);
7575 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7577 else
7578 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7580 arg = DECL_ARGUMENTS (fndecl);
7581 while (arg)
7583 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7584 fprintf (file, " ");
7585 print_generic_expr (file, arg, dump_flags);
7586 if (flags & TDF_VERBOSE)
7587 print_node (file, "", arg, 4);
7588 if (DECL_CHAIN (arg))
7589 fprintf (file, ", ");
7590 arg = DECL_CHAIN (arg);
7592 fprintf (file, ")\n");
7594 if (flags & TDF_VERBOSE)
7595 print_node (file, "", fndecl, 2);
7597 dsf = DECL_STRUCT_FUNCTION (fndecl);
7598 if (dsf && (flags & TDF_EH))
7599 dump_eh_tree (file, dsf);
7601 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7603 dump_node (fndecl, TDF_SLIM | flags, file);
7604 current_function_decl = old_current_fndecl;
7605 return;
7608 /* When GIMPLE is lowered, the variables are no longer available in
7609 BIND_EXPRs, so display them separately. */
7610 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7612 unsigned ix;
7613 ignore_topmost_bind = true;
7615 fprintf (file, "{\n");
7616 if (gimple_in_ssa_p (fun)
7617 && (flags & TDF_ALIAS))
7619 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7620 arg = DECL_CHAIN (arg))
7622 tree def = ssa_default_def (fun, arg);
7623 if (def)
7624 dump_default_def (file, def, 2, flags);
7627 tree res = DECL_RESULT (fun->decl);
7628 if (res != NULL_TREE
7629 && DECL_BY_REFERENCE (res))
7631 tree def = ssa_default_def (fun, res);
7632 if (def)
7633 dump_default_def (file, def, 2, flags);
7636 tree static_chain = fun->static_chain_decl;
7637 if (static_chain != NULL_TREE)
7639 tree def = ssa_default_def (fun, static_chain);
7640 if (def)
7641 dump_default_def (file, def, 2, flags);
7645 if (!vec_safe_is_empty (fun->local_decls))
7646 FOR_EACH_LOCAL_DECL (fun, ix, var)
7648 print_generic_decl (file, var, flags);
7649 if (flags & TDF_VERBOSE)
7650 print_node (file, "", var, 4);
7651 fprintf (file, "\n");
7653 any_var = true;
7656 tree name;
7658 if (gimple_in_ssa_p (cfun))
7659 FOR_EACH_SSA_NAME (ix, name, cfun)
7661 if (!SSA_NAME_VAR (name))
7663 fprintf (file, " ");
7664 print_generic_expr (file, TREE_TYPE (name), flags);
7665 fprintf (file, " ");
7666 print_generic_expr (file, name, flags);
7667 fprintf (file, ";\n");
7669 any_var = true;
7674 if (fun && fun->decl == fndecl
7675 && fun->cfg
7676 && basic_block_info_for_fn (fun))
7678 /* If the CFG has been built, emit a CFG-based dump. */
7679 if (!ignore_topmost_bind)
7680 fprintf (file, "{\n");
7682 if (any_var && n_basic_blocks_for_fn (fun))
7683 fprintf (file, "\n");
7685 FOR_EACH_BB_FN (bb, fun)
7686 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7688 fprintf (file, "}\n");
7690 else if (fun->curr_properties & PROP_gimple_any)
7692 /* The function is now in GIMPLE form but the CFG has not been
7693 built yet. Emit the single sequence of GIMPLE statements
7694 that make up its body. */
7695 gimple_seq body = gimple_body (fndecl);
7697 if (gimple_seq_first_stmt (body)
7698 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7699 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7700 print_gimple_seq (file, body, 0, flags);
7701 else
7703 if (!ignore_topmost_bind)
7704 fprintf (file, "{\n");
7706 if (any_var)
7707 fprintf (file, "\n");
7709 print_gimple_seq (file, body, 2, flags);
7710 fprintf (file, "}\n");
7713 else
7715 int indent;
7717 /* Make a tree-based dump. */
7718 chain = DECL_SAVED_TREE (fndecl);
7719 if (chain && TREE_CODE (chain) == BIND_EXPR)
7721 if (ignore_topmost_bind)
7723 chain = BIND_EXPR_BODY (chain);
7724 indent = 2;
7726 else
7727 indent = 0;
7729 else
7731 if (!ignore_topmost_bind)
7733 fprintf (file, "{\n");
7734 /* No topmost bind, pretend it's ignored for later. */
7735 ignore_topmost_bind = true;
7737 indent = 2;
7740 if (any_var)
7741 fprintf (file, "\n");
7743 print_generic_stmt_indented (file, chain, flags, indent);
7744 if (ignore_topmost_bind)
7745 fprintf (file, "}\n");
7748 if (flags & TDF_ENUMERATE_LOCALS)
7749 dump_enumerated_decls (file, flags);
7750 fprintf (file, "\n\n");
7752 current_function_decl = old_current_fndecl;
7755 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7757 DEBUG_FUNCTION void
7758 debug_function (tree fn, int flags)
7760 dump_function_to_file (fn, stderr, flags);
7764 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7766 static void
7767 print_pred_bbs (FILE *file, basic_block bb)
7769 edge e;
7770 edge_iterator ei;
7772 FOR_EACH_EDGE (e, ei, bb->preds)
7773 fprintf (file, "bb_%d ", e->src->index);
7777 /* Print on FILE the indexes for the successors of basic_block BB. */
7779 static void
7780 print_succ_bbs (FILE *file, basic_block bb)
7782 edge e;
7783 edge_iterator ei;
7785 FOR_EACH_EDGE (e, ei, bb->succs)
7786 fprintf (file, "bb_%d ", e->dest->index);
7789 /* Print to FILE the basic block BB following the VERBOSITY level. */
7791 void
7792 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7794 char *s_indent = (char *) alloca ((size_t) indent + 1);
7795 memset ((void *) s_indent, ' ', (size_t) indent);
7796 s_indent[indent] = '\0';
7798 /* Print basic_block's header. */
7799 if (verbosity >= 2)
7801 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7802 print_pred_bbs (file, bb);
7803 fprintf (file, "}, succs = {");
7804 print_succ_bbs (file, bb);
7805 fprintf (file, "})\n");
7808 /* Print basic_block's body. */
7809 if (verbosity >= 3)
7811 fprintf (file, "%s {\n", s_indent);
7812 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7813 fprintf (file, "%s }\n", s_indent);
7817 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7819 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7820 the VERBOSITY level, this outputs the contents of the loop, or just
7821 its structure. */
7823 static void
7824 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7826 char *s_indent;
7827 basic_block bb;
7829 if (loop == NULL)
7830 return;
7832 s_indent = (char *) alloca ((size_t) indent + 1);
7833 memset ((void *) s_indent, ' ', (size_t) indent);
7834 s_indent[indent] = '\0';
7836 /* Print loop's header. */
7837 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7838 if (loop->header)
7839 fprintf (file, "header = %d", loop->header->index);
7840 else
7842 fprintf (file, "deleted)\n");
7843 return;
7845 if (loop->latch)
7846 fprintf (file, ", latch = %d", loop->latch->index);
7847 else
7848 fprintf (file, ", multiple latches");
7849 fprintf (file, ", niter = ");
7850 print_generic_expr (file, loop->nb_iterations, 0);
7852 if (loop->any_upper_bound)
7854 fprintf (file, ", upper_bound = ");
7855 print_decu (loop->nb_iterations_upper_bound, file);
7857 if (loop->any_likely_upper_bound)
7859 fprintf (file, ", likely_upper_bound = ");
7860 print_decu (loop->nb_iterations_likely_upper_bound, file);
7863 if (loop->any_estimate)
7865 fprintf (file, ", estimate = ");
7866 print_decu (loop->nb_iterations_estimate, file);
7868 fprintf (file, ")\n");
7870 /* Print loop's body. */
7871 if (verbosity >= 1)
7873 fprintf (file, "%s{\n", s_indent);
7874 FOR_EACH_BB_FN (bb, cfun)
7875 if (bb->loop_father == loop)
7876 print_loops_bb (file, bb, indent, verbosity);
7878 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7879 fprintf (file, "%s}\n", s_indent);
7883 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7884 spaces. Depending on the VERBOSITY level, this outputs the contents
7885 of the loop, or just its structure. */
7887 static void
7888 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7889 int verbosity)
7891 if (loop == NULL)
7892 return;
7894 print_loop (file, loop, indent, verbosity);
7895 print_loop_and_siblings (file, loop->next, indent, verbosity);
7898 /* Follow a CFG edge from the entry point of the function, and on entry
7899 of a loop, pretty print the loop structure on FILE. */
7901 void
7902 print_loops (FILE *file, int verbosity)
7904 basic_block bb;
7906 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7907 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7908 if (bb && bb->loop_father)
7909 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7912 /* Dump a loop. */
7914 DEBUG_FUNCTION void
7915 debug (struct loop &ref)
7917 print_loop (stderr, &ref, 0, /*verbosity*/0);
7920 DEBUG_FUNCTION void
7921 debug (struct loop *ptr)
7923 if (ptr)
7924 debug (*ptr);
7925 else
7926 fprintf (stderr, "<nil>\n");
7929 /* Dump a loop verbosely. */
7931 DEBUG_FUNCTION void
7932 debug_verbose (struct loop &ref)
7934 print_loop (stderr, &ref, 0, /*verbosity*/3);
7937 DEBUG_FUNCTION void
7938 debug_verbose (struct loop *ptr)
7940 if (ptr)
7941 debug_verbose (*ptr);
7942 else
7943 fprintf (stderr, "<nil>\n");
7947 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7949 DEBUG_FUNCTION void
7950 debug_loops (int verbosity)
7952 print_loops (stderr, verbosity);
7955 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7957 DEBUG_FUNCTION void
7958 debug_loop (struct loop *loop, int verbosity)
7960 print_loop (stderr, loop, 0, verbosity);
7963 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7964 level. */
7966 DEBUG_FUNCTION void
7967 debug_loop_num (unsigned num, int verbosity)
7969 debug_loop (get_loop (cfun, num), verbosity);
7972 /* Return true if BB ends with a call, possibly followed by some
7973 instructions that must stay with the call. Return false
7974 otherwise. */
7976 static bool
7977 gimple_block_ends_with_call_p (basic_block bb)
7979 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7980 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7984 /* Return true if BB ends with a conditional branch. Return false
7985 otherwise. */
7987 static bool
7988 gimple_block_ends_with_condjump_p (const_basic_block bb)
7990 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7991 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7995 /* Return true if statement T may terminate execution of BB in ways not
7996 explicitly represented in the CFG. */
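/* For example (illustrative): a call to a non-looping const or pure
   function yields false below, while a call that may longjmp, a
   statement that may throw externally, or a volatile asm yields
   true.  */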
7998 bool
7999 stmt_can_terminate_bb_p (gimple *t)
8001 tree fndecl = NULL_TREE;
8002 int call_flags = 0;
8004 /* An EH exception not handled internally terminates execution of the
8005 whole function. */
8006 if (stmt_can_throw_external (t))
8007 return true;
8009 /* NORETURN and LONGJMP calls already have an edge to exit.
8010 CONST and PURE calls do not need one.
8011 We don't currently check for CONST and PURE here, although
8012 it would be a good idea, because those attributes are
8013 figured out from the RTL in mark_constant_function, and
8014 the counter incrementation code from -fprofile-arcs
8015 leads to different results from -fbranch-probabilities. */
8016 if (is_gimple_call (t))
8018 fndecl = gimple_call_fndecl (t);
8019 call_flags = gimple_call_flags (t);
8022 if (is_gimple_call (t)
8023 && fndecl
8024 && DECL_BUILT_IN (fndecl)
8025 && (call_flags & ECF_NOTHROW)
8026 && !(call_flags & ECF_RETURNS_TWICE)
8027 /* fork() doesn't really return twice, but wrapping it in
8028 __gcov_fork(), which calls __gcov_flush() and clears the
8029 counters before forking, has the same effect as returning
8030 twice. Force a fake edge. */
8031 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8032 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8033 return false;
8035 if (is_gimple_call (t))
8037 edge_iterator ei;
8038 edge e;
8039 basic_block bb;
8041 if (call_flags & (ECF_PURE | ECF_CONST)
8042 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8043 return false;
8045 /* A function call may do a longjmp, terminate the program, or do other
8046 things. Special-case noreturn calls with non-abnormal outgoing edges,
8047 as there the fact is sufficiently represented by the lack of edges out of T. */
8048 if (!(call_flags & ECF_NORETURN))
8049 return true;
8051 bb = gimple_bb (t);
8052 FOR_EACH_EDGE (e, ei, bb->succs)
8053 if ((e->flags & EDGE_FAKE) == 0)
8054 return true;
8057 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8058 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8059 return true;
8061 return false;
8065 /* Add fake edges to the function exit for any non-constant and
8066 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8067 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8068 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8069 that were split.
8071 The goal is to expose cases in which entering a basic block does
8072 not imply that all subsequent instructions must be executed. */
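/* Sketch of the idea (an illustration, not code from this file): in

     void f (void) { g (); h (); }

   entering f's basic block does not guarantee that h () executes,
   since g () may call exit () or longjmp.  The fake edge added below
   from that block to EXIT records this possibility for the block
   profiler.  */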
8074 static int
8075 gimple_flow_call_edges_add (sbitmap blocks)
8077 int i;
8078 int blocks_split = 0;
8079 int last_bb = last_basic_block_for_fn (cfun);
8080 bool check_last_block = false;
8082 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8083 return 0;
8085 if (! blocks)
8086 check_last_block = true;
8087 else
8088 check_last_block = bitmap_bit_p (blocks,
8089 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8091 /* In the last basic block, before epilogue generation, there will be
8092 a fallthru edge to EXIT. Special care is required if the last insn
8093 of the last basic block is a call because make_edge folds duplicate
8094 edges, which would result in the fallthru edge also being marked
8095 fake, which would result in the fallthru edge being removed by
8096 remove_fake_edges, which would result in an invalid CFG.
8098 Moreover, we can't elide the outgoing fake edge, since the block
8099 profiler needs to take this into account in order to solve the minimal
8100 spanning tree in the case that the call doesn't return.
8102 Handle this by adding a dummy instruction in a new last basic block. */
8103 if (check_last_block)
8105 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8106 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8107 gimple *t = NULL;
8109 if (!gsi_end_p (gsi))
8110 t = gsi_stmt (gsi);
8112 if (t && stmt_can_terminate_bb_p (t))
8114 edge e;
8116 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8117 if (e)
8119 gsi_insert_on_edge (e, gimple_build_nop ());
8120 gsi_commit_edge_inserts ();
8125 /* Now add fake edges to the function exit for any non-constant
8126 calls, since there is no way we can determine whether they will
8127 return or not... */
8128 for (i = 0; i < last_bb; i++)
8130 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8131 gimple_stmt_iterator gsi;
8132 gimple *stmt, *last_stmt;
8134 if (!bb)
8135 continue;
8137 if (blocks && !bitmap_bit_p (blocks, i))
8138 continue;
8140 gsi = gsi_last_nondebug_bb (bb);
8141 if (!gsi_end_p (gsi))
8143 last_stmt = gsi_stmt (gsi);
8144 do
8145 {
8146 stmt = gsi_stmt (gsi);
8147 if (stmt_can_terminate_bb_p (stmt))
8149 edge e;
8151 /* The handling above of the final block before the
8152 epilogue should be enough to verify that there is
8153 no edge to the exit block in the CFG already.
8154 Calling make_edge in such a case would cause us to
8155 mark that edge as fake and remove it later. */
8156 if (flag_checking && stmt == last_stmt)
8158 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8159 gcc_assert (e == NULL);
8162 /* Note that the following may create a new basic block
8163 and renumber the existing basic blocks. */
8164 if (stmt != last_stmt)
8166 e = split_block (bb, stmt);
8167 if (e)
8168 blocks_split++;
8170 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8172 gsi_prev (&gsi);
8174 while (!gsi_end_p (gsi));
8178 if (blocks_split)
8179 verify_flow_info ();
8181 return blocks_split;
8184 /* Removes edge E and all the blocks dominated by it, and updates dominance
8185 information. The IL in E->src needs to be updated separately.
8186 If dominance info is not available, only the edge E is removed. */
8188 void
8189 remove_edge_and_dominated_blocks (edge e)
8191 vec<basic_block> bbs_to_remove = vNULL;
8192 vec<basic_block> bbs_to_fix_dom = vNULL;
8193 bitmap df, df_idom;
8194 edge f;
8195 edge_iterator ei;
8196 bool none_removed = false;
8197 unsigned i;
8198 basic_block bb, dbb;
8199 bitmap_iterator bi;
8201 /* If we are removing a path inside a non-root loop, that may change
8202 loop ownership of blocks or remove loops; mark loops for fixup. */
8203 if (current_loops
8204 && loop_outer (e->src->loop_father) != NULL
8205 && e->src->loop_father == e->dest->loop_father)
8206 loops_state_set (LOOPS_NEED_FIXUP);
8208 if (!dom_info_available_p (CDI_DOMINATORS))
8210 remove_edge (e);
8211 return;
8214 /* No updating is needed for edges to exit. */
8215 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8217 if (cfgcleanup_altered_bbs)
8218 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8219 remove_edge (e);
8220 return;
8223 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8224 that is not dominated by E->dest, then this set is empty. Otherwise,
8225 all the basic blocks dominated by E->dest are removed.
8227 Also, to DF_IDOM we store the immediate dominators of the blocks in
8228 the dominance frontier of E (i.e., of the successors of the
8229 removed blocks, if there are any, and of E->dest otherwise). */
8230 FOR_EACH_EDGE (f, ei, e->dest->preds)
8232 if (f == e)
8233 continue;
8235 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8237 none_removed = true;
8238 break;
8242 df = BITMAP_ALLOC (NULL);
8243 df_idom = BITMAP_ALLOC (NULL);
8245 if (none_removed)
8246 bitmap_set_bit (df_idom,
8247 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8248 else
8250 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8251 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8253 FOR_EACH_EDGE (f, ei, bb->succs)
8255 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8256 bitmap_set_bit (df, f->dest->index);
8259 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8260 bitmap_clear_bit (df, bb->index);
8262 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8264 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8265 bitmap_set_bit (df_idom,
8266 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8270 if (cfgcleanup_altered_bbs)
8272 /* Record the set of the altered basic blocks. */
8273 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8274 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8277 /* Remove E and the cancelled blocks. */
8278 if (none_removed)
8279 remove_edge (e);
8280 else
8282 /* Walk backwards so as to get a chance to substitute all
8283 released DEFs into debug stmts. See
8284 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8285 details. */
8286 for (i = bbs_to_remove.length (); i-- > 0; )
8287 delete_basic_block (bbs_to_remove[i]);
8290 /* Update the dominance information. The immediate dominator may change only
8291 for blocks whose immediate dominator belongs to DF_IDOM:
8293 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8294 removal. Let Z be the block such that idom(Z) = Y and
8295 Z dominates X after the removal. Before removal, there exists a path P
8296 from Y to X that avoids Z. Let F be the last edge on P that is
8297 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8298 dominates W, and because of P, Z does not dominate W), and W belongs to
8299 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
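/* A concrete instance (illustrative): with edges Y->Z, Z->X, Y->W and
   W->X, removing E = Y->W removes W (it is dominated by E->dest), X
   lands in the dominance frontier of the removed set, and its old
   immediate dominator Y is stored in DF_IDOM; the fixup below then
   recomputes idom(X) = Z.  */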
8300 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8302 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8303 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8304 dbb;
8305 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8306 bbs_to_fix_dom.safe_push (dbb);
8309 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8311 BITMAP_FREE (df);
8312 BITMAP_FREE (df_idom);
8313 bbs_to_remove.release ();
8314 bbs_to_fix_dom.release ();
8317 /* Purge dead EH edges from basic block BB. */
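/* For example (illustrative): if the last statement of BB has been
   folded into something that can no longer throw, any EDGE_EH
   successor edge is now dead; the loop below removes it together
   with whatever blocks it alone dominated.  */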
8319 bool
8320 gimple_purge_dead_eh_edges (basic_block bb)
8322 bool changed = false;
8323 edge e;
8324 edge_iterator ei;
8325 gimple *stmt = last_stmt (bb);
8327 if (stmt && stmt_can_throw_internal (stmt))
8328 return false;
8330 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8332 if (e->flags & EDGE_EH)
8334 remove_edge_and_dominated_blocks (e);
8335 changed = true;
8337 else
8338 ei_next (&ei);
8341 return changed;
8344 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8346 bool
8347 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8349 bool changed = false;
8350 unsigned i;
8351 bitmap_iterator bi;
8353 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8355 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8357 /* Earlier gimple_purge_dead_eh_edges could have removed
8358 this basic block already. */
8359 gcc_assert (bb || changed);
8360 if (bb != NULL)
8361 changed |= gimple_purge_dead_eh_edges (bb);
8364 return changed;
8367 /* Purge dead abnormal call edges from basic block BB. */
8369 bool
8370 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8372 bool changed = false;
8373 edge e;
8374 edge_iterator ei;
8375 gimple *stmt = last_stmt (bb);
8377 if (!cfun->has_nonlocal_label
8378 && !cfun->calls_setjmp)
8379 return false;
8381 if (stmt && stmt_can_make_abnormal_goto (stmt))
8382 return false;
8384 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8386 if (e->flags & EDGE_ABNORMAL)
8388 if (e->flags & EDGE_FALLTHRU)
8389 e->flags &= ~EDGE_ABNORMAL;
8390 else
8391 remove_edge_and_dominated_blocks (e);
8392 changed = true;
8394 else
8395 ei_next (&ei);
8398 return changed;
8401 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8403 bool
8404 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8406 bool changed = false;
8407 unsigned i;
8408 bitmap_iterator bi;
8410 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8412 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8414 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8415 this basic block already. */
8416 gcc_assert (bb || changed);
8417 if (bb != NULL)
8418 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8421 return changed;
8424 /* This function is called whenever a new edge is created or
8425 redirected. */
8427 static void
8428 gimple_execute_on_growing_pred (edge e)
8430 basic_block bb = e->dest;
8432 if (!gimple_seq_empty_p (phi_nodes (bb)))
8433 reserve_phi_args_for_new_edge (bb);
8436 /* This function is called immediately before edge E is removed from
8437 the edge vector E->dest->preds. */
8439 static void
8440 gimple_execute_on_shrinking_pred (edge e)
8442 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8443 remove_phi_args (e);
8446 /*---------------------------------------------------------------------------
8447 Helper functions for Loop versioning
8448 ---------------------------------------------------------------------------*/
8450 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8451 of 'first'. Both of them are dominated by 'new_head' basic block. When
8452 'new_head' was created by splitting 'second's incoming edge, 'second'
8453 received phi arguments on that edge from split_edge(). Later, an
8454 additional edge 'e' was created to connect 'new_head' and 'first'. This
8455 routine now adds, on the additional edge 'e', the phi args that the
8456 'new_head' to 'second' edge received as part of the edge splitting. */
8458 static void
8459 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8460 basic_block new_head, edge e)
8462 gphi *phi1, *phi2;
8463 gphi_iterator psi1, psi2;
8464 tree def;
8465 edge e2 = find_edge (new_head, second);
8467 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8468 edge, we should always have an edge from NEW_HEAD to SECOND. */
8469 gcc_assert (e2 != NULL);
8471 /* Walk the phi nodes of basic block 'second' and add the corresponding
8472 phi args on edge 'e' to the phis of 'first'. PHI args are always in
correct order. */
8474 for (psi2 = gsi_start_phis (second),
8475 psi1 = gsi_start_phis (first);
8476 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8477 gsi_next (&psi2), gsi_next (&psi1))
8479 phi1 = psi1.phi ();
8480 phi2 = psi2.phi ();
8481 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8482 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8487 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8488 SECOND_HEAD is the destination of the THEN branch and FIRST_HEAD is
8489 the destination of the ELSE branch. */
8491 static void
8492 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8493 basic_block second_head ATTRIBUTE_UNUSED,
8494 basic_block cond_bb, void *cond_e)
8496 gimple_stmt_iterator gsi;
8497 gimple *new_cond_expr;
8498 tree cond_expr = (tree) cond_e;
8499 edge e0;
8501 /* Build the new conditional expression. */
8502 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8503 NULL_TREE, NULL_TREE);
8505 /* Add new cond in cond_bb. */
8506 gsi = gsi_last_bb (cond_bb);
8507 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8509 /* Adjust edges appropriately to connect new head with first head
8510 as well as second head. */
8511 e0 = single_succ_edge (cond_bb);
8512 e0->flags &= ~EDGE_FALLTHRU;
8513 e0->flags |= EDGE_FALSE_VALUE;
8517 /* Do bookkeeping of basic block BB for the profile consistency checker.
8518 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8519 do post-pass accounting. Store the counts in RECORD. */
8520 static void
8521 gimple_account_profile_record (basic_block bb, int after_pass,
8522 struct profile_record *record)
8524 gimple_stmt_iterator i;
8525 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8527 record->size[after_pass]
8528 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8529 if (profile_status_for_fn (cfun) == PROFILE_READ)
8530 record->time[after_pass]
8531 += estimate_num_insns (gsi_stmt (i),
8532 &eni_time_weights) * bb->count;
8533 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8534 record->time[after_pass]
8535 += estimate_num_insns (gsi_stmt (i),
8536 &eni_time_weights) * bb->frequency;
8540 struct cfg_hooks gimple_cfg_hooks = {
8541 "gimple",
8542 gimple_verify_flow_info,
8543 gimple_dump_bb, /* dump_bb */
8544 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8545 create_bb, /* create_basic_block */
8546 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8547 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8548 gimple_can_remove_branch_p, /* can_remove_branch_p */
8549 remove_bb, /* delete_basic_block */
8550 gimple_split_block, /* split_block */
8551 gimple_move_block_after, /* move_block_after */
8552 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8553 gimple_merge_blocks, /* merge_blocks */
8554 gimple_predict_edge, /* predict_edge */
8555 gimple_predicted_by_p, /* predicted_by_p */
8556 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8557 gimple_duplicate_bb, /* duplicate_block */
8558 gimple_split_edge, /* split_edge */
8559 gimple_make_forwarder_block, /* make_forwarder_block */
8560 NULL, /* tidy_fallthru_edge */
8561 NULL, /* force_nonfallthru */
8562 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8563 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8564 gimple_flow_call_edges_add, /* flow_call_edges_add */
8565 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8566 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8567 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8568 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8569 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8570 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8571 flush_pending_stmts, /* flush_pending_stmts */
8572 gimple_empty_block_p, /* block_empty_p */
8573 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8574 gimple_account_profile_record,
8578 /* Split all critical edges. */
8580 unsigned int
8581 split_critical_edges (void)
8583 basic_block bb;
8584 edge e;
8585 edge_iterator ei;
8587 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8588 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8589 mappings around the calls to split_edge. */
8590 start_recording_case_labels ();
8591 FOR_ALL_BB_FN (bb, cfun)
8593 FOR_EACH_EDGE (e, ei, bb->succs)
8595 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8596 split_edge (e);
8597 /* PRE inserts statements on edges and expects that, since
8598 split_critical_edges was run beforehand, committing edge insertions
8599 will not split more edges. In addition to critical edges we must
8600 also split edges where the insertion fits neither in the source block
8601 (it ends with a control flow statement, such as RESX) nor in the
8602 destination block (it has other predecessors or PHI nodes). Go ahead
8603 and split them too. This matches the logic in gimple_find_edge_insert_loc. */
8604 else if ((!single_pred_p (e->dest)
8605 || !gimple_seq_empty_p (phi_nodes (e->dest))
8606 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8607 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8608 && !(e->flags & EDGE_ABNORMAL))
8610 gimple_stmt_iterator gsi;
8612 gsi = gsi_last_bb (e->src);
8613 if (!gsi_end_p (gsi)
8614 && stmt_ends_bb_p (gsi_stmt (gsi))
8615 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8616 && !gimple_call_builtin_p (gsi_stmt (gsi),
8617 BUILT_IN_RETURN)))
8618 split_edge (e);
8622 end_recording_case_labels ();
8623 return 0;
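/* Editor's note: for reference, a critical edge is one whose source
   block has multiple successors and whose destination block has
   multiple predecessors.  The sketch below is, to the best of my
   reading, equivalent to the EDGE_CRITICAL_P macro used above; the
   helper name is hypothetical and not part of the original source.  */

static bool ATTRIBUTE_UNUSED
is_critical_edge_example (edge e)
{
  return (EDGE_COUNT (e->src->succs) >= 2
	  && EDGE_COUNT (e->dest->preds) >= 2);
}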
8626 namespace {
8628 const pass_data pass_data_split_crit_edges =
8630 GIMPLE_PASS, /* type */
8631 "crited", /* name */
8632 OPTGROUP_NONE, /* optinfo_flags */
8633 TV_TREE_SPLIT_EDGES, /* tv_id */
8634 PROP_cfg, /* properties_required */
8635 PROP_no_crit_edges, /* properties_provided */
8636 0, /* properties_destroyed */
8637 0, /* todo_flags_start */
8638 0, /* todo_flags_finish */
8641 class pass_split_crit_edges : public gimple_opt_pass
8643 public:
8644 pass_split_crit_edges (gcc::context *ctxt)
8645 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8648 /* opt_pass methods: */
8649 virtual unsigned int execute (function *) { return split_critical_edges (); }
8651 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8652 }; // class pass_split_crit_edges
8654 } // anon namespace
8656 gimple_opt_pass *
8657 make_pass_split_crit_edges (gcc::context *ctxt)
8659 return new pass_split_crit_edges (ctxt);
8663 /* Insert COND, which must be a GIMPLE_COND, after STMT in basic
8664 block BB, splitting the block as needed and creating a new
8665 conditionally executed basic block.
8666 Return the created basic block. */
8667 basic_block
8668 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8670 edge fall = split_block (bb, stmt);
8671 gimple_stmt_iterator iter = gsi_last_bb (bb);
8672 basic_block new_bb;
8674 /* Insert cond statement. */
8675 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8676 if (gsi_end_p (iter))
8677 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8678 else
8679 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8681 /* Create conditionally executed block. */
8682 new_bb = create_empty_bb (bb);
8683 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8684 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8686 /* Fix edge for split bb. */
8687 fall->flags = EDGE_FALSE_VALUE;
8689 /* Update dominance info. */
8690 if (dom_info_available_p (CDI_DOMINATORS))
8692 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8693 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8696 /* Update loop info. */
8697 if (current_loops)
8698 add_bb_to_loop (new_bb, bb->loop_father);
8700 return new_bb;
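/* Editor's note: a minimal usage sketch for insert_cond_bb (not part of
   the original source).  It guards the new block on VAL != 0; the
   helper name insert_guard_example is hypothetical.  */

static basic_block ATTRIBUTE_UNUSED
insert_guard_example (basic_block bb, gimple *stmt, tree val)
{
  /* Build the GIMPLE_COND "if (val != 0)" that insert_cond_bb expects.  */
  gcond *cond = gimple_build_cond (NE_EXPR, val,
				   build_zero_cst (TREE_TYPE (val)),
				   NULL_TREE, NULL_TREE);
  return insert_cond_bb (bb, stmt, cond);
}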
8703 /* Build a ternary operation and gimplify it. Emit code before GSI.
8704 Return the gimple_val holding the result. */
8706 tree
8707 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8708 tree type, tree a, tree b, tree c)
8710 tree ret;
8711 location_t loc = gimple_location (gsi_stmt (*gsi));
8713 ret = fold_build3_loc (loc, code, type, a, b, c);
8714 STRIP_NOPS (ret);
8716 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8717 GSI_SAME_STMT);
8720 /* Build a binary operation and gimplify it. Emit code before GSI.
8721 Return the gimple_val holding the result. */
8723 tree
8724 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8725 tree type, tree a, tree b)
8727 tree ret;
8729 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8730 STRIP_NOPS (ret);
8732 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8733 GSI_SAME_STMT);
8736 /* Build a unary operation and gimplify it. Emit code before GSI.
8737 Return the gimple_val holding the result. */
8739 tree
8740 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8741 tree a)
8743 tree ret;
8745 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8746 STRIP_NOPS (ret);
8748 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8749 GSI_SAME_STMT);
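/* Editor's note: a hedged sketch (not part of the original source)
   showing how the gimplify_build helpers compose.  It assumes A, B and
   C share an arithmetic type; the helper name emit_madd_example is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
emit_madd_example (gimple_stmt_iterator *gsi, tree a, tree b, tree c)
{
  /* Each call folds, gimplifies and inserts before GSI, so the second
     call can consume the gimple value produced by the first.  */
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, TREE_TYPE (a), a, b);
  return gimplify_build2 (gsi, MULT_EXPR, TREE_TYPE (a), sum, c);
}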
8754 /* Given a basic block B which ends with a conditional and has
8755 precisely two successors, determine which of the edges is taken if
8756 the conditional is true and which is taken if the conditional is
8757 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8759 void
8760 extract_true_false_edges_from_block (basic_block b,
8761 edge *true_edge,
8762 edge *false_edge)
8764 edge e = EDGE_SUCC (b, 0);
8766 if (e->flags & EDGE_TRUE_VALUE)
8768 *true_edge = e;
8769 *false_edge = EDGE_SUCC (b, 1);
8771 else
8773 *false_edge = e;
8774 *true_edge = EDGE_SUCC (b, 1);
8779 /* From a controlling predicate in the immediate dominator DOM of
8780 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8781 predicate evaluates to true and false and store them to
8782 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8783 they are non-NULL. Return true if the edges can be determined,
8784 else return false. */
8786 bool
8787 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8788 edge *true_controlled_edge,
8789 edge *false_controlled_edge)
8791 basic_block bb = phiblock;
8792 edge true_edge, false_edge, tem;
8793 edge e0 = NULL, e1 = NULL;
8795 /* We have to verify that one edge into the PHI node is dominated
8796 by the true edge of the predicate block and the other edge
8797 dominated by the false edge. This ensures that the PHI argument
8798 we are going to take is completely determined by the path we
8799 take from the predicate block.
8800 We can only use BB dominance checks below if the destination of
8801 the true/false edges are dominated by their edge, thus only
8802 have a single predecessor. */
8803 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8804 tem = EDGE_PRED (bb, 0);
8805 if (tem == true_edge
8806 || (single_pred_p (true_edge->dest)
8807 && (tem->src == true_edge->dest
8808 || dominated_by_p (CDI_DOMINATORS,
8809 tem->src, true_edge->dest))))
8810 e0 = tem;
8811 else if (tem == false_edge
8812 || (single_pred_p (false_edge->dest)
8813 && (tem->src == false_edge->dest
8814 || dominated_by_p (CDI_DOMINATORS,
8815 tem->src, false_edge->dest))))
8816 e1 = tem;
8817 else
8818 return false;
8819 tem = EDGE_PRED (bb, 1);
8820 if (tem == true_edge
8821 || (single_pred_p (true_edge->dest)
8822 && (tem->src == true_edge->dest
8823 || dominated_by_p (CDI_DOMINATORS,
8824 tem->src, true_edge->dest))))
8825 e0 = tem;
8826 else if (tem == false_edge
8827 || (single_pred_p (false_edge->dest)
8828 && (tem->src == false_edge->dest
8829 || dominated_by_p (CDI_DOMINATORS,
8830 tem->src, false_edge->dest))))
8831 e1 = tem;
8832 else
8833 return false;
8834 if (!e0 || !e1)
8835 return false;
8837 if (true_controlled_edge)
8838 *true_controlled_edge = e0;
8839 if (false_controlled_edge)
8840 *false_controlled_edge = e1;
8842 return true;
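/* Editor's note: a minimal usage sketch (not part of the original
   source).  It picks the PHI argument that flows in when the
   controlling predicate of DOM evaluates to true; the helper name
   controlled_phi_arg_example is hypothetical.  */

static tree ATTRIBUTE_UNUSED
controlled_phi_arg_example (basic_block dom, gphi *phi)
{
  edge true_e, false_e;
  if (!extract_true_false_controlled_edges (dom, gimple_bb (phi),
					    &true_e, &false_e))
    return NULL_TREE;
  return PHI_ARG_DEF (phi, true_e->dest_idx);
}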
8847 /* Emit return warnings. */
8849 namespace {
8851 const pass_data pass_data_warn_function_return =
8853 GIMPLE_PASS, /* type */
8854 "*warn_function_return", /* name */
8855 OPTGROUP_NONE, /* optinfo_flags */
8856 TV_NONE, /* tv_id */
8857 PROP_cfg, /* properties_required */
8858 0, /* properties_provided */
8859 0, /* properties_destroyed */
8860 0, /* todo_flags_start */
8861 0, /* todo_flags_finish */
8864 class pass_warn_function_return : public gimple_opt_pass
8866 public:
8867 pass_warn_function_return (gcc::context *ctxt)
8868 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8871 /* opt_pass methods: */
8872 virtual unsigned int execute (function *);
8874 }; // class pass_warn_function_return
8876 unsigned int
8877 pass_warn_function_return::execute (function *fun)
8879 source_location location;
8880 gimple *last;
8881 edge e;
8882 edge_iterator ei;
8884 if (!targetm.warn_func_return (fun->decl))
8885 return 0;
8887 /* If we have a path to EXIT, then we do return. */
8888 if (TREE_THIS_VOLATILE (fun->decl)
8889 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8891 location = UNKNOWN_LOCATION;
8892 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8894 last = last_stmt (e->src);
8895 if ((gimple_code (last) == GIMPLE_RETURN
8896 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8897 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8898 break;
8900 if (location == UNKNOWN_LOCATION)
8901 location = cfun->function_end_locus;
8902 warning_at (location, 0, "%<noreturn%> function does return");
8905 /* If we see "return;" in some basic block, then we do reach the end
8906 without returning a value. */
8907 else if (warn_return_type
8908 && !TREE_NO_WARNING (fun->decl)
8909 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8910 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8912 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8914 gimple *last = last_stmt (e->src);
8915 greturn *return_stmt = dyn_cast <greturn *> (last);
8916 if (return_stmt
8917 && gimple_return_retval (return_stmt) == NULL
8918 && !gimple_no_warning_p (last))
8920 location = gimple_location (last);
8921 if (location == UNKNOWN_LOCATION)
8922 location = fun->function_end_locus;
8923 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8924 TREE_NO_WARNING (fun->decl) = 1;
8925 break;
8929 return 0;
8932 } // anon namespace
8934 gimple_opt_pass *
8935 make_pass_warn_function_return (gcc::context *ctxt)
8937 return new pass_warn_function_return (ctxt);
8940 /* Walk a gimplified function and warn about calls whose return value is
8941 ignored and whose callee has attribute((warn_unused_result)) set. This is
8942 done before inlining, so we don't have to worry about that. */
8944 static void
8945 do_warn_unused_result (gimple_seq seq)
8947 tree fdecl, ftype;
8948 gimple_stmt_iterator i;
8950 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8952 gimple *g = gsi_stmt (i);
8954 switch (gimple_code (g))
8956 case GIMPLE_BIND:
8957 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8958 break;
8959 case GIMPLE_TRY:
8960 do_warn_unused_result (gimple_try_eval (g));
8961 do_warn_unused_result (gimple_try_cleanup (g));
8962 break;
8963 case GIMPLE_CATCH:
8964 do_warn_unused_result (gimple_catch_handler (
8965 as_a <gcatch *> (g)));
8966 break;
8967 case GIMPLE_EH_FILTER:
8968 do_warn_unused_result (gimple_eh_filter_failure (g));
8969 break;
8971 case GIMPLE_CALL:
8972 if (gimple_call_lhs (g))
8973 break;
8974 if (gimple_call_internal_p (g))
8975 break;
8977 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8978 LHS. All calls whose value is ignored should be
8979 represented like this. Look for the attribute. */
8980 fdecl = gimple_call_fndecl (g);
8981 ftype = gimple_call_fntype (g);
8983 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8985 location_t loc = gimple_location (g);
8987 if (fdecl)
8988 warning_at (loc, OPT_Wunused_result,
8989 "ignoring return value of %qD, "
8990 "declared with attribute warn_unused_result",
8991 fdecl);
8992 else
8993 warning_at (loc, OPT_Wunused_result,
8994 "ignoring return value of function "
8995 "declared with attribute warn_unused_result");
8997 break;
8999 default:
9000 /* Not a container, not a call, or a call whose value is used. */
9001 break;
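/* Editor's note: a hedged sketch (not part of the original source)
   restating the "naked call" check above as a predicate; the helper
   name call_warns_unused_result_example is hypothetical.  */

static bool ATTRIBUTE_UNUSED
call_warns_unused_result_example (gimple *g)
{
  /* Only a call without an LHS can have an ignored return value, and
     internal calls have no user-visible declaration to warn about.  */
  if (!is_gimple_call (g)
      || gimple_call_lhs (g)
      || gimple_call_internal_p (g))
    return false;
  tree ftype = gimple_call_fntype (g);
  return (ftype
	  && lookup_attribute ("warn_unused_result",
			       TYPE_ATTRIBUTES (ftype)) != NULL_TREE);
}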
9006 namespace {
9008 const pass_data pass_data_warn_unused_result =
9010 GIMPLE_PASS, /* type */
9011 "*warn_unused_result", /* name */
9012 OPTGROUP_NONE, /* optinfo_flags */
9013 TV_NONE, /* tv_id */
9014 PROP_gimple_any, /* properties_required */
9015 0, /* properties_provided */
9016 0, /* properties_destroyed */
9017 0, /* todo_flags_start */
9018 0, /* todo_flags_finish */
9021 class pass_warn_unused_result : public gimple_opt_pass
9023 public:
9024 pass_warn_unused_result (gcc::context *ctxt)
9025 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9028 /* opt_pass methods: */
9029 virtual bool gate (function *) { return flag_warn_unused_result; }
9030 virtual unsigned int execute (function *)
9032 do_warn_unused_result (gimple_body (current_function_decl));
9033 return 0;
9036 }; // class pass_warn_unused_result
9038 } // anon namespace
9040 gimple_opt_pass *
9041 make_pass_warn_unused_result (gcc::context *ctxt)
9043 return new pass_warn_unused_result (ctxt);
9046 /* IPA passes, compilation of earlier functions or inlining
9047 might have changed some properties, such as marking functions nothrow,
9048 pure, const or noreturn.
9049 Remove redundant edges and basic blocks, and create new ones if necessary.
9051 This pass can't be executed as a standalone pass from the pass manager
9052 because, in between inlining and this fixup, verify_flow_info would fail. */
9054 unsigned int
9055 execute_fixup_cfg (void)
9057 basic_block bb;
9058 gimple_stmt_iterator gsi;
9059 int todo = 0;
9060 gcov_type count_scale;
9061 edge e;
9062 edge_iterator ei;
9063 cgraph_node *node = cgraph_node::get (current_function_decl);
9065 count_scale
9066 = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
9068 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9069 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9070 = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
9072 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9073 e->count = apply_scale (e->count, count_scale);
9075 FOR_EACH_BB_FN (bb, cfun)
9077 bb->count = apply_scale (bb->count, count_scale);
9078 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9080 gimple *stmt = gsi_stmt (gsi);
9081 tree decl = is_gimple_call (stmt)
9082 ? gimple_call_fndecl (stmt)
9083 : NULL;
9084 if (decl)
9086 int flags = gimple_call_flags (stmt);
9087 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9089 if (gimple_purge_dead_abnormal_call_edges (bb))
9090 todo |= TODO_cleanup_cfg;
9092 if (gimple_in_ssa_p (cfun))
9094 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9095 update_stmt (stmt);
9099 if (flags & ECF_NORETURN
9100 && fixup_noreturn_call (stmt))
9101 todo |= TODO_cleanup_cfg;
9104 /* Remove stores to variables we marked write-only.
9105 Keep the access when the store has side effects, i.e. when the
9106 source is volatile. */
9107 if (gimple_store_p (stmt)
9108 && !gimple_has_side_effects (stmt))
9110 tree lhs = get_base_address (gimple_get_lhs (stmt));
9112 if (VAR_P (lhs)
9113 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9114 && varpool_node::get (lhs)->writeonly)
9116 unlink_stmt_vdef (stmt);
9117 gsi_remove (&gsi, true);
9118 release_defs (stmt);
9119 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9120 continue;
9123 /* For calls we can simply remove the LHS when it is known
9124 to be write-only. */
9125 if (is_gimple_call (stmt)
9126 && gimple_get_lhs (stmt))
9128 tree lhs = get_base_address (gimple_get_lhs (stmt));
9130 if (VAR_P (lhs)
9131 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9132 && varpool_node::get (lhs)->writeonly)
9134 gimple_call_set_lhs (stmt, NULL);
9135 update_stmt (stmt);
9136 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9140 if (maybe_clean_eh_stmt (stmt)
9141 && gimple_purge_dead_eh_edges (bb))
9142 todo |= TODO_cleanup_cfg;
9143 gsi_next (&gsi);
9146 FOR_EACH_EDGE (e, ei, bb->succs)
9147 e->count = apply_scale (e->count, count_scale);
9149 /* If we have a basic block with no successors that does not
9150 end with a control statement or a noreturn call, end it with
9151 a call to __builtin_unreachable. This situation can occur
9152 when inlining a noreturn call that does in fact return. */
9153 if (EDGE_COUNT (bb->succs) == 0)
9155 gimple *stmt = last_stmt (bb);
9156 if (!stmt
9157 || (!is_ctrl_stmt (stmt)
9158 && (!is_gimple_call (stmt)
9159 || !gimple_call_noreturn_p (stmt))))
9161 if (stmt && is_gimple_call (stmt))
9162 gimple_call_set_ctrl_altering (stmt, false);
9163 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9164 stmt = gimple_build_call (fndecl, 0);
9165 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9166 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9167 if (!cfun->after_inlining)
9169 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9170 int freq
9171 = compute_call_stmt_bb_frequency (current_function_decl,
9172 bb);
9173 node->create_edge (cgraph_node::get_create (fndecl),
9174 call_stmt, bb->count, freq);
9179 if (count_scale != REG_BR_PROB_BASE)
9180 compute_function_frequency ();
9182 if (current_loops
9183 && (todo & TODO_cleanup_cfg))
9184 loops_state_set (LOOPS_NEED_FIXUP);
9186 return todo;
9189 namespace {
9191 const pass_data pass_data_fixup_cfg =
9193 GIMPLE_PASS, /* type */
9194 "fixup_cfg", /* name */
9195 OPTGROUP_NONE, /* optinfo_flags */
9196 TV_NONE, /* tv_id */
9197 PROP_cfg, /* properties_required */
9198 0, /* properties_provided */
9199 0, /* properties_destroyed */
9200 0, /* todo_flags_start */
9201 0, /* todo_flags_finish */
9204 class pass_fixup_cfg : public gimple_opt_pass
9206 public:
9207 pass_fixup_cfg (gcc::context *ctxt)
9208 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9211 /* opt_pass methods: */
9212 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9213 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9215 }; // class pass_fixup_cfg
9217 } // anon namespace
9219 gimple_opt_pass *
9220 make_pass_fixup_cfg (gcc::context *ctxt)
9222 return new pass_fixup_cfg (ctxt);
9225 /* Garbage collection support for edge_def. */
9227 extern void gt_ggc_mx (tree&);
9228 extern void gt_ggc_mx (gimple *&);
9229 extern void gt_ggc_mx (rtx&);
9230 extern void gt_ggc_mx (basic_block&);
9232 static void
9233 gt_ggc_mx (rtx_insn *& x)
9235 if (x)
9236 gt_ggc_mx_rtx_def ((void *) x);
9239 void
9240 gt_ggc_mx (edge_def *e)
9242 tree block = LOCATION_BLOCK (e->goto_locus);
9243 gt_ggc_mx (e->src);
9244 gt_ggc_mx (e->dest);
9245 if (current_ir_type () == IR_GIMPLE)
9246 gt_ggc_mx (e->insns.g);
9247 else
9248 gt_ggc_mx (e->insns.r);
9249 gt_ggc_mx (block);
9252 /* PCH support for edge_def. */
9254 extern void gt_pch_nx (tree&);
9255 extern void gt_pch_nx (gimple *&);
9256 extern void gt_pch_nx (rtx&);
9257 extern void gt_pch_nx (basic_block&);
9259 static void
9260 gt_pch_nx (rtx_insn *& x)
9262 if (x)
9263 gt_pch_nx_rtx_def ((void *) x);
9266 void
9267 gt_pch_nx (edge_def *e)
9269 tree block = LOCATION_BLOCK (e->goto_locus);
9270 gt_pch_nx (e->src);
9271 gt_pch_nx (e->dest);
9272 if (current_ir_type () == IR_GIMPLE)
9273 gt_pch_nx (e->insns.g);
9274 else
9275 gt_pch_nx (e->insns.r);
9276 gt_pch_nx (block);
9279 void
9280 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9282 tree block = LOCATION_BLOCK (e->goto_locus);
9283 op (&(e->src), cookie);
9284 op (&(e->dest), cookie);
9285 if (current_ir_type () == IR_GIMPLE)
9286 op (&(e->insns.g), cookie);
9287 else
9288 op (&(e->insns.r), cookie);
9289 op (&(block), cookie);
9292 #if CHECKING_P
9294 namespace selftest {
9296 /* Helper function for CFG selftests: create a dummy function decl
9297 and push it as cfun. */
9299 static tree
9300 push_fndecl (const char *name)
9302 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9303 /* FIXME: this uses input_location: */
9304 tree fndecl = build_fn_decl (name, fn_type);
9305 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9306 NULL_TREE, integer_type_node);
9307 DECL_RESULT (fndecl) = retval;
9308 push_struct_function (fndecl);
9309 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9310 ASSERT_TRUE (fun != NULL);
9311 init_empty_tree_cfg_for_function (fun);
9312 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9313 ASSERT_EQ (0, n_edges_for_fn (fun));
9314 return fndecl;
9317 /* These tests directly create CFGs.
9318 Compare with the static fns within tree-cfg.c:
9319 - build_gimple_cfg
9320 - make_blocks: calls create_basic_block (seq, bb);
9321 - make_edges. */
9323 /* Verify a simple cfg of the form:
9324 ENTRY -> A -> B -> C -> EXIT. */
9326 static void
9327 test_linear_chain ()
9329 gimple_register_cfg_hooks ();
9331 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9332 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9334 /* Create some empty blocks. */
9335 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9336 basic_block bb_b = create_empty_bb (bb_a);
9337 basic_block bb_c = create_empty_bb (bb_b);
9339 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9340 ASSERT_EQ (0, n_edges_for_fn (fun));
9342 /* Create some edges: a simple linear chain of BBs. */
9343 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9344 make_edge (bb_a, bb_b, 0);
9345 make_edge (bb_b, bb_c, 0);
9346 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9348 /* Verify the edges. */
9349 ASSERT_EQ (4, n_edges_for_fn (fun));
9350 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9351 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9352 ASSERT_EQ (1, bb_a->preds->length ());
9353 ASSERT_EQ (1, bb_a->succs->length ());
9354 ASSERT_EQ (1, bb_b->preds->length ());
9355 ASSERT_EQ (1, bb_b->succs->length ());
9356 ASSERT_EQ (1, bb_c->preds->length ());
9357 ASSERT_EQ (1, bb_c->succs->length ());
9358 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9359 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9361 /* Verify the dominance information
9362 Each BB in our simple chain should be dominated by the one before
9363 it. */
9364 calculate_dominance_info (CDI_DOMINATORS);
9365 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9366 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9367 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9368 ASSERT_EQ (1, dom_by_b.length ());
9369 ASSERT_EQ (bb_c, dom_by_b[0]);
9370 free_dominance_info (CDI_DOMINATORS);
9371 dom_by_b.release ();
9373 /* Similarly for post-dominance: each BB in our chain is post-dominated
9374 by the one after it. */
9375 calculate_dominance_info (CDI_POST_DOMINATORS);
9376 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9377 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9378 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9379 ASSERT_EQ (1, postdom_by_b.length ());
9380 ASSERT_EQ (bb_a, postdom_by_b[0]);
9381 free_dominance_info (CDI_POST_DOMINATORS);
9382 postdom_by_b.release ();
9384 pop_cfun ();
9387 /* Verify a simple CFG of the form:
9388      ENTRY
9389        |
9390        A
9391       / \
9392      /t  \f
9393     B     C
9394      \   /
9395       \ /
9396        D
9397        |
9398      EXIT.  */
9400 static void
9401 test_diamond ()
9403 gimple_register_cfg_hooks ();
9405 tree fndecl = push_fndecl ("cfg_test_diamond");
9406 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9408 /* Create some empty blocks. */
9409 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9410 basic_block bb_b = create_empty_bb (bb_a);
9411 basic_block bb_c = create_empty_bb (bb_a);
9412 basic_block bb_d = create_empty_bb (bb_b);
9414 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9415 ASSERT_EQ (0, n_edges_for_fn (fun));
9417 /* Create the edges. */
9418 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9419 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9420 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9421 make_edge (bb_b, bb_d, 0);
9422 make_edge (bb_c, bb_d, 0);
9423 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9425 /* Verify the edges. */
9426 ASSERT_EQ (6, n_edges_for_fn (fun));
9427 ASSERT_EQ (1, bb_a->preds->length ());
9428 ASSERT_EQ (2, bb_a->succs->length ());
9429 ASSERT_EQ (1, bb_b->preds->length ());
9430 ASSERT_EQ (1, bb_b->succs->length ());
9431 ASSERT_EQ (1, bb_c->preds->length ());
9432 ASSERT_EQ (1, bb_c->succs->length ());
9433 ASSERT_EQ (2, bb_d->preds->length ());
9434 ASSERT_EQ (1, bb_d->succs->length ());
9436 /* Verify the dominance information. */
9437 calculate_dominance_info (CDI_DOMINATORS);
9438 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9439 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9440 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9441 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9442 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9443 dom_by_a.release ();
9444 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9445 ASSERT_EQ (0, dom_by_b.length ());
9446 dom_by_b.release ();
9447 free_dominance_info (CDI_DOMINATORS);
9449 /* Similarly for post-dominance. */
9450 calculate_dominance_info (CDI_POST_DOMINATORS);
9451 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9452 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9453 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9454 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9455 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9456 postdom_by_d.release ();
9457 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9458 ASSERT_EQ (0, postdom_by_b.length ());
9459 postdom_by_b.release ();
9460 free_dominance_info (CDI_POST_DOMINATORS);
9462 pop_cfun ();
9465 /* Verify that we can handle a CFG containing a "complete" aka
9466 fully-connected subgraph (where each of A, B, C and D below has edges
9467 pointing to every other node, and also to itself).
9468 e.g.:
9469      ENTRY  EXIT
9470        |    ^
9471        |   /
9472        |  /
9473        | /
9474        V/
9475        A<--->B
9476        ^^   ^^
9477        | \ / |
9478        |  X  |
9479        | / \ |
9480        VV   VV
9481        C<--->D
9482 */
9484 static void
9485 test_fully_connected ()
9487 gimple_register_cfg_hooks ();
9489 tree fndecl = push_fndecl ("cfg_fully_connected");
9490 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9492 const int n = 4;
9494 /* Create some empty blocks. */
9495 auto_vec <basic_block> subgraph_nodes;
9496 for (int i = 0; i < n; i++)
9497 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9499 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9500 ASSERT_EQ (0, n_edges_for_fn (fun));
9502 /* Create the edges. */
9503 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9504 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9505 for (int i = 0; i < n; i++)
9506 for (int j = 0; j < n; j++)
9507 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9509 /* Verify the edges. */
9510 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9511 /* The first one is linked to ENTRY/EXIT as well as itself and
9512 everything else. */
9513 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9514 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9515 /* The other ones in the subgraph are linked to everything in
9516 the subgraph (including themselves). */
9517 for (int i = 1; i < n; i++)
9519 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9520 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9523 /* Verify the dominance information. */
9524 calculate_dominance_info (CDI_DOMINATORS);
9525 /* The initial block in the subgraph should be dominated by ENTRY. */
9526 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9527 get_immediate_dominator (CDI_DOMINATORS,
9528 subgraph_nodes[0]));
9529 /* Every other block in the subgraph should be dominated by the
9530 initial block. */
9531 for (int i = 1; i < n; i++)
9532 ASSERT_EQ (subgraph_nodes[0],
9533 get_immediate_dominator (CDI_DOMINATORS,
9534 subgraph_nodes[i]));
9535 free_dominance_info (CDI_DOMINATORS);
9537 /* Similarly for post-dominance. */
9538 calculate_dominance_info (CDI_POST_DOMINATORS);
9539 /* The initial block in the subgraph should be postdominated by EXIT. */
9540 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9541 get_immediate_dominator (CDI_POST_DOMINATORS,
9542 subgraph_nodes[0]));
9543 /* Every other block in the subgraph should be postdominated by the
9544 initial block, since that leads to EXIT. */
9545 for (int i = 1; i < n; i++)
9546 ASSERT_EQ (subgraph_nodes[0],
9547 get_immediate_dominator (CDI_POST_DOMINATORS,
9548 subgraph_nodes[i]));
9549 free_dominance_info (CDI_POST_DOMINATORS);
9551 pop_cfun ();
9554 /* Run all of the selftests within this file. */
9556 void
9557 tree_cfg_c_tests ()
9559 test_linear_chain ();
9560 test_diamond ();
9561 test_fully_connected ();
9564 } // namespace selftest
9566 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9567 - loop
9568 - nested loops
9569 - switch statement (a block with many out-edges)
9570 - something that jumps to itself
9571 - etc */
9573 #endif /* CHECKING_P */