/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
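
/* For example (an illustrative sketch, not taken from a testcase): in

     switch (x)
       {
       case 1:
       case 3: goto L1;
       case 2: goto L2;
       }

   cases 1 and 3 reach the same label and therefore share one edge E.
   The table then maps E to a CASE_CHAIN-linked list holding the
   CASE_LABEL_EXPRs for 1 and 3, so a redirection of E can update
   every case that uses it in a single walk of that chain.  */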
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */
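
/* For instance (a rough sketch of the IL, not the exact dump format),
   a loop preceded by "#pragma GCC ivdep" arrives here with its
   condition guarded by an IFN_ANNOTATE call:

     D.2 = .ANNOTATE (D.1, annot_expr_ivdep_kind);
     if (D.2 != 0) goto <body>; else goto <exit>;

   replace_loop_annotate_in_block records the annotation on the loop
   (here loop->safelen = INT_MAX) and rewrites the call into the plain
   copy "D.2 = D.1".  */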
static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */
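
/* In the GIMPLE frontend a PHI can be written explicitly, roughly as

     x_1 = __PHI (__BB2: a_2, __BB3: b_3);

   (an illustrative sketch of the FE syntax), which the parser
   represents as an IFN_PHI internal call whose arguments alternate
   between a predecessor label and the value flowing in from that
   predecessor.  The walk below rewrites each such call into a real
   PHI node now that the edges exist.  */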
static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
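
/* A sketch of the payoff (illustrative numbers): with N computed
   gotos and M potential label targets we would otherwise need N*M
   abnormal edges.  After factoring, each "goto *p" stores its
   destination into a shared temporary and falls through to a single
   dispatcher block that performs the one remaining computed goto, so
   only about N+M edges are required.  */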
static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  omp_free_regions ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      int freq = 0;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  cnt += e->count;
	  freq += EDGE_FREQUENCY (e);
	}
      bb->count = cnt;
      bb->frequency = freq;
      tree_guess_outgoing_edge_probabilities (bb);
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = bb->count.apply_probability (e->probability);

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
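
/* For example (illustrative), if everything sits on one source line:

     if (c) x = 1; else x = 2;

   the then and else blocks share that line's locus; handing them
   distinct discriminators lets a sample-based profiler (e.g. AutoFDO)
   attribute its samples to each block separately.  */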
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, base_index, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      base_index = i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  gimple_switch_set_label (stmt, base_index, NULL_TREE);
	  new_size--;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1955 /* Merge block B into block A. */
1957 static void
1958 gimple_merge_blocks (basic_block a, basic_block b)
1960 gimple_stmt_iterator last, gsi;
1961 gphi_iterator psi;
1963 if (dump_file)
1964 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1966 /* Remove all single-valued PHI nodes from block B of the form
1967 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1968 gsi = gsi_last_bb (a);
1969 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1971 gimple *phi = gsi_stmt (psi);
1972 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1973 gimple *copy;
1974 bool may_replace_uses = (virtual_operand_p (def)
1975 || may_propagate_copy (def, use));
1977 /* In case we maintain loop closed ssa form, do not propagate arguments
1978 of loop exit phi nodes. */
1979 if (current_loops
1980 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1981 && !virtual_operand_p (def)
1982 && TREE_CODE (use) == SSA_NAME
1983 && a->loop_father != b->loop_father)
1984 may_replace_uses = false;
1986 if (!may_replace_uses)
1988 gcc_assert (!virtual_operand_p (def));
1990 /* Note that just emitting the copies is fine -- there is no problem
1991 with ordering of phi nodes. This is because A is the single
1992 predecessor of B, therefore results of the phi nodes cannot
1993 appear as arguments of the phi nodes. */
1994 copy = gimple_build_assign (def, use);
1995 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1996 remove_phi_node (&psi, false);
1998 else
2000 /* If we deal with a PHI for virtual operands, we can simply
2001 propagate these without fussing with folding or updating
2002 the stmt. */
2003 if (virtual_operand_p (def))
2005 imm_use_iterator iter;
2006 use_operand_p use_p;
2007 gimple *stmt;
2009 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2010 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2011 SET_USE (use_p, use);
2013 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2014 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2016 else
2017 replace_uses_by (def, use);
2019 remove_phi_node (&psi, true);
2023 /* Ensure that B follows A. */
2024 move_block_after (b, a);
2026 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2027 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2029 /* Remove labels from B and set gimple_bb to A for other statements. */
2030 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2032 gimple *stmt = gsi_stmt (gsi);
2033 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2035 tree label = gimple_label_label (label_stmt);
2036 int lp_nr;
2038 gsi_remove (&gsi, false);
2040 /* Now that we can thread computed gotos, we might have
2041 a situation where we have a forced label in block B.
2042 However, the label at the start of block B might still be
2043 used in other ways (think about the runtime checking for
2044 Fortran assigned gotos). So we cannot just delete the
2045 label. Instead we move the label to the start of block A. */
2046 if (FORCED_LABEL (label))
2048 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2049 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2051 /* Other user labels are kept around in the form of a debug stmt. */
2052 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2054 gimple *dbg = gimple_build_debug_bind (label,
2055 integer_zero_node,
2056 stmt);
2057 gimple_debug_bind_reset_value (dbg);
2058 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2061 lp_nr = EH_LANDING_PAD_NR (label);
2062 if (lp_nr)
2064 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2065 lp->post_landing_pad = NULL;
2068 else
2070 gimple_set_bb (stmt, a);
2071 gsi_next (&gsi);
2075 /* When merging two BBs, if their counts are different, the larger count
2076 is selected as the new bb count. This is to handle inconsistent
2077 profiles. */
2078 if (a->loop_father == b->loop_father)
2080 a->count = MAX (a->count, b->count);
2081 a->frequency = MAX (a->frequency, b->frequency);
2084 /* Merge the sequences. */
2085 last = gsi_last_bb (a);
2086 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2087 set_bb_seq (b, NULL);
2089 if (cfgcleanup_altered_bbs)
2090 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2094 /* Return the one of the two successors of BB that is not reachable by a
2095 complex edge, if there is one. Else, return BB. We use
2096 this in optimizations that use post-dominators for their heuristics,
2097 to catch the cases in C++ where function calls are involved. */
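/* Illustrative sketch: if BB ends in a call that may throw, it has a
   fallthru successor and an EH successor; the EH edge is complex, so
   the fallthru destination is returned.  */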
2099 basic_block
2100 single_noncomplex_succ (basic_block bb)
2102 edge e0, e1;
2103 if (EDGE_COUNT (bb->succs) != 2)
2104 return bb;
2106 e0 = EDGE_SUCC (bb, 0);
2107 e1 = EDGE_SUCC (bb, 1);
2108 if (e0->flags & EDGE_COMPLEX)
2109 return e1->dest;
2110 if (e1->flags & EDGE_COMPLEX)
2111 return e0->dest;
2113 return bb;
2116 /* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags. */
2118 void
2119 notice_special_calls (gcall *call)
2121 int flags = gimple_call_flags (call);
2123 if (flags & ECF_MAY_BE_ALLOCA)
2124 cfun->calls_alloca = true;
2125 if (flags & ECF_RETURNS_TWICE)
2126 cfun->calls_setjmp = true;
2130 /* Clear flags set by notice_special_calls. Used by dead code removal
2131 to update the flags. */
2133 void
2134 clear_special_calls (void)
2136 cfun->calls_alloca = false;
2137 cfun->calls_setjmp = false;
2140 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2142 static void
2143 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2145 /* Since this block is no longer reachable, we can just delete all
2146 of its PHI nodes. */
2147 remove_phi_nodes (bb);
2149 /* Remove edges to BB's successors. */
2150 while (EDGE_COUNT (bb->succs) > 0)
2151 remove_edge (EDGE_SUCC (bb, 0));
2155 /* Remove statements of basic block BB. */
2157 static void
2158 remove_bb (basic_block bb)
2160 gimple_stmt_iterator i;
2162 if (dump_file)
2164 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2165 if (dump_flags & TDF_DETAILS)
2167 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2168 fprintf (dump_file, "\n");
2172 if (current_loops)
2174 struct loop *loop = bb->loop_father;
2176 /* If a loop gets removed, clean up the information associated
2177 with it. */
2178 if (loop->latch == bb
2179 || loop->header == bb)
2180 free_numbers_of_iterations_estimates_loop (loop);
2183 /* Remove all the instructions in the block. */
2184 if (bb_seq (bb) != NULL)
2186 /* Walk backwards so as to get a chance to substitute all
2187 released DEFs into debug stmts. See
2188 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2189 details. */
2190 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2192 gimple *stmt = gsi_stmt (i);
2193 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2194 if (label_stmt
2195 && (FORCED_LABEL (gimple_label_label (label_stmt))
2196 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2198 basic_block new_bb;
2199 gimple_stmt_iterator new_gsi;
2201 /* A non-reachable non-local label may still be referenced.
2202 But it no longer needs to carry the extra semantics of
2203 non-locality. */
2204 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2206 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2207 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2210 new_bb = bb->prev_bb;
2211 new_gsi = gsi_start_bb (new_bb);
2212 gsi_remove (&i, false);
2213 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2215 else
2217 /* Release SSA definitions. */
2218 release_defs (stmt);
2219 gsi_remove (&i, true);
2222 if (gsi_end_p (i))
2223 i = gsi_last_bb (bb);
2224 else
2225 gsi_prev (&i);
2229 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2230 bb->il.gimple.seq = NULL;
2231 bb->il.gimple.phi_nodes = NULL;
2235 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2236 predicate VAL, return the edge that will be taken out of the block.
2237 If VAL does not match a unique edge, NULL is returned. */
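/* E.g. (illustrative): if BB ends in if (x_1 != 0) and VAL is the
   integer constant 0, the false edge is the unique taken edge.  */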
2239 edge
2240 find_taken_edge (basic_block bb, tree val)
2242 gimple *stmt;
2244 stmt = last_stmt (bb);
2246 gcc_assert (stmt);
2247 gcc_assert (is_ctrl_stmt (stmt));
2249 if (val == NULL)
2250 return NULL;
2252 if (!is_gimple_min_invariant (val))
2253 return NULL;
2255 if (gimple_code (stmt) == GIMPLE_COND)
2256 return find_taken_edge_cond_expr (bb, val);
2258 if (gimple_code (stmt) == GIMPLE_SWITCH)
2259 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2261 if (computed_goto_p (stmt))
2263 /* Only optimize if the argument is a label; if the argument is
2264 not a label then we cannot construct a proper CFG.
2266 It may be the case that we only need to allow the LABEL_REF to
2267 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2268 appear inside a LABEL_EXPR just to be safe. */
2269 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2270 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2271 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2272 return NULL;
2275 gcc_unreachable ();
2278 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2279 statement, determine which of the outgoing edges will be taken out of the
2280 block. Return NULL if any edge may be taken. */
2282 static edge
2283 find_taken_edge_computed_goto (basic_block bb, tree val)
2285 basic_block dest;
2286 edge e = NULL;
2288 dest = label_to_block (val);
2289 if (dest)
2291 e = find_edge (bb, dest);
2292 gcc_assert (e != NULL);
2295 return e;
2298 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2299 statement, determine which of the two edges will be taken out of the
2300 block. Return NULL if either edge may be taken. */
2302 static edge
2303 find_taken_edge_cond_expr (basic_block bb, tree val)
2305 edge true_edge, false_edge;
2307 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2309 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2310 return (integer_zerop (val) ? false_edge : true_edge);
2313 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2314 statement, determine which edge will be taken out of the block. Return
2315 NULL if any edge may be taken. */
2317 static edge
2318 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2319 tree val)
2321 basic_block dest_bb;
2322 edge e;
2323 tree taken_case;
2325 taken_case = find_case_label_for_value (switch_stmt, val);
2326 dest_bb = label_to_block (CASE_LABEL (taken_case));
2328 e = find_edge (bb, dest_bb);
2329 gcc_assert (e);
2330 return e;
2334 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2335 We can make optimal use here of the fact that the case labels are
2336 sorted: We can do a binary search for a case matching VAL. */
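/* Hypothetical example: given the sorted labels
   case 1: case 5: case 7 ... 10: default: and VAL == 8, the search
   repeatedly halves [low, high) by comparing CASE_LOW of the middle
   label against VAL until the range label 7 ... 10 matches.  */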
2338 static tree
2339 find_case_label_for_value (gswitch *switch_stmt, tree val)
2341 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2342 tree default_case = gimple_switch_default_label (switch_stmt);
2344 for (low = 0, high = n; high - low > 1; )
2346 size_t i = (high + low) / 2;
2347 tree t = gimple_switch_label (switch_stmt, i);
2348 int cmp;
2350 /* Cache the result of comparing CASE_LOW and val. */
2351 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2353 if (cmp > 0)
2354 high = i;
2355 else
2356 low = i;
2358 if (CASE_HIGH (t) == NULL)
2360 /* A single-valued case label. */
2361 if (cmp == 0)
2362 return t;
2364 else
2366 /* A case range. We can only handle integer ranges. */
2367 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2368 return t;
2372 return default_case;
2376 /* Dump a basic block on stderr. */
2378 void
2379 gimple_debug_bb (basic_block bb)
2381 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2385 /* Dump basic block with index N on stderr. */
2387 basic_block
2388 gimple_debug_bb_n (int n)
2390 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2391 return BASIC_BLOCK_FOR_FN (cfun, n);
2395 /* Dump the CFG on stderr.
2397 FLAGS are the same as those used by the tree dumping functions
2398 (see TDF_* in dumpfile.h). */
2400 void
2401 gimple_debug_cfg (dump_flags_t flags)
2403 gimple_dump_cfg (stderr, flags);
2407 /* Dump the program showing basic block boundaries on the given FILE.
2409 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2410 tree.h). */
2412 void
2413 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2415 if (flags & TDF_DETAILS)
2417 dump_function_header (file, current_function_decl, flags);
2418 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2419 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2420 last_basic_block_for_fn (cfun));
2422 brief_dump_cfg (file, flags);
2423 fprintf (file, "\n");
2426 if (flags & TDF_STATS)
2427 dump_cfg_stats (file);
2429 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2433 /* Dump CFG statistics on FILE. */
2435 void
2436 dump_cfg_stats (FILE *file)
2438 static long max_num_merged_labels = 0;
2439 unsigned long size, total = 0;
2440 long num_edges;
2441 basic_block bb;
2442 const char * const fmt_str = "%-30s%-13s%12s\n";
2443 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2444 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2445 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2446 const char *funcname = current_function_name ();
2448 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2450 fprintf (file, "---------------------------------------------------------\n");
2451 fprintf (file, fmt_str, "", " Number of ", "Memory");
2452 fprintf (file, fmt_str, "", " instances ", "used ");
2453 fprintf (file, "---------------------------------------------------------\n");
2455 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2456 total += size;
2457 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2458 SCALE (size), LABEL (size));
2460 num_edges = 0;
2461 FOR_EACH_BB_FN (bb, cfun)
2462 num_edges += EDGE_COUNT (bb->succs);
2463 size = num_edges * sizeof (struct edge_def);
2464 total += size;
2465 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2467 fprintf (file, "---------------------------------------------------------\n");
2468 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2469 LABEL (total));
2470 fprintf (file, "---------------------------------------------------------\n");
2471 fprintf (file, "\n");
2473 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2474 max_num_merged_labels = cfg_stats.num_merged_labels;
2476 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2477 cfg_stats.num_merged_labels, max_num_merged_labels);
2479 fprintf (file, "\n");
2483 /* Dump CFG statistics on stderr. Keep extern so that it's always
2484 linked in the final executable. */
2486 DEBUG_FUNCTION void
2487 debug_cfg_stats (void)
2489 dump_cfg_stats (stderr);
2492 /*---------------------------------------------------------------------------
2493 Miscellaneous helpers
2494 ---------------------------------------------------------------------------*/
2496 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2497 flow. Transfers of control flow associated with EH are excluded. */
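/* Illustrative sketch: in a function that calls setjmp, a subsequent
   call with side effects may transfer control back to the setjmp
   receiver via longjmp, so it must be treated as a potential abnormal
   goto; the checks below prune calls that provably cannot do this.  */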
2499 static bool
2500 call_can_make_abnormal_goto (gimple *t)
2502 /* If the function has no non-local labels, then a call cannot make an
2503 abnormal transfer of control. */
2504 if (!cfun->has_nonlocal_label
2505 && !cfun->calls_setjmp)
2506 return false;
2508 /* Likewise if the call has no side effects. */
2509 if (!gimple_has_side_effects (t))
2510 return false;
2512 /* Likewise if the called function is leaf. */
2513 if (gimple_call_flags (t) & ECF_LEAF)
2514 return false;
2516 return true;
2520 /* Return true if T can make an abnormal transfer of control flow.
2521 Transfers of control flow associated with EH are excluded. */
2523 bool
2524 stmt_can_make_abnormal_goto (gimple *t)
2526 if (computed_goto_p (t))
2527 return true;
2528 if (is_gimple_call (t))
2529 return call_can_make_abnormal_goto (t);
2530 return false;
2534 /* Return true if T represents a stmt that always transfers control. */
2536 bool
2537 is_ctrl_stmt (gimple *t)
2539 switch (gimple_code (t))
2541 case GIMPLE_COND:
2542 case GIMPLE_SWITCH:
2543 case GIMPLE_GOTO:
2544 case GIMPLE_RETURN:
2545 case GIMPLE_RESX:
2546 return true;
2547 default:
2548 return false;
2553 /* Return true if T is a statement that may alter the flow of control
2554 (e.g., a call to a non-returning function). */
2556 bool
2557 is_ctrl_altering_stmt (gimple *t)
2559 gcc_assert (t);
2561 switch (gimple_code (t))
2563 case GIMPLE_CALL:
2564 /* The per-stmt call flag indicates whether the call could alter
2565 control flow. */
2566 if (gimple_call_ctrl_altering_p (t))
2567 return true;
2568 break;
2570 case GIMPLE_EH_DISPATCH:
2571 /* EH_DISPATCH branches to the individual catch handlers at
2572 this level of a try or allowed-exceptions region. It can
2573 fallthru to the next statement as well. */
2574 return true;
2576 case GIMPLE_ASM:
2577 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2578 return true;
2579 break;
2581 CASE_GIMPLE_OMP:
2582 /* OpenMP directives alter control flow. */
2583 return true;
2585 case GIMPLE_TRANSACTION:
2586 /* A transaction start alters control flow. */
2587 return true;
2589 default:
2590 break;
2593 /* If a statement can throw, it alters control flow. */
2594 return stmt_can_throw_internal (t);
2598 /* Return true if T is a simple local goto. */
2600 bool
2601 simple_goto_p (gimple *t)
2603 return (gimple_code (t) == GIMPLE_GOTO
2604 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2608 /* Return true if STMT should start a new basic block. PREV_STMT is
2609 the statement preceding STMT. It is used when STMT is a label or a
2610 case label. Labels should only start a new basic block if their
2611 previous statement wasn't a label. Otherwise, a sequence of labels
2612 would generate unnecessary basic blocks that only contain a single
2613 label. */
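/* Illustrative example (made-up labels):

       L1:
       L2:
         x = 1;

   L2 follows another ordinary label, so it does not start a new basic
   block; such merged labels are counted in
   cfg_stats.num_merged_labels.  */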
2615 static inline bool
2616 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2618 if (stmt == NULL)
2619 return false;
2621 /* Labels start a new basic block only if the preceding statement
2622 wasn't a label of the same type. This prevents the creation of
2623 consecutive blocks that have nothing but a single label. */
2624 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2626 /* Nonlocal and computed GOTO targets always start a new block. */
2627 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2628 || FORCED_LABEL (gimple_label_label (label_stmt)))
2629 return true;
2631 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2633 if (DECL_NONLOCAL (gimple_label_label (
2634 as_a <glabel *> (prev_stmt))))
2635 return true;
2637 cfg_stats.num_merged_labels++;
2638 return false;
2640 else
2641 return true;
2643 else if (gimple_code (stmt) == GIMPLE_CALL)
2645 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2646 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2647 start a new block. */
2648 return true;
2649 if (gimple_call_internal_p (stmt, IFN_PHI)
2650 && prev_stmt
2651 && gimple_code (prev_stmt) != GIMPLE_LABEL
2652 && (gimple_code (prev_stmt) != GIMPLE_CALL
2653 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2654 /* PHI nodes start a new block unless preceded by a label
2655 or another PHI. */
2656 return true;
2659 return false;
2663 /* Return true if T should end a basic block. */
2665 bool
2666 stmt_ends_bb_p (gimple *t)
2668 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2671 /* Remove block annotations and other data structures. */
2673 void
2674 delete_tree_cfg_annotations (struct function *fn)
2676 vec_free (label_to_block_map_for_fn (fn));
2679 /* Return the virtual PHI node of BB, or NULL if there is none. */
2681 gphi *
2682 get_virtual_phi (basic_block bb)
2684 for (gphi_iterator gsi = gsi_start_phis (bb);
2685 !gsi_end_p (gsi);
2686 gsi_next (&gsi))
2688 gphi *phi = gsi.phi ();
2690 if (virtual_operand_p (PHI_RESULT (phi)))
2691 return phi;
2694 return NULL;
2697 /* Return the first statement in basic block BB. */
2699 gimple *
2700 first_stmt (basic_block bb)
2702 gimple_stmt_iterator i = gsi_start_bb (bb);
2703 gimple *stmt = NULL;
2705 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2707 gsi_next (&i);
2708 stmt = NULL;
2710 return stmt;
2713 /* Return the first non-label statement in basic block BB. */
2715 static gimple *
2716 first_non_label_stmt (basic_block bb)
2718 gimple_stmt_iterator i = gsi_start_bb (bb);
2719 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2720 gsi_next (&i);
2721 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2724 /* Return the last statement in basic block BB. */
2726 gimple *
2727 last_stmt (basic_block bb)
2729 gimple_stmt_iterator i = gsi_last_bb (bb);
2730 gimple *stmt = NULL;
2732 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2734 gsi_prev (&i);
2735 stmt = NULL;
2737 return stmt;
2740 /* Return the last statement of an otherwise empty block. Return NULL
2741 if the block is totally empty, or if it contains more than one
2742 statement. */
2744 gimple *
2745 last_and_only_stmt (basic_block bb)
2747 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2748 gimple *last, *prev;
2750 if (gsi_end_p (i))
2751 return NULL;
2753 last = gsi_stmt (i);
2754 gsi_prev_nondebug (&i);
2755 if (gsi_end_p (i))
2756 return last;
2758 /* Empty statements should no longer appear in the instruction stream.
2759 Everything that might have appeared before should be deleted by
2760 remove_useless_stmts, and the optimizers should just gsi_remove
2761 instead of smashing with build_empty_stmt.
2763 Thus the only thing that should appear here in a block containing
2764 one executable statement is a label. */
2765 prev = gsi_stmt (i);
2766 if (gimple_code (prev) == GIMPLE_LABEL)
2767 return last;
2768 else
2769 return NULL;
2772 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2774 static void
2775 reinstall_phi_args (edge new_edge, edge old_edge)
2777 edge_var_map *vm;
2778 int i;
2779 gphi_iterator phis;
2781 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2782 if (!v)
2783 return;
2785 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2786 v->iterate (i, &vm) && !gsi_end_p (phis);
2787 i++, gsi_next (&phis))
2789 gphi *phi = phis.phi ();
2790 tree result = redirect_edge_var_map_result (vm);
2791 tree arg = redirect_edge_var_map_def (vm);
2793 gcc_assert (result == gimple_phi_result (phi));
2795 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2798 redirect_edge_var_map_clear (old_edge);
2801 /* Returns the basic block after which the new basic block created
2802 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2803 near its "logical" location. This is of most help to humans looking
2804 at debugging dumps. */
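/* Sketch of the heuristic: if the block laid out just before DEST
   reaches DEST through a normal (non-complex) edge, the new block is
   placed after EDGE_IN->src; otherwise it is placed immediately
   before DEST.  */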
2806 basic_block
2807 split_edge_bb_loc (edge edge_in)
2809 basic_block dest = edge_in->dest;
2810 basic_block dest_prev = dest->prev_bb;
2812 if (dest_prev)
2814 edge e = find_edge (dest_prev, dest);
2815 if (e && !(e->flags & EDGE_COMPLEX))
2816 return edge_in->src;
2818 return dest_prev;
2821 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2822 Abort on abnormal edges. */
2824 static basic_block
2825 gimple_split_edge (edge edge_in)
2827 basic_block new_bb, after_bb, dest;
2828 edge new_edge, e;
2830 /* Abnormal edges cannot be split. */
2831 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2833 dest = edge_in->dest;
2835 after_bb = split_edge_bb_loc (edge_in);
2837 new_bb = create_empty_bb (after_bb);
2838 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2839 new_bb->count = edge_in->count;
2840 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2841 new_edge->probability = REG_BR_PROB_BASE;
2842 new_edge->count = edge_in->count;
2844 e = redirect_edge_and_branch (edge_in, new_bb);
2845 gcc_assert (e == edge_in);
2846 reinstall_phi_args (new_edge, e);
2848 return new_bb;
2852 /* Verify properties of the address expression T with base object BASE. */
2854 static tree
2855 verify_address (tree t, tree base)
2857 bool old_constant;
2858 bool old_side_effects;
2859 bool new_constant;
2860 bool new_side_effects;
2862 old_constant = TREE_CONSTANT (t);
2863 old_side_effects = TREE_SIDE_EFFECTS (t);
2865 recompute_tree_invariant_for_addr_expr (t);
2866 new_side_effects = TREE_SIDE_EFFECTS (t);
2867 new_constant = TREE_CONSTANT (t);
2869 if (old_constant != new_constant)
2871 error ("constant not recomputed when ADDR_EXPR changed");
2872 return t;
2874 if (old_side_effects != new_side_effects)
2876 error ("side effects not recomputed when ADDR_EXPR changed");
2877 return t;
2880 if (!(VAR_P (base)
2881 || TREE_CODE (base) == PARM_DECL
2882 || TREE_CODE (base) == RESULT_DECL))
2883 return NULL_TREE;
2885 if (DECL_GIMPLE_REG_P (base))
2887 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2888 return base;
2891 return NULL_TREE;
2894 /* Callback for walk_tree, check that all elements with address taken are
2895 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2896 inside a PHI node. */
2898 static tree
2899 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2901 tree t = *tp, x;
2903 if (TYPE_P (t))
2904 *walk_subtrees = 0;
2906 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2907 #define CHECK_OP(N, MSG) \
2908 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2909 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2911 switch (TREE_CODE (t))
2913 case SSA_NAME:
2914 if (SSA_NAME_IN_FREE_LIST (t))
2916 error ("SSA name in freelist but still referenced");
2917 return *tp;
2919 break;
2921 case PARM_DECL:
2922 case VAR_DECL:
2923 case RESULT_DECL:
2925 tree context = decl_function_context (t);
2926 if (context != cfun->decl
2927 && !SCOPE_FILE_SCOPE_P (context)
2928 && !TREE_STATIC (t)
2929 && !DECL_EXTERNAL (t))
2931 error ("Local declaration from a different function");
2932 return t;
2935 break;
2937 case INDIRECT_REF:
2938 error ("INDIRECT_REF in gimple IL");
2939 return t;
2941 case MEM_REF:
2942 x = TREE_OPERAND (t, 0);
2943 if (!POINTER_TYPE_P (TREE_TYPE (x))
2944 || !is_gimple_mem_ref_addr (x))
2946 error ("invalid first operand of MEM_REF");
2947 return x;
2949 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2950 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2952 error ("invalid offset operand of MEM_REF");
2953 return TREE_OPERAND (t, 1);
2955 if (TREE_CODE (x) == ADDR_EXPR)
2957 tree va = verify_address (x, TREE_OPERAND (x, 0));
2958 if (va)
2959 return va;
2960 x = TREE_OPERAND (x, 0);
2962 walk_tree (&x, verify_expr, data, NULL);
2963 *walk_subtrees = 0;
2964 break;
2966 case ASSERT_EXPR:
2967 x = fold (ASSERT_EXPR_COND (t));
2968 if (x == boolean_false_node)
2970 error ("ASSERT_EXPR with an always-false condition");
2971 return *tp;
2973 break;
2975 case MODIFY_EXPR:
2976 error ("MODIFY_EXPR not expected while having tuples");
2977 return *tp;
2979 case ADDR_EXPR:
2981 tree tem;
2983 gcc_assert (is_gimple_address (t));
2985 /* Skip any references (they will be checked when we recurse down the
2986 tree) and ensure that any variable used as a prefix is marked
2987 addressable. */
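/* E.g. (illustrative): for &x.a[i_1].b the loop below strips the
   COMPONENT_REFs and ARRAY_REF down to the base object x, which must
   then have TREE_ADDRESSABLE set.  */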
2988 for (x = TREE_OPERAND (t, 0);
2989 handled_component_p (x);
2990 x = TREE_OPERAND (x, 0))
2993 if ((tem = verify_address (t, x)))
2994 return tem;
2996 if (!(VAR_P (x)
2997 || TREE_CODE (x) == PARM_DECL
2998 || TREE_CODE (x) == RESULT_DECL))
2999 return NULL;
3001 if (!TREE_ADDRESSABLE (x))
3003 error ("address taken, but ADDRESSABLE bit not set");
3004 return x;
3007 break;
3010 case COND_EXPR:
3011 x = COND_EXPR_COND (t);
3012 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3014 error ("non-integral used in condition");
3015 return x;
3017 if (!is_gimple_condexpr (x))
3019 error ("invalid conditional operand");
3020 return x;
3022 break;
3024 case NON_LVALUE_EXPR:
3025 case TRUTH_NOT_EXPR:
3026 gcc_unreachable ();
3028 CASE_CONVERT:
3029 case FIX_TRUNC_EXPR:
3030 case FLOAT_EXPR:
3031 case NEGATE_EXPR:
3032 case ABS_EXPR:
3033 case BIT_NOT_EXPR:
3034 CHECK_OP (0, "invalid operand to unary operator");
3035 break;
3037 case REALPART_EXPR:
3038 case IMAGPART_EXPR:
3039 case BIT_FIELD_REF:
3040 if (!is_gimple_reg_type (TREE_TYPE (t)))
3042 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3043 return t;
3046 if (TREE_CODE (t) == BIT_FIELD_REF)
3048 tree t0 = TREE_OPERAND (t, 0);
3049 tree t1 = TREE_OPERAND (t, 1);
3050 tree t2 = TREE_OPERAND (t, 2);
3051 if (!tree_fits_uhwi_p (t1)
3052 || !tree_fits_uhwi_p (t2))
3054 error ("invalid position or size operand to BIT_FIELD_REF");
3055 return t;
3057 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3058 && (TYPE_PRECISION (TREE_TYPE (t))
3059 != tree_to_uhwi (t1)))
3061 error ("integral result type precision does not match "
3062 "field size of BIT_FIELD_REF");
3063 return t;
3065 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3066 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3067 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3068 != tree_to_uhwi (t1)))
3070 error ("mode size of non-integral result does not "
3071 "match field size of BIT_FIELD_REF");
3072 return t;
3074 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3075 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3076 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3078 error ("position plus size exceeds size of referenced object in "
3079 "BIT_FIELD_REF");
3080 return t;
3083 t = TREE_OPERAND (t, 0);
3085 /* Fall-through. */
3086 case COMPONENT_REF:
3087 case ARRAY_REF:
3088 case ARRAY_RANGE_REF:
3089 case VIEW_CONVERT_EXPR:
3090 /* We have a nest of references. Verify that each of the operands
3091 that determine where to reference is either a constant or a variable,
3092 verify that the base is valid, and then show we've already checked
3093 the subtrees. */
3094 while (handled_component_p (t))
3096 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3097 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3098 else if (TREE_CODE (t) == ARRAY_REF
3099 || TREE_CODE (t) == ARRAY_RANGE_REF)
3101 CHECK_OP (1, "invalid array index");
3102 if (TREE_OPERAND (t, 2))
3103 CHECK_OP (2, "invalid array lower bound");
3104 if (TREE_OPERAND (t, 3))
3105 CHECK_OP (3, "invalid array stride");
3107 else if (TREE_CODE (t) == BIT_FIELD_REF
3108 || TREE_CODE (t) == REALPART_EXPR
3109 || TREE_CODE (t) == IMAGPART_EXPR)
3111 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3112 "REALPART_EXPR");
3113 return t;
3116 t = TREE_OPERAND (t, 0);
3119 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3121 error ("invalid reference prefix");
3122 return t;
3124 walk_tree (&t, verify_expr, data, NULL);
3125 *walk_subtrees = 0;
3126 break;
3127 case PLUS_EXPR:
3128 case MINUS_EXPR:
3129 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3130 should be done using POINTER_PLUS_EXPR. */
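/* E.g. (illustrative): the C expression p + i is represented as
   POINTER_PLUS_EXPR <p, (sizetype) i * sizeof (*p)>, so a PLUS_EXPR
   whose type is a pointer type is invalid GIMPLE.  */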
3131 if (POINTER_TYPE_P (TREE_TYPE (t)))
3133 error ("invalid operand to plus/minus, type is a pointer");
3134 return t;
3136 CHECK_OP (0, "invalid operand to binary operator");
3137 CHECK_OP (1, "invalid operand to binary operator");
3138 break;
3140 case POINTER_PLUS_EXPR:
3141 /* Check to make sure the first operand is a pointer or reference type. */
3142 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3144 error ("invalid operand to pointer plus, first operand is not a pointer");
3145 return t;
3147 /* Check to make sure the second operand is a ptrofftype. */
3148 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3150 error ("invalid operand to pointer plus, second operand is not an "
3151 "integer type of appropriate width");
3152 return t;
3154 /* FALLTHROUGH */
3155 case LT_EXPR:
3156 case LE_EXPR:
3157 case GT_EXPR:
3158 case GE_EXPR:
3159 case EQ_EXPR:
3160 case NE_EXPR:
3161 case UNORDERED_EXPR:
3162 case ORDERED_EXPR:
3163 case UNLT_EXPR:
3164 case UNLE_EXPR:
3165 case UNGT_EXPR:
3166 case UNGE_EXPR:
3167 case UNEQ_EXPR:
3168 case LTGT_EXPR:
3169 case MULT_EXPR:
3170 case TRUNC_DIV_EXPR:
3171 case CEIL_DIV_EXPR:
3172 case FLOOR_DIV_EXPR:
3173 case ROUND_DIV_EXPR:
3174 case TRUNC_MOD_EXPR:
3175 case CEIL_MOD_EXPR:
3176 case FLOOR_MOD_EXPR:
3177 case ROUND_MOD_EXPR:
3178 case RDIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case MIN_EXPR:
3181 case MAX_EXPR:
3182 case LSHIFT_EXPR:
3183 case RSHIFT_EXPR:
3184 case LROTATE_EXPR:
3185 case RROTATE_EXPR:
3186 case BIT_IOR_EXPR:
3187 case BIT_XOR_EXPR:
3188 case BIT_AND_EXPR:
3189 CHECK_OP (0, "invalid operand to binary operator");
3190 CHECK_OP (1, "invalid operand to binary operator");
3191 break;
3193 case CONSTRUCTOR:
3194 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3195 *walk_subtrees = 0;
3196 break;
3198 case CASE_LABEL_EXPR:
3199 if (CASE_CHAIN (t))
3201 error ("invalid CASE_CHAIN");
3202 return t;
3204 break;
3206 default:
3207 break;
3209 return NULL;
3211 #undef CHECK_OP
3215 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3216 Returns true if there is an error, otherwise false. */
3218 static bool
3219 verify_types_in_gimple_min_lval (tree expr)
3221 tree op;
3223 if (is_gimple_id (expr))
3224 return false;
3226 if (TREE_CODE (expr) != TARGET_MEM_REF
3227 && TREE_CODE (expr) != MEM_REF)
3229 error ("invalid expression for min lvalue");
3230 return true;
3233 /* TARGET_MEM_REFs are strange beasts. */
3234 if (TREE_CODE (expr) == TARGET_MEM_REF)
3235 return false;
3237 op = TREE_OPERAND (expr, 0);
3238 if (!is_gimple_val (op))
3240 error ("invalid operand in indirect reference");
3241 debug_generic_stmt (op);
3242 return true;
3244 /* Memory references now generally can involve a value conversion. */
3246 return false;
3249 /* Verify if EXPR is a valid GIMPLE reference expression. If
3250 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3251 if there is an error, otherwise false. */
3253 static bool
3254 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3256 while (handled_component_p (expr))
3258 tree op = TREE_OPERAND (expr, 0);
3260 if (TREE_CODE (expr) == ARRAY_REF
3261 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3263 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3264 || (TREE_OPERAND (expr, 2)
3265 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3266 || (TREE_OPERAND (expr, 3)
3267 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3269 error ("invalid operands to array reference");
3270 debug_generic_stmt (expr);
3271 return true;
3275 /* Verify if the reference array element types are compatible. */
3276 if (TREE_CODE (expr) == ARRAY_REF
3277 && !useless_type_conversion_p (TREE_TYPE (expr),
3278 TREE_TYPE (TREE_TYPE (op))))
3280 error ("type mismatch in array reference");
3281 debug_generic_stmt (TREE_TYPE (expr));
3282 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3283 return true;
3285 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3286 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3287 TREE_TYPE (TREE_TYPE (op))))
3289 error ("type mismatch in array range reference");
3290 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3291 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3292 return true;
3295 if ((TREE_CODE (expr) == REALPART_EXPR
3296 || TREE_CODE (expr) == IMAGPART_EXPR)
3297 && !useless_type_conversion_p (TREE_TYPE (expr),
3298 TREE_TYPE (TREE_TYPE (op))))
3300 error ("type mismatch in real/imagpart reference");
3301 debug_generic_stmt (TREE_TYPE (expr));
3302 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3303 return true;
3306 if (TREE_CODE (expr) == COMPONENT_REF
3307 && !useless_type_conversion_p (TREE_TYPE (expr),
3308 TREE_TYPE (TREE_OPERAND (expr, 1))))
3310 error ("type mismatch in component reference");
3311 debug_generic_stmt (TREE_TYPE (expr));
3312 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3313 return true;
3316 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3318 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3319 that their operand is not an SSA name or an invariant when
3320 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3321 bug). Otherwise there is nothing to verify; gross mismatches at
3322 most invoke undefined behavior. */
3323 if (require_lvalue
3324 && (TREE_CODE (op) == SSA_NAME
3325 || is_gimple_min_invariant (op)))
3327 error ("conversion of an SSA_NAME on the left hand side");
3328 debug_generic_stmt (expr);
3329 return true;
3331 else if (TREE_CODE (op) == SSA_NAME
3332 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3334 error ("conversion of register to a different size");
3335 debug_generic_stmt (expr);
3336 return true;
3338 else if (!handled_component_p (op))
3339 return false;
3342 expr = op;
3345 if (TREE_CODE (expr) == MEM_REF)
3347 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3349 error ("invalid address operand in MEM_REF");
3350 debug_generic_stmt (expr);
3351 return true;
3353 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3354 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3356 error ("invalid offset operand in MEM_REF");
3357 debug_generic_stmt (expr);
3358 return true;
3361 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3363 if (!TMR_BASE (expr)
3364 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3366 error ("invalid address operand in TARGET_MEM_REF");
3367 return true;
3369 if (!TMR_OFFSET (expr)
3370 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3371 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3373 error ("invalid offset operand in TARGET_MEM_REF");
3374 debug_generic_stmt (expr);
3375 return true;
3379 return ((require_lvalue || !is_gimple_min_invariant (expr))
3380 && verify_types_in_gimple_min_lval (expr));
3383 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3384 list of pointer-to types that is trivially convertible to DEST. */
3386 static bool
3387 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3389 tree src;
3391 if (!TYPE_POINTER_TO (src_obj))
3392 return true;
3394 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3395 if (useless_type_conversion_p (dest, src))
3396 return true;
3398 return false;
3401 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3402 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3404 static bool
3405 valid_fixed_convert_types_p (tree type1, tree type2)
3407 return (FIXED_POINT_TYPE_P (type1)
3408 && (INTEGRAL_TYPE_P (type2)
3409 || SCALAR_FLOAT_TYPE_P (type2)
3410 || FIXED_POINT_TYPE_P (type2)));
3413 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3414 is a problem, otherwise false. */
3416 static bool
3417 verify_gimple_call (gcall *stmt)
3419 tree fn = gimple_call_fn (stmt);
3420 tree fntype, fndecl;
3421 unsigned i;
3423 if (gimple_call_internal_p (stmt))
3425 if (fn)
3427 error ("gimple call has two targets");
3428 debug_generic_stmt (fn);
3429 return true;
3431 /* FIXME: allow passing a label as an argument to the internal fn PHI from the GIMPLE FE. */
3432 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3434 return false;
3437 else
3439 if (!fn)
3441 error ("gimple call has no target");
3442 return true;
3446 if (fn && !is_gimple_call_addr (fn))
3448 error ("invalid function in gimple call");
3449 debug_generic_stmt (fn);
3450 return true;
3453 if (fn
3454 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3455 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3456 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3458 error ("non-function in gimple call");
3459 return true;
3462 fndecl = gimple_call_fndecl (stmt);
3463 if (fndecl
3464 && TREE_CODE (fndecl) == FUNCTION_DECL
3465 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3466 && !DECL_PURE_P (fndecl)
3467 && !TREE_READONLY (fndecl))
3469 error ("invalid pure const state for function");
3470 return true;
3473 tree lhs = gimple_call_lhs (stmt);
3474 if (lhs
3475 && (!is_gimple_lvalue (lhs)
3476 || verify_types_in_gimple_reference (lhs, true)))
3478 error ("invalid LHS in gimple call");
3479 return true;
3482 if (gimple_call_ctrl_altering_p (stmt)
3483 && gimple_call_noreturn_p (stmt)
3484 && should_remove_lhs_p (lhs))
3486 error ("LHS in noreturn call");
3487 return true;
3490 fntype = gimple_call_fntype (stmt);
3491 if (fntype
3492 && lhs
3493 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3494 /* ??? At least C++ misses conversions at assignments from
3495 void * call results.
3496 ??? Java is completely off. Especially with functions
3497 returning java.lang.Object.
3498 For now simply allow arbitrary pointer type conversions. */
3499 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3500 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3502 error ("invalid conversion in gimple call");
3503 debug_generic_stmt (TREE_TYPE (lhs));
3504 debug_generic_stmt (TREE_TYPE (fntype));
3505 return true;
3508 if (gimple_call_chain (stmt)
3509 && !is_gimple_val (gimple_call_chain (stmt)))
3511 error ("invalid static chain in gimple call");
3512 debug_generic_stmt (gimple_call_chain (stmt));
3513 return true;
3516 /* If there is a static chain argument, the call should either be
3517 indirect, or the decl should have DECL_STATIC_CHAIN set. */
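/* Illustrative sketch: for a GNU C nested function that refers to
   locals of its enclosing function, the direct call passes the
   enclosing frame as the static chain and the callee's decl has
   DECL_STATIC_CHAIN set.  */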
3518 if (gimple_call_chain (stmt)
3519 && fndecl
3520 && !DECL_STATIC_CHAIN (fndecl))
3522 error ("static chain with function that doesn%'t use one");
3523 return true;
3526 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3528 switch (DECL_FUNCTION_CODE (fndecl))
3530 case BUILT_IN_UNREACHABLE:
3531 case BUILT_IN_TRAP:
3532 if (gimple_call_num_args (stmt) > 0)
3534 /* Built-in unreachable with parameters might not be caught by
3535 the undefined behavior sanitizer. Front ends do check that users do not
3536 call them that way, but we also produce calls to
3537 __builtin_unreachable internally, for example when IPA figures
3538 out a call cannot happen in a legal program. In such cases,
3539 we must make sure arguments are stripped off. */
3540 error ("__builtin_unreachable or __builtin_trap call with "
3541 "arguments");
3542 return true;
3544 break;
3545 default:
3546 break;
3550 /* ??? The C frontend passes unpromoted arguments in case it
3551 didn't see a function declaration before the call. So for now
3552 leave the call arguments mostly unverified. Once we gimplify
3553 unit-at-a-time we have a chance to fix this. */
3555 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3557 tree arg = gimple_call_arg (stmt, i);
3558 if ((is_gimple_reg_type (TREE_TYPE (arg))
3559 && !is_gimple_val (arg))
3560 || (!is_gimple_reg_type (TREE_TYPE (arg))
3561 && !is_gimple_lvalue (arg)))
3563 error ("invalid argument to gimple call");
3564 debug_generic_expr (arg);
3565 return true;
3569 return false;
3572 /* Verifies the gimple comparison with the result type TYPE and
3573 the operands OP0 and OP1; the comparison code is CODE. */
3575 static bool
3576 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3578 tree op0_type = TREE_TYPE (op0);
3579 tree op1_type = TREE_TYPE (op1);
3581 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3583 error ("invalid operands in gimple comparison");
3584 return true;
3587 /* For comparisons we do not have the operation's type as the
3588 effective type the comparison is carried out in. Instead
3589 we require that either the first operand is trivially
3590 convertible into the second, or the other way around.
3591 Because we special-case pointers to void we allow
3592 comparisons of pointers with the same mode as well. */
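/* E.g. (illustrative): comparing an int directly against a long is
   rejected, while comparing a void * against a char * of the same
   mode is accepted by the pointer special case below.  */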
3593 if (!useless_type_conversion_p (op0_type, op1_type)
3594 && !useless_type_conversion_p (op1_type, op0_type)
3595 && (!POINTER_TYPE_P (op0_type)
3596 || !POINTER_TYPE_P (op1_type)
3597 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3599 error ("mismatching comparison operand types");
3600 debug_generic_expr (op0_type);
3601 debug_generic_expr (op1_type);
3602 return true;
3605 /* The resulting type of a comparison may be an effective boolean type. */
3606 if (INTEGRAL_TYPE_P (type)
3607 && (TREE_CODE (type) == BOOLEAN_TYPE
3608 || TYPE_PRECISION (type) == 1))
3610 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3611 || TREE_CODE (op1_type) == VECTOR_TYPE)
3612 && code != EQ_EXPR && code != NE_EXPR
3613 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3614 && !VECTOR_INTEGER_TYPE_P (op0_type))
3616 error ("unsupported operation or type for vector comparison"
3617 " returning a boolean");
3618 debug_generic_expr (op0_type);
3619 debug_generic_expr (op1_type);
3620 return true;
3623 /* Or a boolean vector type with the same element count
3624 as the comparison operand types. */
3625 else if (TREE_CODE (type) == VECTOR_TYPE
3626 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3628 if (TREE_CODE (op0_type) != VECTOR_TYPE
3629 || TREE_CODE (op1_type) != VECTOR_TYPE)
3631 error ("non-vector operands in vector comparison");
3632 debug_generic_expr (op0_type);
3633 debug_generic_expr (op1_type);
3634 return true;
3637 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3639 error ("invalid vector comparison resulting type");
3640 debug_generic_expr (type);
3641 return true;
3644 else
3646 error ("bogus comparison result type");
3647 debug_generic_expr (type);
3648 return true;
3651 return false;
3654 /* Verify a gimple assignment statement STMT with a unary rhs.
3655 Returns true if anything is wrong. */
3657 static bool
3658 verify_gimple_assign_unary (gassign *stmt)
3660 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3661 tree lhs = gimple_assign_lhs (stmt);
3662 tree lhs_type = TREE_TYPE (lhs);
3663 tree rhs1 = gimple_assign_rhs1 (stmt);
3664 tree rhs1_type = TREE_TYPE (rhs1);
3666 if (!is_gimple_reg (lhs))
3668 error ("non-register as LHS of unary operation");
3669 return true;
3672 if (!is_gimple_val (rhs1))
3674 error ("invalid operand in unary operation");
3675 return true;
3678 /* First handle conversions. */
3679 switch (rhs_code)
3681 CASE_CONVERT:
3683 /* Allow conversions from pointer type to integral type only if
3684 there is no sign or zero extension involved.
3685 For targets where the precision of ptrofftype doesn't match that
3686 of pointers we need to allow arbitrary conversions to ptrofftype. */
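/* E.g. (illustrative): converting a pointer to an integer type no
   wider than the pointer involves no extension and is accepted, and
   converting any integer to a pointer type is always accepted
   here.  */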
3687 if ((POINTER_TYPE_P (lhs_type)
3688 && INTEGRAL_TYPE_P (rhs1_type))
3689 || (POINTER_TYPE_P (rhs1_type)
3690 && INTEGRAL_TYPE_P (lhs_type)
3691 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3692 || ptrofftype_p (sizetype))))
3693 return false;
3695 /* Allow conversion from integral to offset type and vice versa. */
3696 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3697 && INTEGRAL_TYPE_P (rhs1_type))
3698 || (INTEGRAL_TYPE_P (lhs_type)
3699 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3700 return false;
3702 /* Otherwise assert we are converting between types of the
3703 same kind. */
3704 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3706 error ("invalid types in nop conversion");
3707 debug_generic_expr (lhs_type);
3708 debug_generic_expr (rhs1_type);
3709 return true;
3712 return false;
3715 case ADDR_SPACE_CONVERT_EXPR:
3717 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3718 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3719 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3721 error ("invalid types in address space conversion");
3722 debug_generic_expr (lhs_type);
3723 debug_generic_expr (rhs1_type);
3724 return true;
3727 return false;
3730 case FIXED_CONVERT_EXPR:
3732 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3733 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3735 error ("invalid types in fixed-point conversion");
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 return true;
3741 return false;
3744 case FLOAT_EXPR:
3746 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3747 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3748 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3750 error ("invalid types in conversion to floating point");
3751 debug_generic_expr (lhs_type);
3752 debug_generic_expr (rhs1_type);
3753 return true;
3756 return false;
3759 case FIX_TRUNC_EXPR:
3761 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3762 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3763 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3765 error ("invalid types in conversion to integer");
3766 debug_generic_expr (lhs_type);
3767 debug_generic_expr (rhs1_type);
3768 return true;
3771 return false;
3773 case REDUC_MAX_EXPR:
3774 case REDUC_MIN_EXPR:
3775 case REDUC_PLUS_EXPR:
3776 if (!VECTOR_TYPE_P (rhs1_type)
3777 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3779 error ("reduction should convert from vector to element type");
3780 debug_generic_expr (lhs_type);
3781 debug_generic_expr (rhs1_type);
3782 return true;
3784 return false;
3786 case VEC_UNPACK_HI_EXPR:
3787 case VEC_UNPACK_LO_EXPR:
3788 case VEC_UNPACK_FLOAT_HI_EXPR:
3789 case VEC_UNPACK_FLOAT_LO_EXPR:
3790 /* FIXME. */
3791 return false;
3793 case NEGATE_EXPR:
3794 case ABS_EXPR:
3795 case BIT_NOT_EXPR:
3796 case PAREN_EXPR:
3797 case CONJ_EXPR:
3798 break;
3800 default:
3801 gcc_unreachable ();
3804 /* For the remaining codes assert there is no conversion involved. */
3805 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3807 error ("non-trivial conversion in unary operation");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 return true;
3813 return false;
3816 /* Verify a gimple assignment statement STMT with a binary rhs.
3817 Returns true if anything is wrong. */
3819 static bool
3820 verify_gimple_assign_binary (gassign *stmt)
3822 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3823 tree lhs = gimple_assign_lhs (stmt);
3824 tree lhs_type = TREE_TYPE (lhs);
3825 tree rhs1 = gimple_assign_rhs1 (stmt);
3826 tree rhs1_type = TREE_TYPE (rhs1);
3827 tree rhs2 = gimple_assign_rhs2 (stmt);
3828 tree rhs2_type = TREE_TYPE (rhs2);
3830 if (!is_gimple_reg (lhs))
3832 error ("non-register as LHS of binary operation");
3833 return true;
3836 if (!is_gimple_val (rhs1)
3837 || !is_gimple_val (rhs2))
3839 error ("invalid operands in binary operation");
3840 return true;
3843 /* First handle operations that involve different types. */
3844 switch (rhs_code)
3846 case COMPLEX_EXPR:
3848 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3849 || !(INTEGRAL_TYPE_P (rhs1_type)
3850 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3851 || !(INTEGRAL_TYPE_P (rhs2_type)
3852 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3854 error ("type mismatch in complex expression");
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 debug_generic_expr (rhs2_type);
3858 return true;
3861 return false;
3864 case LSHIFT_EXPR:
3865 case RSHIFT_EXPR:
3866 case LROTATE_EXPR:
3867 case RROTATE_EXPR:
3869 /* Shifts and rotates are ok on integral types, fixed point
3870 types and integer vector types. */
3871 if ((!INTEGRAL_TYPE_P (rhs1_type)
3872 && !FIXED_POINT_TYPE_P (rhs1_type)
3873 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3874 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3875 || (!INTEGRAL_TYPE_P (rhs2_type)
3876 /* Vector shifts of vectors are also ok. */
3877 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3878 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3879 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3880 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3881 || !useless_type_conversion_p (lhs_type, rhs1_type))
3883 error ("type mismatch in shift expression");
3884 debug_generic_expr (lhs_type);
3885 debug_generic_expr (rhs1_type);
3886 debug_generic_expr (rhs2_type);
3887 return true;
3890 return false;
3893 case WIDEN_LSHIFT_EXPR:
3895 if (!INTEGRAL_TYPE_P (lhs_type)
3896 || !INTEGRAL_TYPE_P (rhs1_type)
3897 || TREE_CODE (rhs2) != INTEGER_CST
3898 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3900 error ("type mismatch in widening vector shift expression");
3901 debug_generic_expr (lhs_type);
3902 debug_generic_expr (rhs1_type);
3903 debug_generic_expr (rhs2_type);
3904 return true;
3907 return false;
3910 case VEC_WIDEN_LSHIFT_HI_EXPR:
3911 case VEC_WIDEN_LSHIFT_LO_EXPR:
3913 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3914 || TREE_CODE (lhs_type) != VECTOR_TYPE
3915 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3916 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3917 || TREE_CODE (rhs2) != INTEGER_CST
3918 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3919 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3921 error ("type mismatch in widening vector shift expression");
3922 debug_generic_expr (lhs_type);
3923 debug_generic_expr (rhs1_type);
3924 debug_generic_expr (rhs2_type);
3925 return true;
3928 return false;
3931 case PLUS_EXPR:
3932 case MINUS_EXPR:
3934 tree lhs_etype = lhs_type;
3935 tree rhs1_etype = rhs1_type;
3936 tree rhs2_etype = rhs2_type;
3937 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3939 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3940 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3942 error ("invalid non-vector operands to vector valued plus");
3943 return true;
3945 lhs_etype = TREE_TYPE (lhs_type);
3946 rhs1_etype = TREE_TYPE (rhs1_type);
3947 rhs2_etype = TREE_TYPE (rhs2_type);
3949 if (POINTER_TYPE_P (lhs_etype)
3950 || POINTER_TYPE_P (rhs1_etype)
3951 || POINTER_TYPE_P (rhs2_etype))
3953 error ("invalid (pointer) operands to plus/minus");
3954 return true;
3957 /* Continue with generic binary expression handling. */
3958 break;
3961 case POINTER_PLUS_EXPR:
3963 if (!POINTER_TYPE_P (rhs1_type)
3964 || !useless_type_conversion_p (lhs_type, rhs1_type)
3965 || !ptrofftype_p (rhs2_type))
3967 error ("type mismatch in pointer plus expression");
3968 debug_generic_stmt (lhs_type);
3969 debug_generic_stmt (rhs1_type);
3970 debug_generic_stmt (rhs2_type);
3971 return true;
3974 return false;
3977 case TRUTH_ANDIF_EXPR:
3978 case TRUTH_ORIF_EXPR:
3979 case TRUTH_AND_EXPR:
3980 case TRUTH_OR_EXPR:
3981 case TRUTH_XOR_EXPR:
3983 gcc_unreachable ();
3985 case LT_EXPR:
3986 case LE_EXPR:
3987 case GT_EXPR:
3988 case GE_EXPR:
3989 case EQ_EXPR:
3990 case NE_EXPR:
3991 case UNORDERED_EXPR:
3992 case ORDERED_EXPR:
3993 case UNLT_EXPR:
3994 case UNLE_EXPR:
3995 case UNGT_EXPR:
3996 case UNGE_EXPR:
3997 case UNEQ_EXPR:
3998 case LTGT_EXPR:
3999 /* Comparisons are also binary, but the result type is not
4000 connected to the operand types. */
4001 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4003 case WIDEN_MULT_EXPR:
4004 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4005 return true;
4006 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4007 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4009 case WIDEN_SUM_EXPR:
4010 case VEC_WIDEN_MULT_HI_EXPR:
4011 case VEC_WIDEN_MULT_LO_EXPR:
4012 case VEC_WIDEN_MULT_EVEN_EXPR:
4013 case VEC_WIDEN_MULT_ODD_EXPR:
4014 case VEC_PACK_TRUNC_EXPR:
4015 case VEC_PACK_SAT_EXPR:
4016 case VEC_PACK_FIX_TRUNC_EXPR:
4017 /* FIXME. */
4018 return false;
4020 case MULT_EXPR:
4021 case MULT_HIGHPART_EXPR:
4022 case TRUNC_DIV_EXPR:
4023 case CEIL_DIV_EXPR:
4024 case FLOOR_DIV_EXPR:
4025 case ROUND_DIV_EXPR:
4026 case TRUNC_MOD_EXPR:
4027 case CEIL_MOD_EXPR:
4028 case FLOOR_MOD_EXPR:
4029 case ROUND_MOD_EXPR:
4030 case RDIV_EXPR:
4031 case EXACT_DIV_EXPR:
4032 case MIN_EXPR:
4033 case MAX_EXPR:
4034 case BIT_IOR_EXPR:
4035 case BIT_XOR_EXPR:
4036 case BIT_AND_EXPR:
4037 /* Continue with generic binary expression handling. */
4038 break;
4040 default:
4041 gcc_unreachable ();
4044 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4045 || !useless_type_conversion_p (lhs_type, rhs2_type))
4047 error ("type mismatch in binary expression");
4048 debug_generic_stmt (lhs_type);
4049 debug_generic_stmt (rhs1_type);
4050 debug_generic_stmt (rhs2_type);
4051 return true;
4054 return false;
4057 /* Verify a gimple assignment statement STMT with a ternary rhs.
4058 Returns true if anything is wrong. */
4060 static bool
4061 verify_gimple_assign_ternary (gassign *stmt)
4063 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4064 tree lhs = gimple_assign_lhs (stmt);
4065 tree lhs_type = TREE_TYPE (lhs);
4066 tree rhs1 = gimple_assign_rhs1 (stmt);
4067 tree rhs1_type = TREE_TYPE (rhs1);
4068 tree rhs2 = gimple_assign_rhs2 (stmt);
4069 tree rhs2_type = TREE_TYPE (rhs2);
4070 tree rhs3 = gimple_assign_rhs3 (stmt);
4071 tree rhs3_type = TREE_TYPE (rhs3);
4073 if (!is_gimple_reg (lhs))
4075 error ("non-register as LHS of ternary operation");
4076 return true;
4079 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4080 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4081 || !is_gimple_val (rhs2)
4082 || !is_gimple_val (rhs3))
4084 error ("invalid operands in ternary operation");
4085 return true;
4088 /* First handle operations that involve different types. */
4089 switch (rhs_code)
4091 case WIDEN_MULT_PLUS_EXPR:
4092 case WIDEN_MULT_MINUS_EXPR:
4093 if ((!INTEGRAL_TYPE_P (rhs1_type)
4094 && !FIXED_POINT_TYPE_P (rhs1_type))
4095 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4096 || !useless_type_conversion_p (lhs_type, rhs3_type)
4097 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4098 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4100 error ("type mismatch in widening multiply-accumulate expression");
4101 debug_generic_expr (lhs_type);
4102 debug_generic_expr (rhs1_type);
4103 debug_generic_expr (rhs2_type);
4104 debug_generic_expr (rhs3_type);
4105 return true;
4107 break;
4109 case FMA_EXPR:
4110 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4111 || !useless_type_conversion_p (lhs_type, rhs2_type)
4112 || !useless_type_conversion_p (lhs_type, rhs3_type))
4114 error ("type mismatch in fused multiply-add expression");
4115 debug_generic_expr (lhs_type);
4116 debug_generic_expr (rhs1_type);
4117 debug_generic_expr (rhs2_type);
4118 debug_generic_expr (rhs3_type);
4119 return true;
4121 break;
4123 case VEC_COND_EXPR:
4124 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4125 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4126 != TYPE_VECTOR_SUBPARTS (lhs_type))
4128 error ("the first argument of a VEC_COND_EXPR must be of a "
4129 "boolean vector type of the same number of elements "
4130 "as the result");
4131 debug_generic_expr (lhs_type);
4132 debug_generic_expr (rhs1_type);
4133 return true;
4135 /* Fallthrough. */
4136 case COND_EXPR:
4137 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4138 || !useless_type_conversion_p (lhs_type, rhs3_type))
4140 error ("type mismatch in conditional expression");
4141 debug_generic_expr (lhs_type);
4142 debug_generic_expr (rhs2_type);
4143 debug_generic_expr (rhs3_type);
4144 return true;
4146 break;
4148 case VEC_PERM_EXPR:
4149 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4150 || !useless_type_conversion_p (lhs_type, rhs2_type))
4152 error ("type mismatch in vector permute expression");
4153 debug_generic_expr (lhs_type);
4154 debug_generic_expr (rhs1_type);
4155 debug_generic_expr (rhs2_type);
4156 debug_generic_expr (rhs3_type);
4157 return true;
4160 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4161 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4162 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4164 error ("vector types expected in vector permute expression");
4165 debug_generic_expr (lhs_type);
4166 debug_generic_expr (rhs1_type);
4167 debug_generic_expr (rhs2_type);
4168 debug_generic_expr (rhs3_type);
4169 return true;
4172 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4173 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4174 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4175 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4176 != TYPE_VECTOR_SUBPARTS (lhs_type))
4178 error ("vectors with different element number found "
4179 "in vector permute expression");
4180 debug_generic_expr (lhs_type);
4181 debug_generic_expr (rhs1_type);
4182 debug_generic_expr (rhs2_type);
4183 debug_generic_expr (rhs3_type);
4184 return true;
4187 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4188 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4189 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4191 error ("invalid mask type in vector permute expression");
4192 debug_generic_expr (lhs_type);
4193 debug_generic_expr (rhs1_type);
4194 debug_generic_expr (rhs2_type);
4195 debug_generic_expr (rhs3_type);
4196 return true;
4199 return false;
4201 case SAD_EXPR:
4202 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4203 || !useless_type_conversion_p (lhs_type, rhs3_type)
4204 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4205 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4207 error ("type mismatch in sad expression");
4208 debug_generic_expr (lhs_type);
4209 debug_generic_expr (rhs1_type);
4210 debug_generic_expr (rhs2_type);
4211 debug_generic_expr (rhs3_type);
4212 return true;
4215 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4216 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4217 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4219 error ("vector types expected in sad expression");
4220 debug_generic_expr (lhs_type);
4221 debug_generic_expr (rhs1_type);
4222 debug_generic_expr (rhs2_type);
4223 debug_generic_expr (rhs3_type);
4224 return true;
4227 return false;
4229 case BIT_INSERT_EXPR:
4230 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4232 error ("type mismatch in BIT_INSERT_EXPR");
4233 debug_generic_expr (lhs_type);
4234 debug_generic_expr (rhs1_type);
4235 return true;
4237 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4238 && INTEGRAL_TYPE_P (rhs2_type))
4239 || (VECTOR_TYPE_P (rhs1_type)
4240 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4242 error ("not allowed type combination in BIT_INSERT_EXPR");
4243 debug_generic_expr (rhs1_type);
4244 debug_generic_expr (rhs2_type);
4245 return true;
4247 if (! tree_fits_uhwi_p (rhs3)
4248 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4250 error ("invalid position or size in BIT_INSERT_EXPR");
4251 return true;
4253 if (INTEGRAL_TYPE_P (rhs1_type))
4255 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4256 if (bitpos >= TYPE_PRECISION (rhs1_type)
4257 || (bitpos + TYPE_PRECISION (rhs2_type)
4258 > TYPE_PRECISION (rhs1_type)))
4260 error ("insertion out of range in BIT_INSERT_EXPR");
4261 return true;
4264 else if (VECTOR_TYPE_P (rhs1_type))
4266 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4267 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4268 if (bitpos % bitsize != 0)
4270 error ("vector insertion not at element boundary");
4271 return true;
4274 return false;
4276 case DOT_PROD_EXPR:
4277 case REALIGN_LOAD_EXPR:
4278 /* FIXME. */
4279 return false;
4281 default:
4282 gcc_unreachable ();
4284 return false;
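/* Editor's illustration (not part of the original sources): in GIMPLE
   dump syntax a well-formed COND_EXPR ternary assignment looks like

     x_5 = a_1 < b_2 ? c_3 : d_4;

   where the comparison is the is_gimple_condexpr rhs1 and both arms
   must be trivially convertible to the type of x_5; a float arm
   feeding an integer x_5 trips the "type mismatch in conditional
   expression" diagnostic above.  */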
4287 /* Verify a gimple assignment statement STMT with a single rhs.
4288 Returns true if anything is wrong. */
4290 static bool
4291 verify_gimple_assign_single (gassign *stmt)
4293 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4294 tree lhs = gimple_assign_lhs (stmt);
4295 tree lhs_type = TREE_TYPE (lhs);
4296 tree rhs1 = gimple_assign_rhs1 (stmt);
4297 tree rhs1_type = TREE_TYPE (rhs1);
4298 bool res = false;
4300 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4302 error ("non-trivial conversion at assignment");
4303 debug_generic_expr (lhs_type);
4304 debug_generic_expr (rhs1_type);
4305 return true;
4308 if (gimple_clobber_p (stmt)
4309 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4311 error ("non-decl/MEM_REF LHS in clobber statement");
4312 debug_generic_expr (lhs);
4313 return true;
4316 if (handled_component_p (lhs)
4317 || TREE_CODE (lhs) == MEM_REF
4318 || TREE_CODE (lhs) == TARGET_MEM_REF)
4319 res |= verify_types_in_gimple_reference (lhs, true);
4321 /* Special codes we cannot handle via their class. */
4322 switch (rhs_code)
4324 case ADDR_EXPR:
4326 tree op = TREE_OPERAND (rhs1, 0);
4327 if (!is_gimple_addressable (op))
4329 error ("invalid operand in unary expression");
4330 return true;
4333 /* Technically there is no longer a need for matching types, but
4334 gimple hygiene asks for this check. In LTO we can end up
4335 combining incompatible units and thus with addresses of
4336 globals whose type was changed to a common one. */
4337 if (!in_lto_p
4338 && !types_compatible_p (TREE_TYPE (op),
4339 TREE_TYPE (TREE_TYPE (rhs1)))
4340 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4341 TREE_TYPE (op)))
4343 error ("type mismatch in address expression");
4344 debug_generic_stmt (TREE_TYPE (rhs1));
4345 debug_generic_stmt (TREE_TYPE (op));
4346 return true;
4349 return verify_types_in_gimple_reference (op, true);
4352 /* tcc_reference */
4353 case INDIRECT_REF:
4354 error ("INDIRECT_REF in gimple IL");
4355 return true;
4357 case COMPONENT_REF:
4358 case BIT_FIELD_REF:
4359 case ARRAY_REF:
4360 case ARRAY_RANGE_REF:
4361 case VIEW_CONVERT_EXPR:
4362 case REALPART_EXPR:
4363 case IMAGPART_EXPR:
4364 case TARGET_MEM_REF:
4365 case MEM_REF:
4366 if (!is_gimple_reg (lhs)
4367 && is_gimple_reg_type (TREE_TYPE (lhs)))
4369 error ("invalid rhs for gimple memory store");
4370 debug_generic_stmt (lhs);
4371 debug_generic_stmt (rhs1);
4372 return true;
4374 return res || verify_types_in_gimple_reference (rhs1, false);
4376 /* tcc_constant */
4377 case SSA_NAME:
4378 case INTEGER_CST:
4379 case REAL_CST:
4380 case FIXED_CST:
4381 case COMPLEX_CST:
4382 case VECTOR_CST:
4383 case STRING_CST:
4384 return res;
4386 /* tcc_declaration */
4387 case CONST_DECL:
4388 return res;
4389 case VAR_DECL:
4390 case PARM_DECL:
4391 if (!is_gimple_reg (lhs)
4392 && !is_gimple_reg (rhs1)
4393 && is_gimple_reg_type (TREE_TYPE (lhs)))
4395 error ("invalid rhs for gimple memory store");
4396 debug_generic_stmt (lhs);
4397 debug_generic_stmt (rhs1);
4398 return true;
4400 return res;
4402 case CONSTRUCTOR:
4403 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4405 unsigned int i;
4406 tree elt_i, elt_v, elt_t = NULL_TREE;
4408 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4409 return res;
4410 /* For vector CONSTRUCTORs we require that either it is an empty
4411 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4412 (then the element count must be correct to cover the whole
4413 outer vector and the index must be NULL on all elements), or it
4414 is a CONSTRUCTOR of scalar elements, where as an exception we
4415 allow a smaller number of elements (assuming zero filling) and
4416 consecutive indexes as compared to NULL indexes (such
4417 CONSTRUCTORs can appear in the IL from FEs). */
4418 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4420 if (elt_t == NULL_TREE)
4422 elt_t = TREE_TYPE (elt_v);
4423 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4425 tree elt_t = TREE_TYPE (elt_v);
4426 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4427 TREE_TYPE (elt_t)))
4429 error ("incorrect type of vector CONSTRUCTOR"
4430 " elements");
4431 debug_generic_stmt (rhs1);
4432 return true;
4434 else if (CONSTRUCTOR_NELTS (rhs1)
4435 * TYPE_VECTOR_SUBPARTS (elt_t)
4436 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4438 error ("incorrect number of vector CONSTRUCTOR"
4439 " elements");
4440 debug_generic_stmt (rhs1);
4441 return true;
4444 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4445 elt_t))
4447 error ("incorrect type of vector CONSTRUCTOR elements");
4448 debug_generic_stmt (rhs1);
4449 return true;
4451 else if (CONSTRUCTOR_NELTS (rhs1)
4452 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4454 error ("incorrect number of vector CONSTRUCTOR elements");
4455 debug_generic_stmt (rhs1);
4456 return true;
4459 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4461 error ("incorrect type of vector CONSTRUCTOR elements");
4462 debug_generic_stmt (rhs1);
4463 return true;
4465 if (elt_i != NULL_TREE
4466 && (TREE_CODE (elt_t) == VECTOR_TYPE
4467 || TREE_CODE (elt_i) != INTEGER_CST
4468 || compare_tree_int (elt_i, i) != 0))
4470 error ("vector CONSTRUCTOR with non-NULL element index");
4471 debug_generic_stmt (rhs1);
4472 return true;
4474 if (!is_gimple_val (elt_v))
4476 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4477 debug_generic_stmt (rhs1);
4478 return true;
4482 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4484 error ("non-vector CONSTRUCTOR with elements");
4485 debug_generic_stmt (rhs1);
4486 return true;
4488 return res;
4489 case OBJ_TYPE_REF:
4490 case ASSERT_EXPR:
4491 case WITH_SIZE_EXPR:
4492 /* FIXME. */
4493 return res;
4495 default:;
4498 return res;
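/* Editor's illustration (hedged sketch, not original text): the
   "invalid rhs for gimple memory store" checks above reject IL that
   both loads and stores a register type in a single statement, e.g.

     *p_1 = *q_2;

   for int pointers; valid GIMPLE goes through a register temporary:

     tmp_3 = *q_2;
     *p_1 = tmp_3;  */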
4501 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4502 is a problem, otherwise false. */
4504 static bool
4505 verify_gimple_assign (gassign *stmt)
4507 switch (gimple_assign_rhs_class (stmt))
4509 case GIMPLE_SINGLE_RHS:
4510 return verify_gimple_assign_single (stmt);
4512 case GIMPLE_UNARY_RHS:
4513 return verify_gimple_assign_unary (stmt);
4515 case GIMPLE_BINARY_RHS:
4516 return verify_gimple_assign_binary (stmt);
4518 case GIMPLE_TERNARY_RHS:
4519 return verify_gimple_assign_ternary (stmt);
4521 default:
4522 gcc_unreachable ();
4526 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4527 is a problem, otherwise false. */
4529 static bool
4530 verify_gimple_return (greturn *stmt)
4532 tree op = gimple_return_retval (stmt);
4533 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4535 /* We cannot test for the presence of return values, as we do not
4536 fix up missing return values from the original source. */
4537 if (op == NULL)
4538 return false;
4540 if (!is_gimple_val (op)
4541 && TREE_CODE (op) != RESULT_DECL)
4543 error ("invalid operand in return statement");
4544 debug_generic_stmt (op);
4545 return true;
4548 if ((TREE_CODE (op) == RESULT_DECL
4549 && DECL_BY_REFERENCE (op))
4550 || (TREE_CODE (op) == SSA_NAME
4551 && SSA_NAME_VAR (op)
4552 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4553 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4554 op = TREE_TYPE (op);
4556 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4558 error ("invalid conversion in return statement");
4559 debug_generic_stmt (restype);
4560 debug_generic_stmt (TREE_TYPE (op));
4561 return true;
4564 return false;
4568 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4569 is a problem, otherwise false. */
4571 static bool
4572 verify_gimple_goto (ggoto *stmt)
4574 tree dest = gimple_goto_dest (stmt);
4576 /* ??? We have two canonical forms of direct goto destinations, a
4577 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4578 if (TREE_CODE (dest) != LABEL_DECL
4579 && (!is_gimple_val (dest)
4580 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4582 error ("goto destination is neither a label nor a pointer");
4583 return true;
4586 return false;
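/* Editor's illustration (example added editorially): both forms
   accepted above, in dump syntax -- a direct   goto <D.2345>;   whose
   destination is a bare LABEL_DECL, and a computed   goto p_1;   where
   p_1 is a gimple value of pointer type.  Anything else is rejected.  */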
4589 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4590 is a problem, otherwise false. */
4592 static bool
4593 verify_gimple_switch (gswitch *stmt)
4595 unsigned int i, n;
4596 tree elt, prev_upper_bound = NULL_TREE;
4597 tree index_type, elt_type = NULL_TREE;
4599 if (!is_gimple_val (gimple_switch_index (stmt)))
4601 error ("invalid operand to switch statement");
4602 debug_generic_stmt (gimple_switch_index (stmt));
4603 return true;
4606 index_type = TREE_TYPE (gimple_switch_index (stmt));
4607 if (! INTEGRAL_TYPE_P (index_type))
4609 error ("non-integral type switch statement");
4610 debug_generic_expr (index_type);
4611 return true;
4614 elt = gimple_switch_label (stmt, 0);
4615 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4617 error ("invalid default case label in switch statement");
4618 debug_generic_expr (elt);
4619 return true;
4622 n = gimple_switch_num_labels (stmt);
4623 for (i = 1; i < n; i++)
4625 elt = gimple_switch_label (stmt, i);
4627 if (! CASE_LOW (elt))
4629 error ("invalid case label in switch statement");
4630 debug_generic_expr (elt);
4631 return true;
4633 if (CASE_HIGH (elt)
4634 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4636 error ("invalid case range in switch statement");
4637 debug_generic_expr (elt);
4638 return true;
4641 if (elt_type)
4643 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4644 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4646 error ("type mismatch for case label in switch statement");
4647 debug_generic_expr (elt);
4648 return true;
4651 else
4653 elt_type = TREE_TYPE (CASE_LOW (elt));
4654 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4656 error ("type precision mismatch in switch statement");
4657 return true;
4661 if (prev_upper_bound)
4663 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4665 error ("case labels not sorted in switch statement");
4666 return true;
4670 prev_upper_bound = CASE_HIGH (elt);
4671 if (! prev_upper_bound)
4672 prev_upper_bound = CASE_LOW (elt);
4675 return false;
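/* Editor's sketch of a switch that satisfies all of the checks above,
   in GIMPLE dump syntax (illustrative only):

     switch (i_1) <default: <L3>, case 1: <L0>, case 4 ... 6: <L1>>

   The default label comes first and has neither CASE_LOW nor
   CASE_HIGH, every other label has CASE_LOW set, ranges are non-empty,
   upper bounds strictly increase, and all case labels share one type
   whose precision does not exceed that of the index i_1.  */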
4678 /* Verify a gimple debug statement STMT.
4679 Returns true if anything is wrong. */
4681 static bool
4682 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4684 /* There isn't much that could be wrong in a gimple debug stmt. A
4685 gimple debug bind stmt, for example, maps a tree, that's usually
4686 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4687 component or member of an aggregate type, to another tree, that
4688 can be an arbitrary expression. These stmts expand into debug
4689 insns, and are converted to debug notes by var-tracking.c. */
4690 return false;
4693 /* Verify a gimple label statement STMT.
4694 Returns true if anything is wrong. */
4696 static bool
4697 verify_gimple_label (glabel *stmt)
4699 tree decl = gimple_label_label (stmt);
4700 int uid;
4701 bool err = false;
4703 if (TREE_CODE (decl) != LABEL_DECL)
4704 return true;
4705 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4706 && DECL_CONTEXT (decl) != current_function_decl)
4708 error ("label's context is not the current function decl");
4709 err |= true;
4712 uid = LABEL_DECL_UID (decl);
4713 if (cfun->cfg
4714 && (uid == -1
4715 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4717 error ("incorrect entry in label_to_block_map");
4718 err |= true;
4721 uid = EH_LANDING_PAD_NR (decl);
4722 if (uid)
4724 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4725 if (decl != lp->post_landing_pad)
4727 error ("incorrect setting of landing pad number");
4728 err |= true;
4732 return err;
4735 /* Verify a gimple cond statement STMT.
4736 Returns true if anything is wrong. */
4738 static bool
4739 verify_gimple_cond (gcond *stmt)
4741 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4743 error ("invalid comparison code in gimple cond");
4744 return true;
4746 if (!(!gimple_cond_true_label (stmt)
4747 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4748 || !(!gimple_cond_false_label (stmt)
4749 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4751 error ("invalid labels in gimple cond");
4752 return true;
4755 return verify_gimple_comparison (boolean_type_node,
4756 gimple_cond_lhs (stmt),
4757 gimple_cond_rhs (stmt),
4758 gimple_cond_code (stmt));
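/* Editor's illustration: GIMPLE conds always carry an explicit
   tcc_comparison code, so

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   is well formed while a bare   if (x_1)   is not; gimplification is
   responsible for inserting the "!= 0".  (Example added editorially.)  */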
4761 /* Verify the GIMPLE statement STMT. Returns true if there is an
4762 error, otherwise false. */
4764 static bool
4765 verify_gimple_stmt (gimple *stmt)
4767 switch (gimple_code (stmt))
4769 case GIMPLE_ASSIGN:
4770 return verify_gimple_assign (as_a <gassign *> (stmt));
4772 case GIMPLE_LABEL:
4773 return verify_gimple_label (as_a <glabel *> (stmt));
4775 case GIMPLE_CALL:
4776 return verify_gimple_call (as_a <gcall *> (stmt));
4778 case GIMPLE_COND:
4779 return verify_gimple_cond (as_a <gcond *> (stmt));
4781 case GIMPLE_GOTO:
4782 return verify_gimple_goto (as_a <ggoto *> (stmt));
4784 case GIMPLE_SWITCH:
4785 return verify_gimple_switch (as_a <gswitch *> (stmt));
4787 case GIMPLE_RETURN:
4788 return verify_gimple_return (as_a <greturn *> (stmt));
4790 case GIMPLE_ASM:
4791 return false;
4793 case GIMPLE_TRANSACTION:
4794 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4796 /* Tuples that do not have tree operands. */
4797 case GIMPLE_NOP:
4798 case GIMPLE_PREDICT:
4799 case GIMPLE_RESX:
4800 case GIMPLE_EH_DISPATCH:
4801 case GIMPLE_EH_MUST_NOT_THROW:
4802 return false;
4804 CASE_GIMPLE_OMP:
4805 /* OpenMP directives are validated by the FE and never operated
4806 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4807 non-gimple expressions when the main index variable has had
4808 its address taken. This does not affect the loop itself
4809 because the header of a GIMPLE_OMP_FOR is merely used to determine
4810 how to set up the parallel iteration. */
4811 return false;
4813 case GIMPLE_DEBUG:
4814 return verify_gimple_debug (stmt);
4816 default:
4817 gcc_unreachable ();
4821 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4822 and false otherwise. */
4824 static bool
4825 verify_gimple_phi (gimple *phi)
4827 bool err = false;
4828 unsigned i;
4829 tree phi_result = gimple_phi_result (phi);
4830 bool virtual_p;
4832 if (!phi_result)
4834 error ("invalid PHI result");
4835 return true;
4838 virtual_p = virtual_operand_p (phi_result);
4839 if (TREE_CODE (phi_result) != SSA_NAME
4840 || (virtual_p
4841 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4843 error ("invalid PHI result");
4844 err = true;
4847 for (i = 0; i < gimple_phi_num_args (phi); i++)
4849 tree t = gimple_phi_arg_def (phi, i);
4851 if (!t)
4853 error ("missing PHI def");
4854 err |= true;
4855 continue;
4857 /* Addressable variables do have SSA_NAMEs but they
4858 are not considered gimple values. */
4859 else if ((TREE_CODE (t) == SSA_NAME
4860 && virtual_p != virtual_operand_p (t))
4861 || (virtual_p
4862 && (TREE_CODE (t) != SSA_NAME
4863 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4864 || (!virtual_p
4865 && !is_gimple_val (t)))
4867 error ("invalid PHI argument");
4868 debug_generic_expr (t);
4869 err |= true;
4871 #ifdef ENABLE_TYPES_CHECKING
4872 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4874 error ("incompatible types in PHI argument %u", i);
4875 debug_generic_stmt (TREE_TYPE (phi_result));
4876 debug_generic_stmt (TREE_TYPE (t));
4877 err |= true;
4879 #endif
4882 return err;
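/* Editor's sketch of the two PHI shapes accepted above, in dump
   syntax (illustrative, not from the original file):

     x_4 = PHI <x_2(3), x_3(4)>            real: all args gimple values
     .MEM_7 = PHI <.MEM_5(3), .MEM_6(4)>   virtual: result and all args
                                           based on the single vop

   A virtual result with a real argument, or vice versa, yields the
   "invalid PHI argument" error.  */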
4885 /* Verify the GIMPLE statements inside the sequence STMTS. */
4887 static bool
4888 verify_gimple_in_seq_2 (gimple_seq stmts)
4890 gimple_stmt_iterator ittr;
4891 bool err = false;
4893 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4895 gimple *stmt = gsi_stmt (ittr);
4897 switch (gimple_code (stmt))
4899 case GIMPLE_BIND:
4900 err |= verify_gimple_in_seq_2 (
4901 gimple_bind_body (as_a <gbind *> (stmt)));
4902 break;
4904 case GIMPLE_TRY:
4905 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4906 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4907 break;
4909 case GIMPLE_EH_FILTER:
4910 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4911 break;
4913 case GIMPLE_EH_ELSE:
4915 geh_else *eh_else = as_a <geh_else *> (stmt);
4916 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4917 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4919 break;
4921 case GIMPLE_CATCH:
4922 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4923 as_a <gcatch *> (stmt)));
4924 break;
4926 case GIMPLE_TRANSACTION:
4927 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4928 break;
4930 default:
4932 bool err2 = verify_gimple_stmt (stmt);
4933 if (err2)
4934 debug_gimple_stmt (stmt);
4935 err |= err2;
4940 return err;
4943 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4944 is a problem, otherwise false. */
4946 static bool
4947 verify_gimple_transaction (gtransaction *stmt)
4949 tree lab;
4951 lab = gimple_transaction_label_norm (stmt);
4952 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4953 return true;
4954 lab = gimple_transaction_label_uninst (stmt);
4955 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4956 return true;
4957 lab = gimple_transaction_label_over (stmt);
4958 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4959 return true;
4961 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4965 /* Verify the GIMPLE statements inside the statement list STMTS. */
4967 DEBUG_FUNCTION void
4968 verify_gimple_in_seq (gimple_seq stmts)
4970 timevar_push (TV_TREE_STMT_VERIFY);
4971 if (verify_gimple_in_seq_2 (stmts))
4972 internal_error ("verify_gimple failed");
4973 timevar_pop (TV_TREE_STMT_VERIFY);
4976 /* Return true when T can be shared. */
4978 static bool
4979 tree_node_can_be_shared (tree t)
4981 if (IS_TYPE_OR_DECL_P (t)
4982 || is_gimple_min_invariant (t)
4983 || TREE_CODE (t) == SSA_NAME
4984 || t == error_mark_node
4985 || TREE_CODE (t) == IDENTIFIER_NODE)
4986 return true;
4988 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4989 return true;
4991 if (DECL_P (t))
4992 return true;
4994 return false;
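/* Editor's note with an example (added editorially): sharing means a
   single tree node may be pointed to by several statements, so e.g.
   one integer_zero_node can legitimately serve as an operand
   everywhere, whereas an unsharable node must be referenced exactly
   once; the walker below reports the second reference it finds.  */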
4997 /* Called via walk_tree. Verify tree sharing. */
4999 static tree
5000 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5002 hash_set<void *> *visited = (hash_set<void *> *) data;
5004 if (tree_node_can_be_shared (*tp))
5006 *walk_subtrees = false;
5007 return NULL;
5010 if (visited->add (*tp))
5011 return *tp;
5013 return NULL;
5016 /* Called via walk_gimple_stmt. Verify tree sharing. */
5018 static tree
5019 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5021 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5022 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5025 static bool eh_error_found;
5026 bool
5027 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5028 hash_set<gimple *> *visited)
5030 if (!visited->contains (stmt))
5032 error ("dead STMT in EH table");
5033 debug_gimple_stmt (stmt);
5034 eh_error_found = true;
5036 return true;
5039 /* Verify that LOC's block is in the set BLOCKS. */
5041 static bool
5042 verify_location (hash_set<tree> *blocks, location_t loc)
5044 tree block = LOCATION_BLOCK (loc);
5045 if (block != NULL_TREE
5046 && !blocks->contains (block))
5048 error ("location references block not in block tree");
5049 return true;
5051 if (block != NULL_TREE)
5052 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5053 return false;
5056 /* Called via walk_tree. Verify that expressions have no blocks. */
5058 static tree
5059 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5061 if (!EXPR_P (*tp))
5063 *walk_subtrees = false;
5064 return NULL;
5067 location_t loc = EXPR_LOCATION (*tp);
5068 if (LOCATION_BLOCK (loc) != NULL)
5069 return *tp;
5071 return NULL;
5074 /* Called via walk_tree. Verify locations of expressions. */
5076 static tree
5077 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5079 hash_set<tree> *blocks = (hash_set<tree> *) data;
5081 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5083 tree t = DECL_DEBUG_EXPR (*tp);
5084 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5085 if (addr)
5086 return addr;
5088 if ((VAR_P (*tp)
5089 || TREE_CODE (*tp) == PARM_DECL
5090 || TREE_CODE (*tp) == RESULT_DECL)
5091 && DECL_HAS_VALUE_EXPR_P (*tp))
5093 tree t = DECL_VALUE_EXPR (*tp);
5094 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5095 if (addr)
5096 return addr;
5099 if (!EXPR_P (*tp))
5101 *walk_subtrees = false;
5102 return NULL;
5105 location_t loc = EXPR_LOCATION (*tp);
5106 if (verify_location (blocks, loc))
5107 return *tp;
5109 return NULL;
5112 /* Called via walk_gimple_op. Verify locations of expressions. */
5114 static tree
5115 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5117 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5118 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5121 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5123 static void
5124 collect_subblocks (hash_set<tree> *blocks, tree block)
5126 tree t;
5127 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5129 blocks->add (t);
5130 collect_subblocks (blocks, t);
5134 /* Verify the GIMPLE statements in the CFG of FN. */
5136 DEBUG_FUNCTION void
5137 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5139 basic_block bb;
5140 bool err = false;
5142 timevar_push (TV_TREE_STMT_VERIFY);
5143 hash_set<void *> visited;
5144 hash_set<gimple *> visited_stmts;
5146 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5147 hash_set<tree> blocks;
5148 if (DECL_INITIAL (fn->decl))
5150 blocks.add (DECL_INITIAL (fn->decl));
5151 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5154 FOR_EACH_BB_FN (bb, fn)
5156 gimple_stmt_iterator gsi;
5158 for (gphi_iterator gpi = gsi_start_phis (bb);
5159 !gsi_end_p (gpi);
5160 gsi_next (&gpi))
5162 gphi *phi = gpi.phi ();
5163 bool err2 = false;
5164 unsigned i;
5166 visited_stmts.add (phi);
5168 if (gimple_bb (phi) != bb)
5170 error ("gimple_bb (phi) is set to a wrong basic block");
5171 err2 = true;
5174 err2 |= verify_gimple_phi (phi);
5176 /* Only PHI arguments have locations. */
5177 if (gimple_location (phi) != UNKNOWN_LOCATION)
5179 error ("PHI node with location");
5180 err2 = true;
5183 for (i = 0; i < gimple_phi_num_args (phi); i++)
5185 tree arg = gimple_phi_arg_def (phi, i);
5186 tree addr = walk_tree (&arg, verify_node_sharing_1,
5187 &visited, NULL);
5188 if (addr)
5190 error ("incorrect sharing of tree nodes");
5191 debug_generic_expr (addr);
5192 err2 |= true;
5194 location_t loc = gimple_phi_arg_location (phi, i);
5195 if (virtual_operand_p (gimple_phi_result (phi))
5196 && loc != UNKNOWN_LOCATION)
5198 error ("virtual PHI with argument locations");
5199 err2 = true;
5201 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5202 if (addr)
5204 debug_generic_expr (addr);
5205 err2 = true;
5207 err2 |= verify_location (&blocks, loc);
5210 if (err2)
5211 debug_gimple_stmt (phi);
5212 err |= err2;
5215 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5217 gimple *stmt = gsi_stmt (gsi);
5218 bool err2 = false;
5219 struct walk_stmt_info wi;
5220 tree addr;
5221 int lp_nr;
5223 visited_stmts.add (stmt);
5225 if (gimple_bb (stmt) != bb)
5227 error ("gimple_bb (stmt) is set to a wrong basic block");
5228 err2 = true;
5231 err2 |= verify_gimple_stmt (stmt);
5232 err2 |= verify_location (&blocks, gimple_location (stmt));
5234 memset (&wi, 0, sizeof (wi));
5235 wi.info = (void *) &visited;
5236 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5237 if (addr)
5239 error ("incorrect sharing of tree nodes");
5240 debug_generic_expr (addr);
5241 err2 |= true;
5244 memset (&wi, 0, sizeof (wi));
5245 wi.info = (void *) &blocks;
5246 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5247 if (addr)
5249 debug_generic_expr (addr);
5250 err2 |= true;
5253 /* ??? Instead of not checking these stmts at all, the walker
5254 should know its context via wi. */
5255 if (!is_gimple_debug (stmt)
5256 && !is_gimple_omp (stmt))
5258 memset (&wi, 0, sizeof (wi));
5259 addr = walk_gimple_op (stmt, verify_expr, &wi);
5260 if (addr)
5262 debug_generic_expr (addr);
5263 inform (gimple_location (stmt), "in statement");
5264 err2 |= true;
5268 /* If the statement is marked as part of an EH region, then it is
5269 expected that the statement could throw. Verify that when we
5270 have optimizations that simplify statements such that we prove
5271 that they cannot throw, that we update other data structures
5272 to match. */
5273 lp_nr = lookup_stmt_eh_lp (stmt);
5274 if (lp_nr > 0)
5276 if (!stmt_could_throw_p (stmt))
5278 if (verify_nothrow)
5280 error ("statement marked for throw, but doesn%'t");
5281 err2 |= true;
5284 else if (!gsi_one_before_end_p (gsi))
5286 error ("statement marked for throw in middle of block");
5287 err2 |= true;
5291 if (err2)
5292 debug_gimple_stmt (stmt);
5293 err |= err2;
5297 eh_error_found = false;
5298 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5299 if (eh_table)
5300 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5301 (&visited_stmts);
5303 if (err || eh_error_found)
5304 internal_error ("verify_gimple failed");
5306 verify_histograms ();
5307 timevar_pop (TV_TREE_STMT_VERIFY);
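/* Editor's sketch, not part of the original file: a minimal checking
   helper as a pass might write it, assuming cfun is the function of
   interest (the sketch_ name is made up for illustration).  */

static void ATTRIBUTE_UNUSED
sketch_check_current_function (void)
{
  /* Verify statements, locations, tree sharing and the EH table, and
     additionally insist that statements still marked as throwing
     really can throw.  */
  verify_gimple_in_cfg (cfun, /*verify_nothrow=*/true);
}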
5311 /* Verifies that the flow information is OK. */
5313 static int
5314 gimple_verify_flow_info (void)
5316 int err = 0;
5317 basic_block bb;
5318 gimple_stmt_iterator gsi;
5319 gimple *stmt;
5320 edge e;
5321 edge_iterator ei;
5323 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5324 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5326 error ("ENTRY_BLOCK has IL associated with it");
5327 err = 1;
5330 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5331 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5333 error ("EXIT_BLOCK has IL associated with it");
5334 err = 1;
5337 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5338 if (e->flags & EDGE_FALLTHRU)
5340 error ("fallthru to exit from bb %d", e->src->index);
5341 err = 1;
5344 FOR_EACH_BB_FN (bb, cfun)
5346 bool found_ctrl_stmt = false;
5348 stmt = NULL;
5350 /* Skip labels on the start of basic block. */
5351 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5353 tree label;
5354 gimple *prev_stmt = stmt;
5356 stmt = gsi_stmt (gsi);
5358 if (gimple_code (stmt) != GIMPLE_LABEL)
5359 break;
5361 label = gimple_label_label (as_a <glabel *> (stmt));
5362 if (prev_stmt && DECL_NONLOCAL (label))
5364 error ("nonlocal label ");
5365 print_generic_expr (stderr, label);
5366 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5367 bb->index);
5368 err = 1;
5371 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5373 error ("EH landing pad label ");
5374 print_generic_expr (stderr, label);
5375 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5376 bb->index);
5377 err = 1;
5380 if (label_to_block (label) != bb)
5382 error ("label ");
5383 print_generic_expr (stderr, label);
5384 fprintf (stderr, " to block does not match in bb %d",
5385 bb->index);
5386 err = 1;
5389 if (decl_function_context (label) != current_function_decl)
5391 error ("label ");
5392 print_generic_expr (stderr, label);
5393 fprintf (stderr, " has incorrect context in bb %d",
5394 bb->index);
5395 err = 1;
5399 /* Verify that body of basic block BB is free of control flow. */
5400 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5402 gimple *stmt = gsi_stmt (gsi);
5404 if (found_ctrl_stmt)
5406 error ("control flow in the middle of basic block %d",
5407 bb->index);
5408 err = 1;
5411 if (stmt_ends_bb_p (stmt))
5412 found_ctrl_stmt = true;
5414 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5416 error ("label ");
5417 print_generic_expr (stderr, gimple_label_label (label_stmt));
5418 fprintf (stderr, " in the middle of basic block %d", bb->index);
5419 err = 1;
5423 gsi = gsi_last_bb (bb);
5424 if (gsi_end_p (gsi))
5425 continue;
5427 stmt = gsi_stmt (gsi);
5429 if (gimple_code (stmt) == GIMPLE_LABEL)
5430 continue;
5432 err |= verify_eh_edges (stmt);
5434 if (is_ctrl_stmt (stmt))
5436 FOR_EACH_EDGE (e, ei, bb->succs)
5437 if (e->flags & EDGE_FALLTHRU)
5439 error ("fallthru edge after a control statement in bb %d",
5440 bb->index);
5441 err = 1;
5445 if (gimple_code (stmt) != GIMPLE_COND)
5447 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
5448 after anything other than a GIMPLE_COND. */
5449 FOR_EACH_EDGE (e, ei, bb->succs)
5450 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5452 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5453 bb->index);
5454 err = 1;
5458 switch (gimple_code (stmt))
5460 case GIMPLE_COND:
5462 edge true_edge;
5463 edge false_edge;
5465 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5467 if (!true_edge
5468 || !false_edge
5469 || !(true_edge->flags & EDGE_TRUE_VALUE)
5470 || !(false_edge->flags & EDGE_FALSE_VALUE)
5471 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5472 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5473 || EDGE_COUNT (bb->succs) >= 3)
5475 error ("wrong outgoing edge flags at end of bb %d",
5476 bb->index);
5477 err = 1;
5480 break;
5482 case GIMPLE_GOTO:
5483 if (simple_goto_p (stmt))
5485 error ("explicit goto at end of bb %d", bb->index);
5486 err = 1;
5488 else
5490 /* FIXME. We should double check that the labels in the
5491 destination blocks have their address taken. */
5492 FOR_EACH_EDGE (e, ei, bb->succs)
5493 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5494 | EDGE_FALSE_VALUE))
5495 || !(e->flags & EDGE_ABNORMAL))
5497 error ("wrong outgoing edge flags at end of bb %d",
5498 bb->index);
5499 err = 1;
5502 break;
5504 case GIMPLE_CALL:
5505 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5506 break;
5507 /* fallthru */
5508 case GIMPLE_RETURN:
5509 if (!single_succ_p (bb)
5510 || (single_succ_edge (bb)->flags
5511 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5512 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5514 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5515 err = 1;
5517 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5519 error ("return edge does not point to exit in bb %d",
5520 bb->index);
5521 err = 1;
5523 break;
5525 case GIMPLE_SWITCH:
5527 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5528 tree prev;
5529 edge e;
5530 size_t i, n;
5532 n = gimple_switch_num_labels (switch_stmt);
5534 /* Mark all the destination basic blocks. */
5535 for (i = 0; i < n; ++i)
5537 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5538 basic_block label_bb = label_to_block (lab);
5539 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5540 label_bb->aux = (void *)1;
5543 /* Verify that the case labels are sorted. */
5544 prev = gimple_switch_label (switch_stmt, 0);
5545 for (i = 1; i < n; ++i)
5547 tree c = gimple_switch_label (switch_stmt, i);
5548 if (!CASE_LOW (c))
5550 error ("found default case not at the start of "
5551 "case vector");
5552 err = 1;
5553 continue;
5555 if (CASE_LOW (prev)
5556 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5558 error ("case labels not sorted: ");
5559 print_generic_expr (stderr, prev);
5560 fprintf (stderr," is greater than ");
5561 print_generic_expr (stderr, c);
5562 fprintf (stderr," but comes before it.\n");
5563 err = 1;
5565 prev = c;
5567 /* VRP will remove the default case if it can prove it will
5568 never be executed. So do not verify there always exists
5569 a default case here. */
5571 FOR_EACH_EDGE (e, ei, bb->succs)
5573 if (!e->dest->aux)
5575 error ("extra outgoing edge %d->%d",
5576 bb->index, e->dest->index);
5577 err = 1;
5580 e->dest->aux = (void *)2;
5581 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5582 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5584 error ("wrong outgoing edge flags at end of bb %d",
5585 bb->index);
5586 err = 1;
5590 /* Check that we have all of them. */
5591 for (i = 0; i < n; ++i)
5593 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5594 basic_block label_bb = label_to_block (lab);
5596 if (label_bb->aux != (void *)2)
5598 error ("missing edge %i->%i", bb->index, label_bb->index);
5599 err = 1;
5603 FOR_EACH_EDGE (e, ei, bb->succs)
5604 e->dest->aux = (void *)0;
5606 break;
5608 case GIMPLE_EH_DISPATCH:
5609 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5610 break;
5612 default:
5613 break;
5617 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5618 verify_dominators (CDI_DOMINATORS);
5620 return err;
5624 /* Updates phi nodes after creating a forwarder block joined
5625 by edge FALLTHRU. */
5627 static void
5628 gimple_make_forwarder_block (edge fallthru)
5630 edge e;
5631 edge_iterator ei;
5632 basic_block dummy, bb;
5633 tree var;
5634 gphi_iterator gsi;
5636 dummy = fallthru->src;
5637 bb = fallthru->dest;
5639 if (single_pred_p (bb))
5640 return;
5642 /* If we redirected a branch we must create new PHI nodes at the
5643 start of BB. */
5644 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5646 gphi *phi, *new_phi;
5648 phi = gsi.phi ();
5649 var = gimple_phi_result (phi);
5650 new_phi = create_phi_node (var, bb);
5651 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5652 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5653 UNKNOWN_LOCATION);
5656 /* Add the arguments we have stored on edges. */
5657 FOR_EACH_EDGE (e, ei, bb->preds)
5659 if (e == fallthru)
5660 continue;
5662 flush_pending_stmts (e);
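/* Editor's illustration of the rewiring above (added editorially): if
   DUMMY originally held   x_3 = PHI <x_1(2), x_2(4)>   and becomes a
   forwarder into BB, the old PHI stays in DUMMY under a fresh result
   name x_5, while BB receives   x_3 = PHI <x_5(dummy), ...>   whose
   remaining arguments come from the pending stmts flushed on each
   redirected edge.  */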
5667 /* Return a non-special label in the head of basic block BB.
5668 Create one if it doesn't exist. */
5670 tree
5671 gimple_block_label (basic_block bb)
5673 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5674 bool first = true;
5675 tree label;
5676 glabel *stmt;
5678 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5680 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5681 if (!stmt)
5682 break;
5683 label = gimple_label_label (stmt);
5684 if (!DECL_NONLOCAL (label))
5686 if (!first)
5687 gsi_move_before (&i, &s);
5688 return label;
5692 label = create_artificial_label (UNKNOWN_LOCATION);
5693 stmt = gimple_build_label (label);
5694 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5695 return label;
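/* Editor's sketch of typical use (it mirrors the switch redirection
   further below; the framing is editorial):

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   after which the case transfers control to DEST through its
   canonical, non-special label.  */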
5699 /* Attempt to perform edge redirection by replacing a possibly complex
5700 jump instruction by a goto or by removing the jump completely.
5701 This can apply only if all edges now point to the same block. The
5702 parameters and return values are equivalent to
5703 redirect_edge_and_branch. */
5705 static edge
5706 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5708 basic_block src = e->src;
5709 gimple_stmt_iterator i;
5710 gimple *stmt;
5712 /* We can replace or remove a complex jump only when we have exactly
5713 two edges. */
5714 if (EDGE_COUNT (src->succs) != 2
5715 /* Verify that all targets will be TARGET. Specifically, the
5716 edge that is not E must also go to TARGET. */
5717 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5718 return NULL;
5720 i = gsi_last_bb (src);
5721 if (gsi_end_p (i))
5722 return NULL;
5724 stmt = gsi_stmt (i);
5726 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5728 gsi_remove (&i, true);
5729 e = ssa_redirect_edge (e, target);
5730 e->flags = EDGE_FALLTHRU;
5731 return e;
5734 return NULL;
5738 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5739 edge representing the redirected branch. */
5741 static edge
5742 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5744 basic_block bb = e->src;
5745 gimple_stmt_iterator gsi;
5746 edge ret;
5747 gimple *stmt;
5749 if (e->flags & EDGE_ABNORMAL)
5750 return NULL;
5752 if (e->dest == dest)
5753 return NULL;
5755 if (e->flags & EDGE_EH)
5756 return redirect_eh_edge (e, dest);
5758 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5760 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5761 if (ret)
5762 return ret;
5765 gsi = gsi_last_bb (bb);
5766 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5768 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5770 case GIMPLE_COND:
5771 /* For COND_EXPR, we only need to redirect the edge. */
5772 break;
5774 case GIMPLE_GOTO:
5775 /* No non-abnormal edges should lead from a non-simple goto, and
5776 simple ones should be represented implicitly. */
5777 gcc_unreachable ();
5779 case GIMPLE_SWITCH:
5781 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5782 tree label = gimple_block_label (dest);
5783 tree cases = get_cases_for_edge (e, switch_stmt);
5785 /* If we have a list of cases associated with E, then use it
5786 as it's a lot faster than walking the entire case vector. */
5787 if (cases)
5789 edge e2 = find_edge (e->src, dest);
5790 tree last, first;
5792 first = cases;
5793 while (cases)
5795 last = cases;
5796 CASE_LABEL (cases) = label;
5797 cases = CASE_CHAIN (cases);
5800 /* If there was already an edge in the CFG, then we need
5801 to move all the cases associated with E to E2. */
5802 if (e2)
5804 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5806 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5807 CASE_CHAIN (cases2) = first;
5809 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5811 else
5813 size_t i, n = gimple_switch_num_labels (switch_stmt);
5815 for (i = 0; i < n; i++)
5817 tree elt = gimple_switch_label (switch_stmt, i);
5818 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5819 CASE_LABEL (elt) = label;
5823 break;
5825 case GIMPLE_ASM:
5827 gasm *asm_stmt = as_a <gasm *> (stmt);
5828 int i, n = gimple_asm_nlabels (asm_stmt);
5829 tree label = NULL;
5831 for (i = 0; i < n; ++i)
5833 tree cons = gimple_asm_label_op (asm_stmt, i);
5834 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5836 if (!label)
5837 label = gimple_block_label (dest);
5838 TREE_VALUE (cons) = label;
5842 /* If we didn't find any label matching the former edge in the
5843 asm labels, we must be redirecting the fallthrough
5844 edge. */
5845 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5847 break;
5849 case GIMPLE_RETURN:
5850 gsi_remove (&gsi, true);
5851 e->flags |= EDGE_FALLTHRU;
5852 break;
5854 case GIMPLE_OMP_RETURN:
5855 case GIMPLE_OMP_CONTINUE:
5856 case GIMPLE_OMP_SECTIONS_SWITCH:
5857 case GIMPLE_OMP_FOR:
5858 /* The edges from OMP constructs can be simply redirected. */
5859 break;
5861 case GIMPLE_EH_DISPATCH:
5862 if (!(e->flags & EDGE_FALLTHRU))
5863 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5864 break;
5866 case GIMPLE_TRANSACTION:
5867 if (e->flags & EDGE_TM_ABORT)
5868 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5869 gimple_block_label (dest));
5870 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5871 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5872 gimple_block_label (dest));
5873 else
5874 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5875 gimple_block_label (dest));
5876 break;
5878 default:
5879 /* Otherwise it must be a fallthru edge, and we don't need to
5880 do anything besides redirecting it. */
5881 gcc_assert (e->flags & EDGE_FALLTHRU);
5882 break;
5885 /* Update/insert PHI nodes as necessary. */
5887 /* Now update the edges in the CFG. */
5888 e = ssa_redirect_edge (e, dest);
5890 return e;
5893 /* Returns true if it is possible to remove edge E by redirecting
5894 it to the destination of the other edge from E->src. */
5896 static bool
5897 gimple_can_remove_branch_p (const_edge e)
5899 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5900 return false;
5902 return true;
5905 /* Simple wrapper, as we can always redirect fallthru edges. */
5907 static basic_block
5908 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5910 e = gimple_redirect_edge_and_branch (e, dest);
5911 gcc_assert (e);
5913 return NULL;
5917 /* Splits basic block BB after statement STMT (but at least after the
5918 labels). If STMT is NULL, BB is split just after the labels. */
5920 static basic_block
5921 gimple_split_block (basic_block bb, void *stmt)
5923 gimple_stmt_iterator gsi;
5924 gimple_stmt_iterator gsi_tgt;
5925 gimple_seq list;
5926 basic_block new_bb;
5927 edge e;
5928 edge_iterator ei;
5930 new_bb = create_empty_bb (bb);
5932 /* Redirect the outgoing edges. */
5933 new_bb->succs = bb->succs;
5934 bb->succs = NULL;
5935 FOR_EACH_EDGE (e, ei, new_bb->succs)
5936 e->src = new_bb;
5938 /* Get a stmt iterator pointing to the first stmt to move. */
5939 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5940 gsi = gsi_after_labels (bb);
5941 else
5943 gsi = gsi_for_stmt ((gimple *) stmt);
5944 gsi_next (&gsi);
5947 /* Move everything from GSI to the new basic block. */
5948 if (gsi_end_p (gsi))
5949 return new_bb;
5951 /* Split the statement list; avoid re-creating new containers as this
5952 brings ugly quadratic memory consumption in the inliner.
5953 (We are still quadratic since we need to update stmt BB pointers,
5954 sadly.) */
5955 gsi_split_seq_before (&gsi, &list);
5956 set_bb_seq (new_bb, list);
5957 for (gsi_tgt = gsi_start (list);
5958 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5959 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5961 return new_bb;
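/* Editor's sketch (hedged): clients reach this through the cfghooks
   wrapper, e.g.

     edge e = split_block (bb, stmt);
     basic_block rest = e->dest;

   which leaves STMT in BB, moves the following statements to a fresh
   block and returns the fallthru edge between the two.  */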
5965 /* Moves basic block BB after block AFTER. */
5967 static bool
5968 gimple_move_block_after (basic_block bb, basic_block after)
5970 if (bb->prev_bb == after)
5971 return true;
5973 unlink_block (bb);
5974 link_block (bb, after);
5976 return true;
5980 /* Return TRUE if block BB has no executable statements, otherwise return
5981 FALSE. */
5983 static bool
5984 gimple_empty_block_p (basic_block bb)
5986 /* BB must have no executable statements. */
5987 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5988 if (phi_nodes (bb))
5989 return false;
5990 if (gsi_end_p (gsi))
5991 return true;
5992 if (is_gimple_debug (gsi_stmt (gsi)))
5993 gsi_next_nondebug (&gsi);
5994 return gsi_end_p (gsi);
5998 /* Split a basic block if it ends with a conditional branch and if the
5999 other part of the block is not empty. */
6001 static basic_block
6002 gimple_split_block_before_cond_jump (basic_block bb)
6004 gimple *last, *split_point;
6005 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6006 if (gsi_end_p (gsi))
6007 return NULL;
6008 last = gsi_stmt (gsi);
6009 if (gimple_code (last) != GIMPLE_COND
6010 && gimple_code (last) != GIMPLE_SWITCH)
6011 return NULL;
6012 gsi_prev (&gsi);
6013 split_point = gsi_stmt (gsi);
6014 return split_block (bb, split_point)->dest;
6018 /* Return true if basic_block can be duplicated. */
6020 static bool
6021 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6023 return true;
6026 /* Create a duplicate of the basic block BB. NOTE: This does not
6027 preserve SSA form. */
6029 static basic_block
6030 gimple_duplicate_bb (basic_block bb)
6032 basic_block new_bb;
6033 gimple_stmt_iterator gsi_tgt;
6035 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6037 /* Copy the PHI nodes. We ignore PHI node arguments here because
6038 the incoming edges have not been set up yet. */
6039 for (gphi_iterator gpi = gsi_start_phis (bb);
6040 !gsi_end_p (gpi);
6041 gsi_next (&gpi))
6043 gphi *phi, *copy;
6044 phi = gpi.phi ();
6045 copy = create_phi_node (NULL_TREE, new_bb);
6046 create_new_def_for (gimple_phi_result (phi), copy,
6047 gimple_phi_result_ptr (copy));
6048 gimple_set_uid (copy, gimple_uid (phi));
6051 gsi_tgt = gsi_start_bb (new_bb);
6052 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6053 !gsi_end_p (gsi);
6054 gsi_next (&gsi))
6056 def_operand_p def_p;
6057 ssa_op_iter op_iter;
6058 tree lhs;
6059 gimple *stmt, *copy;
6061 stmt = gsi_stmt (gsi);
6062 if (gimple_code (stmt) == GIMPLE_LABEL)
6063 continue;
6065 /* Don't duplicate label debug stmts. */
6066 if (gimple_debug_bind_p (stmt)
6067 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6068 == LABEL_DECL)
6069 continue;
6071 /* Create a new copy of STMT and duplicate STMT's virtual
6072 operands. */
6073 copy = gimple_copy (stmt);
6074 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6076 maybe_duplicate_eh_stmt (copy, stmt);
6077 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6079 /* When copying around a stmt writing into a local non-user
6080 aggregate, make sure it won't share a stack slot with other
6081 vars. */
6082 lhs = gimple_get_lhs (stmt);
6083 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6085 tree base = get_base_address (lhs);
6086 if (base
6087 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6088 && DECL_IGNORED_P (base)
6089 && !TREE_STATIC (base)
6090 && !DECL_EXTERNAL (base)
6091 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6092 DECL_NONSHAREABLE (base) = 1;
6095 /* Create new names for all the definitions created by COPY and
6096 add replacement mappings for each new name. */
6097 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6098 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6101 return new_bb;
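/* Editor's note (added): since SSA form is not preserved, a caller
   that has wired up the copied edges is expected to complete the web
   itself, e.g. by calling add_phi_args_after_copy_bb on the duplicate
   (see below) and then updating SSA for the new definitions registered
   via create_new_def_for above.  */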
6104 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6106 static void
6107 add_phi_args_after_copy_edge (edge e_copy)
6109 basic_block bb, bb_copy = e_copy->src, dest;
6110 edge e;
6111 edge_iterator ei;
6112 gphi *phi, *phi_copy;
6113 tree def;
6114 gphi_iterator psi, psi_copy;
6116 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6117 return;
6119 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6121 if (e_copy->dest->flags & BB_DUPLICATED)
6122 dest = get_bb_original (e_copy->dest);
6123 else
6124 dest = e_copy->dest;
6126 e = find_edge (bb, dest);
6127 if (!e)
6129 /* During loop unrolling the target of the latch edge is copied.
6130 In this case we are not looking for the edge to DEST, but for
6131 the edge to the duplicated block whose original was DEST. */
6132 FOR_EACH_EDGE (e, ei, bb->succs)
6134 if ((e->dest->flags & BB_DUPLICATED)
6135 && get_bb_original (e->dest) == dest)
6136 break;
6139 gcc_assert (e != NULL);
6142 for (psi = gsi_start_phis (e->dest),
6143 psi_copy = gsi_start_phis (e_copy->dest);
6144 !gsi_end_p (psi);
6145 gsi_next (&psi), gsi_next (&psi_copy))
6147 phi = psi.phi ();
6148 phi_copy = psi_copy.phi ();
6149 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6150 add_phi_arg (phi_copy, def, e_copy,
6151 gimple_phi_arg_location_from_edge (phi, e));
6156 /* Basic block BB_COPY was created by code duplication. Add phi node
6157 arguments for edges going out of BB_COPY. The blocks that were
6158 duplicated have BB_DUPLICATED set. */
6160 void
6161 add_phi_args_after_copy_bb (basic_block bb_copy)
6163 edge e_copy;
6164 edge_iterator ei;
6166 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6168 add_phi_args_after_copy_edge (e_copy);
6172 /* Blocks in REGION_COPY array of length N_REGION were created by
6173 duplication of basic blocks. Add phi node arguments for edges
6174 going from these blocks. If E_COPY is not NULL, also add
6175 phi node arguments for its destination. */
6177 void
6178 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6179 edge e_copy)
6181 unsigned i;
6183 for (i = 0; i < n_region; i++)
6184 region_copy[i]->flags |= BB_DUPLICATED;
6186 for (i = 0; i < n_region; i++)
6187 add_phi_args_after_copy_bb (region_copy[i]);
6188 if (e_copy)
6189 add_phi_args_after_copy_edge (e_copy);
6191 for (i = 0; i < n_region; i++)
6192 region_copy[i]->flags &= ~BB_DUPLICATED;
6195 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6196 important exit edge EXIT. By important we mean that no SSA name defined
6197 inside the region is live across the other exit edges of the region. All entry
6198 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6199 to the duplicate of the region. Dominance and loop information is
6200 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6201 UPDATE_DOMINANCE is false then we assume that the caller will update the
6202 dominance information after calling this function. The new basic
6203 blocks are stored to REGION_COPY in the same order as they had in REGION,
6204 provided that REGION_COPY is not NULL.
6205 The function returns false if it is unable to copy the region,
6206 true otherwise. */
6208 bool
6209 gimple_duplicate_sese_region (edge entry, edge exit,
6210 basic_block *region, unsigned n_region,
6211 basic_block *region_copy,
6212 bool update_dominance)
6214 unsigned i;
6215 bool free_region_copy = false, copying_header = false;
6216 struct loop *loop = entry->dest->loop_father;
6217 edge exit_copy;
6218 vec<basic_block> doms;
6219 edge redirected;
6220 int total_freq = 0, entry_freq = 0;
6221 profile_count total_count = profile_count::uninitialized ();
6222 profile_count entry_count = profile_count::uninitialized ();
6224 if (!can_copy_bbs_p (region, n_region))
6225 return false;
6227 /* Some sanity checking. Note that we do not check for all possible
6228 misuses of the function; i.e., if you ask to copy something weird,
6229 it will work, but the state of the structures probably will not be
6230 correct. */
6231 for (i = 0; i < n_region; i++)
6233 /* We do not handle subloops, i.e. all the blocks must belong to the
6234 same loop. */
6235 if (region[i]->loop_father != loop)
6236 return false;
6238 if (region[i] != entry->dest
6239 && region[i] == loop->header)
6240 return false;
6243 /* In case the function is used for loop header copying (which is the primary
6244 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6245 if (loop->header == entry->dest)
6247 copying_header = true;
6249 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6250 return false;
6252 for (i = 0; i < n_region; i++)
6253 if (region[i] != exit->src
6254 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6255 return false;
6258 initialize_original_copy_tables ();
6260 if (copying_header)
6261 set_loop_copy (loop, loop_outer (loop));
6262 else
6263 set_loop_copy (loop, loop);
6265 if (!region_copy)
6267 region_copy = XNEWVEC (basic_block, n_region);
6268 free_region_copy = true;
6271 /* Record blocks outside the region that are dominated by something
6272 inside. */
6273 if (update_dominance)
6275 doms.create (0);
6276 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6279 if (entry->dest->count.initialized_p ())
6281 total_count = entry->dest->count;
6282 entry_count = entry->count;
6283 /* Fix up corner cases, to avoid division by zero or creation of negative
6284 frequencies. */
6285 if (entry_count > total_count)
6286 entry_count = total_count;
6288 if (!(total_count > 0) || !(entry_count > 0))
6290 total_freq = entry->dest->frequency;
6291 entry_freq = EDGE_FREQUENCY (entry);
6292 /* Fix up corner cases, to avoid division by zero or creation of negative
6293 frequencies. */
6294 if (total_freq == 0)
6295 total_freq = 1;
6296 else if (entry_freq > total_freq)
6297 entry_freq = total_freq;
6300 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6301 split_edge_bb_loc (entry), update_dominance);
6302 if (total_count > 0 && entry_count > 0)
6304 scale_bbs_frequencies_profile_count (region, n_region,
6305 total_count - entry_count,
6306 total_count);
6307 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6308 total_count);
6310 else
6312 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6313 total_freq);
6314 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6317 if (copying_header)
6319 loop->header = exit->dest;
6320 loop->latch = exit->src;
6323 /* Redirect the entry and add the phi node arguments. */
6324 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6325 gcc_assert (redirected != NULL);
6326 flush_pending_stmts (entry);
6328 /* Concerning updating of dominators: We must recount dominators
6329 for entry block and its copy. Anything that is outside of the
6330 region, but was dominated by something inside needs recounting as
6331 well. */
6332 if (update_dominance)
6334 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6335 doms.safe_push (get_bb_original (entry->dest));
6336 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6337 doms.release ();
6340 /* Add the other PHI node arguments. */
6341 add_phi_args_after_copy (region_copy, n_region, NULL);
6343 if (free_region_copy)
6344 free (region_copy);
6346 free_original_copy_tables ();
6347 return true;
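/* Editor's sketch, not part of the original file: how a hypothetical
   loop-header-copying client (the sketch_ name is invented) might
   drive the routine above; the callee updates dominators and loop
   structure but, per its comment, not the SSA web.  */

static bool ATTRIBUTE_UNUSED
sketch_copy_header (edge entry, edge exit,
		    basic_block *bbs, unsigned n_bbs)
{
  if (!gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
				     NULL, true))
    return false;
  /* New names were registered via create_new_def_for; rewrite their
     uses now.  */
  update_ssa (TODO_update_ssa);
  return true;
}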
6350 /* Checks if BB is part of the region defined by N_REGION BBS. */
6351 static bool
6352 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6354 unsigned int n;
6356 for (n = 0; n < n_region; n++)
6358 if (bb == bbs[n])
6359 return true;
6361 return false;
6364 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6365 are stored to REGION_COPY in the same order in which they appear
6366 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6367 the region, EXIT an exit from it. The condition guarding EXIT
6368 is moved to ENTRY. Returns true if duplication succeeds, false
6369 otherwise.
6371 For example,
6373 some_code;
6374 if (cond)
6375 { }
6376 else
6377 { }
6379 is transformed to
6381 if (cond)
6382 {
6383 some_code;
6384 }
6386 else
6387 {
6388 some_code;
6389 } */
6393 bool
6394 gimple_duplicate_sese_tail (edge entry, edge exit,
6395 basic_block *region, unsigned n_region,
6396 basic_block *region_copy)
6398 unsigned i;
6399 bool free_region_copy = false;
6400 struct loop *loop = exit->dest->loop_father;
6401 struct loop *orig_loop = entry->dest->loop_father;
6402 basic_block switch_bb, entry_bb, nentry_bb;
6403 vec<basic_block> doms;
6404 int total_freq = 0, exit_freq = 0;
6405 profile_count total_count = profile_count::uninitialized (),
6406 exit_count = profile_count::uninitialized ();
6407 edge exits[2], nexits[2], e;
6408 gimple_stmt_iterator gsi;
6409 gimple *cond_stmt;
6410 edge sorig, snew;
6411 basic_block exit_bb;
6412 gphi_iterator psi;
6413 gphi *phi;
6414 tree def;
6415 struct loop *target, *aloop, *cloop;
6417 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6418 exits[0] = exit;
6419 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6421 if (!can_copy_bbs_p (region, n_region))
6422 return false;
6424 initialize_original_copy_tables ();
6425 set_loop_copy (orig_loop, loop);
6427 target = loop;
6428 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6430 if (bb_part_of_region_p (aloop->header, region, n_region))
6432 cloop = duplicate_loop (aloop, target);
6433 duplicate_subloops (aloop, cloop);
6437 if (!region_copy)
6439 region_copy = XNEWVEC (basic_block, n_region);
6440 free_region_copy = true;
6443 gcc_assert (!need_ssa_update_p (cfun));
6445 /* Record blocks outside the region that are dominated by something
6446 inside. */
6447 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6449 if (exit->src->count > 0)
6451 total_count = exit->src->count;
6452 exit_count = exit->count;
6453 /* Fix up corner cases, to avoid division by zero or creation of negative
6454 frequencies. */
6455 if (exit_count > total_count)
6456 exit_count = total_count;
6458 else
6460 total_freq = exit->src->frequency;
6461 exit_freq = EDGE_FREQUENCY (exit);
6462 /* Fix up corner cases, to avoid division by zero or creation of negative
6463 frequencies. */
6464 if (total_freq == 0)
6465 total_freq = 1;
6466 if (exit_freq > total_freq)
6467 exit_freq = total_freq;
6470 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6471 split_edge_bb_loc (exit), true);
6472 if (total_count.initialized_p ())
6474 scale_bbs_frequencies_profile_count (region, n_region,
6475 total_count - exit_count,
6476 total_count);
6477 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6478 total_count);
6480 else
6482 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6483 total_freq);
6484 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6487 /* Create the switch block, and put the exit condition to it. */
6488 entry_bb = entry->dest;
6489 nentry_bb = get_bb_copy (entry_bb);
6490 if (!last_stmt (entry->src)
6491 || !stmt_ends_bb_p (last_stmt (entry->src)))
6492 switch_bb = entry->src;
6493 else
6494 switch_bb = split_edge (entry);
6495 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6497 gsi = gsi_last_bb (switch_bb);
6498 cond_stmt = last_stmt (exit->src);
6499 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6500 cond_stmt = gimple_copy (cond_stmt);
6502 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6504 sorig = single_succ_edge (switch_bb);
6505 sorig->flags = exits[1]->flags;
6506 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6508 /* Register the new edge from SWITCH_BB in loop exit lists. */
6509 rescan_loop_exit (snew, true, false);
6511 /* Add the PHI node arguments. */
6512 add_phi_args_after_copy (region_copy, n_region, snew);
6514 /* Get rid of now superfluous conditions and associated edges (and phi node
6515 arguments). */
6516 exit_bb = exit->dest;
6518 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6519 PENDING_STMT (e) = NULL;
6521 /* The latch of ORIG_LOOP was copied, and so was the backedge
6522 to the original header. We redirect this backedge to EXIT_BB. */
6523 for (i = 0; i < n_region; i++)
6524 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6526 gcc_assert (single_succ_edge (region_copy[i]));
6527 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6528 PENDING_STMT (e) = NULL;
6529 for (psi = gsi_start_phis (exit_bb);
6530 !gsi_end_p (psi);
6531 gsi_next (&psi))
6533 phi = psi.phi ();
6534 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6535 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6538 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6539 PENDING_STMT (e) = NULL;
6541 /* Anything that is outside of the region, but was dominated by something
6542 inside needs to update dominance info. */
6543 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6544 doms.release ();
6545 /* Update the SSA web. */
6546 update_ssa (TODO_update_ssa);
6548 if (free_region_copy)
6549 free (region_copy);
6551 free_original_copy_tables ();
6552 return true;
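/* For context: one caller of gimple_duplicate_sese_tail is the
   auto-parallelization code in tree-parloops.c, which uses it to put a
   loop into "exit-first" form, i.e. to make the exit test run before
   the loop body.  */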
6555 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6556 adding blocks when the dominator traversal reaches EXIT. This
6557 function silently assumes that ENTRY strictly dominates EXIT. */
6559 void
6560 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6561 vec<basic_block> *bbs_p)
6563 basic_block son;
6565 for (son = first_dom_son (CDI_DOMINATORS, entry);
6566 son;
6567 son = next_dom_son (CDI_DOMINATORS, son))
6569 bbs_p->safe_push (son);
6570 if (son != exit)
6571 gather_blocks_in_sese_region (son, exit, bbs_p);
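/* An illustration of the traversal above: for the diamond-shaped region

        entry
        /   \
       A     B
        \   /
        exit

   the dominator children of ENTRY are A, B and EXIT (all three are
   immediately dominated by ENTRY).  A and B have no dominator children of
   their own, and the recursion stops at EXIT, so BBS_P receives A, B and
   EXIT.  */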
6575 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6576 The duplicates are recorded in VARS_MAP. */
6578 static void
6579 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6580 tree to_context)
6582 tree t = *tp, new_t;
6583 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6585 if (DECL_CONTEXT (t) == to_context)
6586 return;
6588 bool existed;
6589 tree &loc = vars_map->get_or_insert (t, &existed);
6591 if (!existed)
6593 if (SSA_VAR_P (t))
6595 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6596 add_local_decl (f, new_t);
6598 else
6600 gcc_assert (TREE_CODE (t) == CONST_DECL);
6601 new_t = copy_node (t);
6603 DECL_CONTEXT (new_t) = to_context;
6605 loc = new_t;
6607 else
6608 new_t = loc;
6610 *tp = new_t;
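/* Note: VARS_MAP memoizes the duplicates created above, so repeated
   references to the same decl in the moved region all resolve to a single
   copy, and each decl is duplicated at most once.  */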
6614 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6615 VARS_MAP maps old ssa names and var_decls to the new ones. */
6617 static tree
6618 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6619 tree to_context)
6621 tree new_name;
6623 gcc_assert (!virtual_operand_p (name));
6625 tree *loc = vars_map->get (name);
6627 if (!loc)
6629 tree decl = SSA_NAME_VAR (name);
6630 if (decl)
6632 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6633 replace_by_duplicate_decl (&decl, vars_map, to_context);
6634 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6635 decl, SSA_NAME_DEF_STMT (name));
6637 else
6638 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6639 name, SSA_NAME_DEF_STMT (name));
6641 /* Now that we've used the def stmt to define new_name, make sure it
6642 doesn't define name anymore. */
6643 SSA_NAME_DEF_STMT (name) = NULL;
6645 vars_map->put (name, new_name);
6647 else
6648 new_name = *loc;
6650 return new_name;
6653 struct move_stmt_d
6655 tree orig_block;
6656 tree new_block;
6657 tree from_context;
6658 tree to_context;
6659 hash_map<tree, tree> *vars_map;
6660 htab_t new_label_map;
6661 hash_map<void *, void *> *eh_map;
6662 bool remap_decls_p;
6665 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6666 contained in *TP if it was previously ORIG_BLOCK, and change the
6667 DECL_CONTEXT of every local variable referenced in *TP. */
6669 static tree
6670 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6672 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6673 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6674 tree t = *tp;
6676 if (EXPR_P (t))
6678 tree block = TREE_BLOCK (t);
6679 if (block == NULL_TREE)
6681 else if (block == p->orig_block
6682 || p->orig_block == NULL_TREE)
6683 TREE_SET_BLOCK (t, p->new_block);
6684 else if (flag_checking)
6686 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6687 block = BLOCK_SUPERCONTEXT (block);
6688 gcc_assert (block == p->orig_block);
6691 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6693 if (TREE_CODE (t) == SSA_NAME)
6694 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6695 else if (TREE_CODE (t) == PARM_DECL
6696 && gimple_in_ssa_p (cfun))
6697 *tp = *(p->vars_map->get (t));
6698 else if (TREE_CODE (t) == LABEL_DECL)
6700 if (p->new_label_map)
6702 struct tree_map in, *out;
6703 in.base.from = t;
6704 out = (struct tree_map *)
6705 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6706 if (out)
6707 *tp = t = out->to;
6710 DECL_CONTEXT (t) = p->to_context;
6712 else if (p->remap_decls_p)
6714 /* Replace T with its duplicate. T should no longer appear in the
6715 parent function, so this looks wasteful; however, it may appear
6716 in referenced_vars, and more importantly, as virtual operands of
6717 statements, and in alias lists of other variables. It would be
6718 quite difficult to expunge it from all those places. ??? It might
6719 suffice to do this for addressable variables. */
6720 if ((VAR_P (t) && !is_global_var (t))
6721 || TREE_CODE (t) == CONST_DECL)
6722 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6724 *walk_subtrees = 0;
6726 else if (TYPE_P (t))
6727 *walk_subtrees = 0;
6729 return NULL_TREE;
6732 /* Helper for move_stmt_r. Given an EH region number for the source
6733 function, map that to the duplicate EH region number in the dest. */
6735 static int
6736 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6738 eh_region old_r, new_r;
6740 old_r = get_eh_region_from_number (old_nr);
6741 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6743 return new_r->index;
6746 /* Similar, but operate on INTEGER_CSTs. */
6748 static tree
6749 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6751 int old_nr, new_nr;
6753 old_nr = tree_to_shwi (old_t_nr);
6754 new_nr = move_stmt_eh_region_nr (old_nr, p);
6756 return build_int_cst (integer_type_node, new_nr);
6759 /* Like move_stmt_op, but for gimple statements.
6761 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6762 contained in the current statement in *GSI_P and change the
6763 DECL_CONTEXT of every local variable referenced in the current
6764 statement. */
6766 static tree
6767 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6768 struct walk_stmt_info *wi)
6770 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6771 gimple *stmt = gsi_stmt (*gsi_p);
6772 tree block = gimple_block (stmt);
6774 if (block == p->orig_block
6775 || (p->orig_block == NULL_TREE
6776 && block != NULL_TREE))
6777 gimple_set_block (stmt, p->new_block);
6779 switch (gimple_code (stmt))
6781 case GIMPLE_CALL:
6782 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6784 tree r, fndecl = gimple_call_fndecl (stmt);
6785 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6786 switch (DECL_FUNCTION_CODE (fndecl))
6788 case BUILT_IN_EH_COPY_VALUES:
6789 r = gimple_call_arg (stmt, 1);
6790 r = move_stmt_eh_region_tree_nr (r, p);
6791 gimple_call_set_arg (stmt, 1, r);
6792 /* FALLTHRU */
6794 case BUILT_IN_EH_POINTER:
6795 case BUILT_IN_EH_FILTER:
6796 r = gimple_call_arg (stmt, 0);
6797 r = move_stmt_eh_region_tree_nr (r, p);
6798 gimple_call_set_arg (stmt, 0, r);
6799 break;
6801 default:
6802 break;
6805 break;
6807 case GIMPLE_RESX:
6809 gresx *resx_stmt = as_a <gresx *> (stmt);
6810 int r = gimple_resx_region (resx_stmt);
6811 r = move_stmt_eh_region_nr (r, p);
6812 gimple_resx_set_region (resx_stmt, r);
6814 break;
6816 case GIMPLE_EH_DISPATCH:
6818 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6819 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6820 r = move_stmt_eh_region_nr (r, p);
6821 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6823 break;
6825 case GIMPLE_OMP_RETURN:
6826 case GIMPLE_OMP_CONTINUE:
6827 break;
6828 default:
6829 if (is_gimple_omp (stmt))
6831 /* Do not remap variables inside OMP directives. Variables
6832 referenced in clauses and directive header belong to the
6833 parent function and should not be moved into the child
6834 function. */
6835 bool save_remap_decls_p = p->remap_decls_p;
6836 p->remap_decls_p = false;
6837 *handled_ops_p = true;
6839 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6840 move_stmt_op, wi);
6842 p->remap_decls_p = save_remap_decls_p;
6844 break;
6847 return NULL_TREE;
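/* move_stmt_r and move_stmt_op are meant to be used as a pair through the
   generic GIMPLE walker, as move_block_to_fn does below:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;   // a struct move_stmt_d * describing the move
     walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);
*/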
6850 /* Move basic block BB from function CFUN to function DEST_FN. The
6851 block is moved out of the original linked list and placed after
6852 block AFTER in the new list. Also, the block is removed from the
6853 original array of blocks and placed in DEST_FN's array of blocks.
6854 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6855 updated to reflect the moved edges.
6857 The local variables are remapped to new instances, VARS_MAP is used
6858 to record the mapping. */
6860 static void
6861 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6862 basic_block after, bool update_edge_count_p,
6863 struct move_stmt_d *d)
6865 struct control_flow_graph *cfg;
6866 edge_iterator ei;
6867 edge e;
6868 gimple_stmt_iterator si;
6869 unsigned old_len, new_len;
6871 /* Remove BB from dominance structures. */
6872 delete_from_dominance_info (CDI_DOMINATORS, bb);
6874 /* Move BB from its current loop to the copy in the new function. */
6875 if (current_loops)
6877 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6878 if (new_loop)
6879 bb->loop_father = new_loop;
6882 /* Link BB to the new linked list. */
6883 move_block_after (bb, after);
6885 /* Update the edge count in the corresponding flowgraphs. */
6886 if (update_edge_count_p)
6887 FOR_EACH_EDGE (e, ei, bb->succs)
6889 cfun->cfg->x_n_edges--;
6890 dest_cfun->cfg->x_n_edges++;
6893 /* Remove BB from the original basic block array. */
6894 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6895 cfun->cfg->x_n_basic_blocks--;
6897 /* Grow DEST_CFUN's basic block array if needed. */
6898 cfg = dest_cfun->cfg;
6899 cfg->x_n_basic_blocks++;
6900 if (bb->index >= cfg->x_last_basic_block)
6901 cfg->x_last_basic_block = bb->index + 1;
6903 old_len = vec_safe_length (cfg->x_basic_block_info);
6904 if ((unsigned) cfg->x_last_basic_block >= old_len)
6906 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6907 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6910 (*cfg->x_basic_block_info)[bb->index] = bb;
6912 /* Remap the variables in phi nodes. */
6913 for (gphi_iterator psi = gsi_start_phis (bb);
6914 !gsi_end_p (psi); )
6916 gphi *phi = psi.phi ();
6917 use_operand_p use;
6918 tree op = PHI_RESULT (phi);
6919 ssa_op_iter oi;
6920 unsigned i;
6922 if (virtual_operand_p (op))
6924 /* Remove the phi nodes for virtual operands (alias analysis will be
6925 run for the new function, anyway). */
6926 remove_phi_node (&psi, true);
6927 continue;
6930 SET_PHI_RESULT (phi,
6931 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6932 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6934 op = USE_FROM_PTR (use);
6935 if (TREE_CODE (op) == SSA_NAME)
6936 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6939 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6941 location_t locus = gimple_phi_arg_location (phi, i);
6942 tree block = LOCATION_BLOCK (locus);
6944 if (locus == UNKNOWN_LOCATION)
6945 continue;
6946 if (d->orig_block == NULL_TREE || block == d->orig_block)
6948 locus = set_block (locus, d->new_block);
6949 gimple_phi_arg_set_location (phi, i, locus);
6953 gsi_next (&psi);
6956 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6958 gimple *stmt = gsi_stmt (si);
6959 struct walk_stmt_info wi;
6961 memset (&wi, 0, sizeof (wi));
6962 wi.info = d;
6963 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6965 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6967 tree label = gimple_label_label (label_stmt);
6968 int uid = LABEL_DECL_UID (label);
6970 gcc_assert (uid > -1);
6972 old_len = vec_safe_length (cfg->x_label_to_block_map);
6973 if (old_len <= (unsigned) uid)
6975 new_len = 3 * uid / 2 + 1;
6976 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6979 (*cfg->x_label_to_block_map)[uid] = bb;
6980 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6982 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6984 if (uid >= dest_cfun->cfg->last_label_uid)
6985 dest_cfun->cfg->last_label_uid = uid + 1;
6988 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6989 remove_stmt_from_eh_lp_fn (cfun, stmt);
6991 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6992 gimple_remove_stmt_histograms (cfun, stmt);
6994 /* We cannot leave any operands allocated from the operand caches of
6995 the current function. */
6996 free_stmt_operands (cfun, stmt);
6997 push_cfun (dest_cfun);
6998 update_stmt (stmt);
6999 pop_cfun ();
7002 FOR_EACH_EDGE (e, ei, bb->succs)
7003 if (e->goto_locus != UNKNOWN_LOCATION)
7005 tree block = LOCATION_BLOCK (e->goto_locus);
7006 if (d->orig_block == NULL_TREE
7007 || block == d->orig_block)
7008 e->goto_locus = set_block (e->goto_locus, d->new_block);
7012 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7013 the outermost EH region. Use REGION as the incoming base EH region. */
7015 static eh_region
7016 find_outermost_region_in_block (struct function *src_cfun,
7017 basic_block bb, eh_region region)
7019 gimple_stmt_iterator si;
7021 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7023 gimple *stmt = gsi_stmt (si);
7024 eh_region stmt_region;
7025 int lp_nr;
7027 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7028 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7029 if (stmt_region)
7031 if (region == NULL)
7032 region = stmt_region;
7033 else if (stmt_region != region)
7035 region = eh_region_outermost (src_cfun, stmt_region, region);
7036 gcc_assert (region != NULL);
7041 return region;
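/* Callback for duplicate_eh_regions, used by move_sese_region_to_fn below:
   create a fresh artificial label for DECL in the destination function and
   record the old-to-new mapping in the hash table passed as DATA.  */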
7044 static tree
7045 new_label_mapper (tree decl, void *data)
7047 htab_t hash = (htab_t) data;
7048 struct tree_map *m;
7049 void **slot;
7051 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7053 m = XNEW (struct tree_map);
7054 m->hash = DECL_UID (decl);
7055 m->base.from = decl;
7056 m->to = create_artificial_label (UNKNOWN_LOCATION);
7057 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7058 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7059 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7061 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7062 gcc_assert (*slot == NULL);
7064 *slot = m;
7066 return m->to;
7069 /* Tree walker to replace the decls used inside value expressions by
7070 duplicates. */
7072 static tree
7073 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7075 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7077 switch (TREE_CODE (*tp))
7079 case VAR_DECL:
7080 case PARM_DECL:
7081 case RESULT_DECL:
7082 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7083 break;
7084 default:
7085 break;
7088 if (IS_TYPE_OR_DECL_P (*tp))
7089 *walk_subtrees = false;
7091 return NULL;
7094 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7095 subblocks. */
7097 static void
7098 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7099 tree to_context)
7101 tree *tp, t;
7103 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7105 t = *tp;
7106 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7107 continue;
7108 replace_by_duplicate_decl (&t, vars_map, to_context);
7109 if (t != *tp)
7111 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7113 tree x = DECL_VALUE_EXPR (*tp);
7114 struct replace_decls_d rd = { vars_map, to_context };
7115 x = unshare_expr (x);
7116 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7117 SET_DECL_VALUE_EXPR (t, x);
7118 DECL_HAS_VALUE_EXPR_P (t) = 1;
7120 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7121 *tp = t;
7125 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7126 replace_block_vars_by_duplicates (block, vars_map, to_context);
7129 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7130 from FN1 to FN2. */
7132 static void
7133 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7134 struct loop *loop)
7136 /* Discard it from the old loop array. */
7137 (*get_loops (fn1))[loop->num] = NULL;
7139 /* Place it in the new loop array, assigning it a new number. */
7140 loop->num = number_of_loops (fn2);
7141 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7143 /* Recurse to children. */
7144 for (loop = loop->inner; loop; loop = loop->next)
7145 fixup_loop_arrays_after_move (fn1, fn2, loop);
7148 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7149 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7151 DEBUG_FUNCTION void
7152 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7154 basic_block bb;
7155 edge_iterator ei;
7156 edge e;
7157 bitmap bbs = BITMAP_ALLOC (NULL);
7158 int i;
7160 gcc_assert (entry != NULL);
7161 gcc_assert (entry != exit);
7162 gcc_assert (bbs_p != NULL);
7164 gcc_assert (bbs_p->length () > 0);
7166 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7167 bitmap_set_bit (bbs, bb->index);
7169 gcc_assert (bitmap_bit_p (bbs, entry->index));
7170 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7172 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7174 if (bb == entry)
7176 gcc_assert (single_pred_p (entry));
7177 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7179 else
7180 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7182 e = ei_edge (ei);
7183 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7186 if (bb == exit)
7188 gcc_assert (single_succ_p (exit));
7189 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7191 else
7192 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7194 e = ei_edge (ei);
7195 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7199 BITMAP_FREE (bbs);
7202 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7204 bool
7205 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7207 bitmap release_names = (bitmap)data;
7209 if (TREE_CODE (from) != SSA_NAME)
7210 return true;
7212 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7213 return true;
7216 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7217 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7218 single basic block in the original CFG and the new basic block is
7219 returned. DEST_CFUN must not have a CFG yet.
7221 Note that the region need not be a pure SESE region. Blocks inside
7222 the region may contain calls to abort/exit. The only restriction
7223 is that ENTRY_BB should be the only entry point and it must
7224 dominate EXIT_BB.
7226 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7227 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7228 to the new function.
7230 All local variables referenced in the region are assumed to be in
7231 the corresponding BLOCK_VARS and unexpanded variable lists
7232 associated with DEST_CFUN.
7234 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7235 reimplement move_sese_region_to_fn by duplicating the region rather than
7236 moving it. */
7238 basic_block
7239 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7240 basic_block exit_bb, tree orig_block)
7242 vec<basic_block> bbs, dom_bbs;
7243 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7244 basic_block after, bb, *entry_pred, *exit_succ, abb;
7245 struct function *saved_cfun = cfun;
7246 int *entry_flag, *exit_flag;
7247 unsigned *entry_prob, *exit_prob;
7248 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7249 edge e;
7250 edge_iterator ei;
7251 htab_t new_label_map;
7252 hash_map<void *, void *> *eh_map;
7253 struct loop *loop = entry_bb->loop_father;
7254 struct loop *loop0 = get_loop (saved_cfun, 0);
7255 struct move_stmt_d d;
7257 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7258 region. */
7259 gcc_assert (entry_bb != exit_bb
7260 && (!exit_bb
7261 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7263 /* Collect all the blocks in the region. Manually add ENTRY_BB
7264 because it won't be added by dfs_enumerate_from. */
7265 bbs.create (0);
7266 bbs.safe_push (entry_bb);
7267 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7269 if (flag_checking)
7270 verify_sese (entry_bb, exit_bb, &bbs);
7272 /* The blocks that used to be dominated by something in BBS will now be
7273 dominated by the new block. */
7274 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7275 bbs.address (),
7276 bbs.length ());
7278 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7279 the predecessor edges to ENTRY_BB and the successor edges to
7280 EXIT_BB so that we can re-attach them to the new basic block that
7281 will replace the region. */
7282 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7283 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7284 entry_flag = XNEWVEC (int, num_entry_edges);
7285 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7286 i = 0;
7287 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7289 entry_prob[i] = e->probability;
7290 entry_flag[i] = e->flags;
7291 entry_pred[i++] = e->src;
7292 remove_edge (e);
7295 if (exit_bb)
7297 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7298 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7299 exit_flag = XNEWVEC (int, num_exit_edges);
7300 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7301 i = 0;
7302 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7304 exit_prob[i] = e->probability;
7305 exit_flag[i] = e->flags;
7306 exit_succ[i++] = e->dest;
7307 remove_edge (e);
7310 else
7312 num_exit_edges = 0;
7313 exit_succ = NULL;
7314 exit_flag = NULL;
7315 exit_prob = NULL;
7318 /* Switch context to the child function to initialize DEST_FN's CFG. */
7319 gcc_assert (dest_cfun->cfg == NULL);
7320 push_cfun (dest_cfun);
7322 init_empty_tree_cfg ();
7324 /* Initialize EH information for the new function. */
7325 eh_map = NULL;
7326 new_label_map = NULL;
7327 if (saved_cfun->eh)
7329 eh_region region = NULL;
7331 FOR_EACH_VEC_ELT (bbs, i, bb)
7332 region = find_outermost_region_in_block (saved_cfun, bb, region);
7334 init_eh_for_function ();
7335 if (region != NULL)
7337 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7338 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7339 new_label_mapper, new_label_map);
7343 /* Initialize an empty loop tree. */
7344 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7345 init_loops_structure (dest_cfun, loops, 1);
7346 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7347 set_loops_for_fn (dest_cfun, loops);
7349 /* Move the outlined loop tree part. */
7350 num_nodes = bbs.length ();
7351 FOR_EACH_VEC_ELT (bbs, i, bb)
7353 if (bb->loop_father->header == bb)
7355 struct loop *this_loop = bb->loop_father;
7356 struct loop *outer = loop_outer (this_loop);
7357 if (outer == loop
7358 /* If the SESE region contains some bbs ending with
7359 a noreturn call, those are considered to belong
7360 to the outermost loop in saved_cfun, rather than
7361 the entry_bb's loop_father. */
7362 || outer == loop0)
7364 if (outer != loop)
7365 num_nodes -= this_loop->num_nodes;
7366 flow_loop_tree_node_remove (bb->loop_father);
7367 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7368 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7371 else if (bb->loop_father == loop0 && loop0 != loop)
7372 num_nodes--;
7374 /* Remove loop exits from the outlined region. */
7375 if (loops_for_fn (saved_cfun)->exits)
7376 FOR_EACH_EDGE (e, ei, bb->succs)
7378 struct loops *l = loops_for_fn (saved_cfun);
7379 loop_exit **slot
7380 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7381 NO_INSERT);
7382 if (slot)
7383 l->exits->clear_slot (slot);
7388 /* Adjust the number of blocks in the tree root of the outlined part. */
7389 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7391 /* Setup a mapping to be used by move_block_to_fn. */
7392 loop->aux = current_loops->tree_root;
7393 loop0->aux = current_loops->tree_root;
7395 pop_cfun ();
7397 /* Move blocks from BBS into DEST_CFUN. */
7398 gcc_assert (bbs.length () >= 2);
7399 after = dest_cfun->cfg->x_entry_block_ptr;
7400 hash_map<tree, tree> vars_map;
7402 memset (&d, 0, sizeof (d));
7403 d.orig_block = orig_block;
7404 d.new_block = DECL_INITIAL (dest_cfun->decl);
7405 d.from_context = cfun->decl;
7406 d.to_context = dest_cfun->decl;
7407 d.vars_map = &vars_map;
7408 d.new_label_map = new_label_map;
7409 d.eh_map = eh_map;
7410 d.remap_decls_p = true;
7412 if (gimple_in_ssa_p (cfun))
7413 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7415 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7416 set_ssa_default_def (dest_cfun, arg, narg);
7417 vars_map.put (arg, narg);
7420 FOR_EACH_VEC_ELT (bbs, i, bb)
7422 /* No need to update edge counts on the last block. They have
7423 already been updated earlier when we detached the region from
7424 the original CFG. */
7425 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7426 after = bb;
7429 loop->aux = NULL;
7430 loop0->aux = NULL;
7431 /* Loop sizes are no longer correct, fix them up. */
7432 loop->num_nodes -= num_nodes;
7433 for (struct loop *outer = loop_outer (loop);
7434 outer; outer = loop_outer (outer))
7435 outer->num_nodes -= num_nodes;
7436 loop0->num_nodes -= bbs.length () - num_nodes;
7438 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7440 struct loop *aloop;
7441 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7442 if (aloop != NULL)
7444 if (aloop->simduid)
7446 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7447 d.to_context);
7448 dest_cfun->has_simduid_loops = true;
7450 if (aloop->force_vectorize)
7451 dest_cfun->has_force_vectorize_loops = true;
7455 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7456 if (orig_block)
7458 tree block;
7459 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7460 == NULL_TREE);
7461 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7462 = BLOCK_SUBBLOCKS (orig_block);
7463 for (block = BLOCK_SUBBLOCKS (orig_block);
7464 block; block = BLOCK_CHAIN (block))
7465 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7466 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7469 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7470 &vars_map, dest_cfun->decl);
7472 if (new_label_map)
7473 htab_delete (new_label_map);
7474 if (eh_map)
7475 delete eh_map;
7477 if (gimple_in_ssa_p (cfun))
7479 /* We need to release ssa-names in a defined order, so first find them,
7480 and then iterate in ascending version order. */
7481 bitmap release_names = BITMAP_ALLOC (NULL);
7482 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7483 bitmap_iterator bi;
7484 unsigned i;
7485 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7486 release_ssa_name (ssa_name (i));
7487 BITMAP_FREE (release_names);
7490 /* Rewire the entry and exit blocks. The successor to the entry
7491 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7492 the child function. Similarly, the predecessor of DEST_FN's
7493 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7494 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7495 various CFG manipulation functions get to the right CFG.
7497 FIXME, this is silly. The CFG ought to become a parameter to
7498 these helpers. */
7499 push_cfun (dest_cfun);
7500 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7501 if (exit_bb)
7502 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7503 pop_cfun ();
7505 /* Back in the original function, the SESE region has disappeared,
7506 create a new basic block in its place. */
7507 bb = create_empty_bb (entry_pred[0]);
7508 if (current_loops)
7509 add_bb_to_loop (bb, loop);
7510 for (i = 0; i < num_entry_edges; i++)
7512 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7513 e->probability = entry_prob[i];
7516 for (i = 0; i < num_exit_edges; i++)
7518 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7519 e->probability = exit_prob[i];
7522 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7523 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7524 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7525 dom_bbs.release ();
7527 if (exit_bb)
7529 free (exit_prob);
7530 free (exit_flag);
7531 free (exit_succ);
7533 free (entry_prob);
7534 free (entry_flag);
7535 free (entry_pred);
7536 bbs.release ();
7538 return bb;
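/* For context: the main client of move_sese_region_to_fn is the OpenMP
   expansion code in omp-expand.c, which outlines the body of e.g. a
   "#pragma omp parallel" region into the child function created for it;
   in the parent function the whole region collapses to the single basic
   block returned above.  */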
7541 /* Dump default def DEF to file FILE using FLAGS and indentation
7542 SPC. */
7544 static void
7545 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7547 for (int i = 0; i < spc; ++i)
7548 fprintf (file, " ");
7549 dump_ssaname_info_to_file (file, def, spc);
7551 print_generic_expr (file, TREE_TYPE (def), flags);
7552 fprintf (file, " ");
7553 print_generic_expr (file, def, flags);
7554 fprintf (file, " = ");
7555 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7556 fprintf (file, ";\n");
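/* For a default definition of an int parameter i, the code above prints
   a line of roughly this shape (illustrative):

     int i_1(D) = i;
*/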
7559 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7561 static void
7562 print_no_sanitize_attr_value (FILE *file, tree value)
7564 unsigned int flags = tree_to_uhwi (value);
7565 bool first = true;
7566 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7568 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7570 if (!first)
7571 fprintf (file, " | ");
7572 fprintf (file, "%s", sanitizer_opts[i].name);
7573 first = false;
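/* E.g. for a no_sanitize value with the "address" and "undefined"
   sanitizer bits set, this prints "address | undefined", using the
   spellings from the sanitizer_opts table.  */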
7578 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7581 void
7582 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7584 tree arg, var, old_current_fndecl = current_function_decl;
7585 struct function *dsf;
7586 bool ignore_topmost_bind = false, any_var = false;
7587 basic_block bb;
7588 tree chain;
7589 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7590 && decl_is_tm_clone (fndecl));
7591 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7593 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7595 fprintf (file, "__attribute__((");
7597 bool first = true;
7598 tree chain;
7599 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7600 first = false, chain = TREE_CHAIN (chain))
7602 if (!first)
7603 fprintf (file, ", ");
7605 tree name = get_attribute_name (chain);
7606 print_generic_expr (file, name, dump_flags);
7607 if (TREE_VALUE (chain) != NULL_TREE)
7609 fprintf (file, " (");
7611 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7612 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7613 else
7614 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7615 fprintf (file, ")");
7619 fprintf (file, "))\n");
7622 current_function_decl = fndecl;
7623 if (flags & TDF_GIMPLE)
7625 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7626 dump_flags | TDF_SLIM);
7627 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7629 else
7630 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7632 arg = DECL_ARGUMENTS (fndecl);
7633 while (arg)
7635 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7636 fprintf (file, " ");
7637 print_generic_expr (file, arg, dump_flags);
7638 if (DECL_CHAIN (arg))
7639 fprintf (file, ", ");
7640 arg = DECL_CHAIN (arg);
7642 fprintf (file, ")\n");
7644 dsf = DECL_STRUCT_FUNCTION (fndecl);
7645 if (dsf && (flags & TDF_EH))
7646 dump_eh_tree (file, dsf);
7648 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7650 dump_node (fndecl, TDF_SLIM | flags, file);
7651 current_function_decl = old_current_fndecl;
7652 return;
7655 /* When GIMPLE is lowered, the variables are no longer available in
7656 BIND_EXPRs, so display them separately. */
7657 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7659 unsigned ix;
7660 ignore_topmost_bind = true;
7662 fprintf (file, "{\n");
7663 if (gimple_in_ssa_p (fun)
7664 && (flags & TDF_ALIAS))
7666 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7667 arg = DECL_CHAIN (arg))
7669 tree def = ssa_default_def (fun, arg);
7670 if (def)
7671 dump_default_def (file, def, 2, flags);
7674 tree res = DECL_RESULT (fun->decl);
7675 if (res != NULL_TREE
7676 && DECL_BY_REFERENCE (res))
7678 tree def = ssa_default_def (fun, res);
7679 if (def)
7680 dump_default_def (file, def, 2, flags);
7683 tree static_chain = fun->static_chain_decl;
7684 if (static_chain != NULL_TREE)
7686 tree def = ssa_default_def (fun, static_chain);
7687 if (def)
7688 dump_default_def (file, def, 2, flags);
7692 if (!vec_safe_is_empty (fun->local_decls))
7693 FOR_EACH_LOCAL_DECL (fun, ix, var)
7695 print_generic_decl (file, var, flags);
7696 fprintf (file, "\n");
7698 any_var = true;
7701 tree name;
7703 if (gimple_in_ssa_p (cfun))
7704 FOR_EACH_SSA_NAME (ix, name, cfun)
7706 if (!SSA_NAME_VAR (name))
7708 fprintf (file, " ");
7709 print_generic_expr (file, TREE_TYPE (name), flags);
7710 fprintf (file, " ");
7711 print_generic_expr (file, name, flags);
7712 fprintf (file, ";\n");
7714 any_var = true;
7719 if (fun && fun->decl == fndecl
7720 && fun->cfg
7721 && basic_block_info_for_fn (fun))
7723 /* If the CFG has been built, emit a CFG-based dump. */
7724 if (!ignore_topmost_bind)
7725 fprintf (file, "{\n");
7727 if (any_var && n_basic_blocks_for_fn (fun))
7728 fprintf (file, "\n");
7730 FOR_EACH_BB_FN (bb, fun)
7731 dump_bb (file, bb, 2, flags);
7733 fprintf (file, "}\n");
7735 else if (fun->curr_properties & PROP_gimple_any)
7737 /* The function is now in GIMPLE form but the CFG has not been
7738 built yet. Emit the single sequence of GIMPLE statements
7739 that make up its body. */
7740 gimple_seq body = gimple_body (fndecl);
7742 if (gimple_seq_first_stmt (body)
7743 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7744 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7745 print_gimple_seq (file, body, 0, flags);
7746 else
7748 if (!ignore_topmost_bind)
7749 fprintf (file, "{\n");
7751 if (any_var)
7752 fprintf (file, "\n");
7754 print_gimple_seq (file, body, 2, flags);
7755 fprintf (file, "}\n");
7758 else
7760 int indent;
7762 /* Make a tree based dump. */
7763 chain = DECL_SAVED_TREE (fndecl);
7764 if (chain && TREE_CODE (chain) == BIND_EXPR)
7766 if (ignore_topmost_bind)
7768 chain = BIND_EXPR_BODY (chain);
7769 indent = 2;
7771 else
7772 indent = 0;
7774 else
7776 if (!ignore_topmost_bind)
7778 fprintf (file, "{\n");
7779 /* No topmost bind, pretend it's ignored for later. */
7780 ignore_topmost_bind = true;
7782 indent = 2;
7785 if (any_var)
7786 fprintf (file, "\n");
7788 print_generic_stmt_indented (file, chain, flags, indent);
7789 if (ignore_topmost_bind)
7790 fprintf (file, "}\n");
7793 if (flags & TDF_ENUMERATE_LOCALS)
7794 dump_enumerated_decls (file, flags);
7795 fprintf (file, "\n\n");
7797 current_function_decl = old_current_fndecl;
7800 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7802 DEBUG_FUNCTION void
7803 debug_function (tree fn, dump_flags_t flags)
7805 dump_function_to_file (fn, stderr, flags);
7809 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7811 static void
7812 print_pred_bbs (FILE *file, basic_block bb)
7814 edge e;
7815 edge_iterator ei;
7817 FOR_EACH_EDGE (e, ei, bb->preds)
7818 fprintf (file, "bb_%d ", e->src->index);
7822 /* Print on FILE the indexes for the successors of basic_block BB. */
7824 static void
7825 print_succ_bbs (FILE *file, basic_block bb)
7827 edge e;
7828 edge_iterator ei;
7830 FOR_EACH_EDGE (e, ei, bb->succs)
7831 fprintf (file, "bb_%d ", e->dest->index);
7834 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7836 void
7837 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7839 char *s_indent = (char *) alloca ((size_t) indent + 1);
7840 memset ((void *) s_indent, ' ', (size_t) indent);
7841 s_indent[indent] = '\0';
7843 /* Print basic_block's header. */
7844 if (verbosity >= 2)
7846 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7847 print_pred_bbs (file, bb);
7848 fprintf (file, "}, succs = {");
7849 print_succ_bbs (file, bb);
7850 fprintf (file, "})\n");
7853 /* Print basic_block's body. */
7854 if (verbosity >= 3)
7856 fprintf (file, "%s {\n", s_indent);
7857 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7858 fprintf (file, "%s }\n", s_indent);
7862 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7864 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7865 the VERBOSITY level, this outputs the contents of the loop, or just its
7866 structure. */
7868 static void
7869 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7871 char *s_indent;
7872 basic_block bb;
7874 if (loop == NULL)
7875 return;
7877 s_indent = (char *) alloca ((size_t) indent + 1);
7878 memset ((void *) s_indent, ' ', (size_t) indent);
7879 s_indent[indent] = '\0';
7881 /* Print loop's header. */
7882 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7883 if (loop->header)
7884 fprintf (file, "header = %d", loop->header->index);
7885 else
7887 fprintf (file, "deleted)\n");
7888 return;
7890 if (loop->latch)
7891 fprintf (file, ", latch = %d", loop->latch->index);
7892 else
7893 fprintf (file, ", multiple latches");
7894 fprintf (file, ", niter = ");
7895 print_generic_expr (file, loop->nb_iterations);
7897 if (loop->any_upper_bound)
7899 fprintf (file, ", upper_bound = ");
7900 print_decu (loop->nb_iterations_upper_bound, file);
7902 if (loop->any_likely_upper_bound)
7904 fprintf (file, ", likely_upper_bound = ");
7905 print_decu (loop->nb_iterations_likely_upper_bound, file);
7908 if (loop->any_estimate)
7910 fprintf (file, ", estimate = ");
7911 print_decu (loop->nb_iterations_estimate, file);
7913 fprintf (file, ")\n");
7915 /* Print loop's body. */
7916 if (verbosity >= 1)
7918 fprintf (file, "%s{\n", s_indent);
7919 FOR_EACH_BB_FN (bb, cfun)
7920 if (bb->loop_father == loop)
7921 print_loops_bb (file, bb, indent, verbosity);
7923 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7924 fprintf (file, "%s}\n", s_indent);
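/* Illustrative shape of the header line printed above (the values are
   made up):

     loop_1 (header = 3, latch = 5, niter = ..., upper_bound = 99, estimate = 99)

   followed, at VERBOSITY >= 1, by the loop body between braces.  */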
7928 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7929 spaces. Depending on the VERBOSITY level, this outputs the contents of
7930 the loops, or just their structure. */
7932 static void
7933 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7934 int verbosity)
7936 if (loop == NULL)
7937 return;
7939 print_loop (file, loop, indent, verbosity);
7940 print_loop_and_siblings (file, loop->next, indent, verbosity);
7943 /* Follow a CFG edge from the entry point of the program, and on entry
7944 of a loop, pretty print the loop structure on FILE. */
7946 void
7947 print_loops (FILE *file, int verbosity)
7949 basic_block bb;
7951 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7952 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7953 if (bb && bb->loop_father)
7954 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7957 /* Dump a loop. */
7959 DEBUG_FUNCTION void
7960 debug (struct loop &ref)
7962 print_loop (stderr, &ref, 0, /*verbosity*/0);
7965 DEBUG_FUNCTION void
7966 debug (struct loop *ptr)
7968 if (ptr)
7969 debug (*ptr);
7970 else
7971 fprintf (stderr, "<nil>\n");
7974 /* Dump a loop verbosely. */
7976 DEBUG_FUNCTION void
7977 debug_verbose (struct loop &ref)
7979 print_loop (stderr, &ref, 0, /*verbosity*/3);
7982 DEBUG_FUNCTION void
7983 debug_verbose (struct loop *ptr)
7985 if (ptr)
7986 debug_verbose (*ptr);
7987 else
7988 fprintf (stderr, "<nil>\n");
7992 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7994 DEBUG_FUNCTION void
7995 debug_loops (int verbosity)
7997 print_loops (stderr, verbosity);
8000 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8002 DEBUG_FUNCTION void
8003 debug_loop (struct loop *loop, int verbosity)
8005 print_loop (stderr, loop, 0, verbosity);
8008 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8009 level. */
8011 DEBUG_FUNCTION void
8012 debug_loop_num (unsigned num, int verbosity)
8014 debug_loop (get_loop (cfun, num), verbosity);
8017 /* Return true if BB ends with a call, possibly followed by some
8018 instructions that must stay with the call. Return false
8019 otherwise. */
8021 static bool
8022 gimple_block_ends_with_call_p (basic_block bb)
8024 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8025 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8029 /* Return true if BB ends with a conditional branch. Return false
8030 otherwise. */
8032 static bool
8033 gimple_block_ends_with_condjump_p (const_basic_block bb)
8035 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8036 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8040 /* Return true if statement T may terminate execution of BB in ways not
8041 explicitly represented in the CFG. */
8043 bool
8044 stmt_can_terminate_bb_p (gimple *t)
8046 tree fndecl = NULL_TREE;
8047 int call_flags = 0;
8049 /* An EH exception not handled internally terminates execution of the whole
8050 function. */
8051 if (stmt_can_throw_external (t))
8052 return true;
8054 /* NORETURN and LONGJMP calls already have an edge to exit.
8055 CONST and PURE calls do not need one.
8056 We don't currently check for CONST and PURE here, although
8057 it would be a good idea, because those attributes are
8058 figured out from the RTL in mark_constant_function, and
8059 the counter incrementation code from -fprofile-arcs
8060 leads to different results from -fbranch-probabilities. */
8061 if (is_gimple_call (t))
8063 fndecl = gimple_call_fndecl (t);
8064 call_flags = gimple_call_flags (t);
8067 if (is_gimple_call (t)
8068 && fndecl
8069 && DECL_BUILT_IN (fndecl)
8070 && (call_flags & ECF_NOTHROW)
8071 && !(call_flags & ECF_RETURNS_TWICE)
8072 /* fork() doesn't really return twice, but the effect of
8073 wrapping it in __gcov_fork() which calls __gcov_flush()
8074 and clears the counters before forking has the same
8075 effect as returning twice. Force a fake edge. */
8076 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8077 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8078 return false;
8080 if (is_gimple_call (t))
8082 edge_iterator ei;
8083 edge e;
8084 basic_block bb;
8086 if (call_flags & (ECF_PURE | ECF_CONST)
8087 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8088 return false;
8090 /* A function call may do a longjmp, terminate the program, or do other
8091 things. Special-case noreturn calls that have non-abnormal edges out,
8092 as in this case the fact is sufficiently represented by the lack of edges out of T. */
8093 if (!(call_flags & ECF_NORETURN))
8094 return true;
8096 bb = gimple_bb (t);
8097 FOR_EACH_EDGE (e, ei, bb->succs)
8098 if ((e->flags & EDGE_FAKE) == 0)
8099 return true;
8102 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8103 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8104 return true;
8106 return false;
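/* Summary of the cases above: T can terminate its block in a way the CFG
   does not show if it may throw to a handler outside the function, if it
   is a call that is not known to return normally (not const/pure, and
   either not noreturn or noreturn with real out-edges), or if it is a
   volatile or old-style basic asm.  */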
8110 /* Add fake edges to the function exit for any non-constant and
8111 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
8112 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8113 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8114 that were split.
8116 The goal is to expose cases in which entering a basic block does
8117 not imply that all subsequent instructions must be executed. */
8119 static int
8120 gimple_flow_call_edges_add (sbitmap blocks)
8122 int i;
8123 int blocks_split = 0;
8124 int last_bb = last_basic_block_for_fn (cfun);
8125 bool check_last_block = false;
8127 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8128 return 0;
8130 if (! blocks)
8131 check_last_block = true;
8132 else
8133 check_last_block = bitmap_bit_p (blocks,
8134 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8136 /* In the last basic block, before epilogue generation, there will be
8137 a fallthru edge to EXIT. Special care is required if the last insn
8138 of the last basic block is a call because make_edge folds duplicate
8139 edges, which would result in the fallthru edge also being marked
8140 fake, which would result in the fallthru edge being removed by
8141 remove_fake_edges, which would result in an invalid CFG.
8143 Moreover, we can't elide the outgoing fake edge, since the block
8144 profiler needs to take this into account in order to solve the minimal
8145 spanning tree in the case that the call doesn't return.
8147 Handle this by adding a dummy instruction in a new last basic block. */
8148 if (check_last_block)
8150 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8151 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8152 gimple *t = NULL;
8154 if (!gsi_end_p (gsi))
8155 t = gsi_stmt (gsi);
8157 if (t && stmt_can_terminate_bb_p (t))
8159 edge e;
8161 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8162 if (e)
8164 gsi_insert_on_edge (e, gimple_build_nop ());
8165 gsi_commit_edge_inserts ();
8170 /* Now add fake edges to the function exit for any non-constant
8171 calls since there is no way that we can determine if they will
8172 return or not... */
8173 for (i = 0; i < last_bb; i++)
8175 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8176 gimple_stmt_iterator gsi;
8177 gimple *stmt, *last_stmt;
8179 if (!bb)
8180 continue;
8182 if (blocks && !bitmap_bit_p (blocks, i))
8183 continue;
8185 gsi = gsi_last_nondebug_bb (bb);
8186 if (!gsi_end_p (gsi))
8188 last_stmt = gsi_stmt (gsi);
8191 stmt = gsi_stmt (gsi);
8192 if (stmt_can_terminate_bb_p (stmt))
8194 edge e;
8196 /* The handling above of the final block before the
8197 epilogue should be enough to verify that there is
8198 no edge to the exit block in CFG already.
8199 Calling make_edge in such case would cause us to
8200 mark that edge as fake and remove it later. */
8201 if (flag_checking && stmt == last_stmt)
8203 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8204 gcc_assert (e == NULL);
8207 /* Note that the following may create a new basic block
8208 and renumber the existing basic blocks. */
8209 if (stmt != last_stmt)
8211 e = split_block (bb, stmt);
8212 if (e)
8213 blocks_split++;
8215 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8217 gsi_prev (&gsi);
8219 while (!gsi_end_p (gsi));
8223 if (blocks_split)
8224 verify_flow_info ();
8226 return blocks_split;
8229 /* Removes edge E and all the blocks dominated by it, and updates dominance
8230 information. The IL in E->src needs to be updated separately.
8231 If dominance info is not available, only the edge E is removed. */
8233 void
8234 remove_edge_and_dominated_blocks (edge e)
8236 vec<basic_block> bbs_to_remove = vNULL;
8237 vec<basic_block> bbs_to_fix_dom = vNULL;
8238 edge f;
8239 edge_iterator ei;
8240 bool none_removed = false;
8241 unsigned i;
8242 basic_block bb, dbb;
8243 bitmap_iterator bi;
8245 /* If we are removing a path inside a non-root loop, that may change
8246 loop ownership of blocks or remove loops. Mark loops for fixup. */
8247 if (current_loops
8248 && loop_outer (e->src->loop_father) != NULL
8249 && e->src->loop_father == e->dest->loop_father)
8250 loops_state_set (LOOPS_NEED_FIXUP);
8252 if (!dom_info_available_p (CDI_DOMINATORS))
8254 remove_edge (e);
8255 return;
8258 /* No updating is needed for edges to exit. */
8259 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8261 if (cfgcleanup_altered_bbs)
8262 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8263 remove_edge (e);
8264 return;
8267 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8268 that is not dominated by E->dest, then this set is empty. Otherwise,
8269 all the basic blocks dominated by E->dest are removed.
8271 Also, to DF_IDOM we store the immediate dominators of the blocks in
8272 the dominance frontier of E (i.e., of the successors of the
8273 removed blocks, if there are any, and of E->dest otherwise). */
8274 FOR_EACH_EDGE (f, ei, e->dest->preds)
8276 if (f == e)
8277 continue;
8279 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8281 none_removed = true;
8282 break;
8286 auto_bitmap df, df_idom;
8287 if (none_removed)
8288 bitmap_set_bit (df_idom,
8289 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8290 else
8292 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8293 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8295 FOR_EACH_EDGE (f, ei, bb->succs)
8297 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8298 bitmap_set_bit (df, f->dest->index);
8301 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8302 bitmap_clear_bit (df, bb->index);
8304 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8306 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8307 bitmap_set_bit (df_idom,
8308 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8312 if (cfgcleanup_altered_bbs)
8314 /* Record the set of the altered basic blocks. */
8315 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8316 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8319 /* Remove E and the cancelled blocks. */
8320 if (none_removed)
8321 remove_edge (e);
8322 else
8324 /* Walk backwards so as to get a chance to substitute all
8325 released DEFs into debug stmts. See
8326 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8327 details. */
8328 for (i = bbs_to_remove.length (); i-- > 0; )
8329 delete_basic_block (bbs_to_remove[i]);
8332 /* Update the dominance information. The immediate dominator may change only
8333 for blocks whose immediate dominator belongs to DF_IDOM:
8335 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8336 removal. Let Z be the block such that idom(Z) = Y and
8337 Z dominates X after the removal. Before removal, there exists a path P
8338 from Y to X that avoids Z. Let F be the last edge on P that is
8339 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8340 dominates W, and because of P, Z does not dominate W), and W belongs to
8341 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8342 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8344 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8345 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8346 dbb;
8347 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8348 bbs_to_fix_dom.safe_push (dbb);
8351 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8353 bbs_to_remove.release ();
8354 bbs_to_fix_dom.release ();
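/* A small worked example of the dominator update above.  In the CFG

        A
       / \
      B   C
       \ /
        D

   removing the edge A->B leaves B unreachable (its only predecessor was
   A), so B and everything it dominates, here just B, is deleted.  The
   dominance frontier of the removed set is { D }, and idom(D) = A, so
   DF_IDOM = { A }.  Fixing the dominator children of A then recomputes
   idom(D) = C, since C is now D's only predecessor.  */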
8357 /* Purge dead EH edges from basic block BB. */
8359 bool
8360 gimple_purge_dead_eh_edges (basic_block bb)
8362 bool changed = false;
8363 edge e;
8364 edge_iterator ei;
8365 gimple *stmt = last_stmt (bb);
8367 if (stmt && stmt_can_throw_internal (stmt))
8368 return false;
8370 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8372 if (e->flags & EDGE_EH)
8374 remove_edge_and_dominated_blocks (e);
8375 changed = true;
8377 else
8378 ei_next (&ei);
8381 return changed;
8384 /* Purge dead EH edges from basic block listed in BLOCKS. */
8386 bool
8387 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8389 bool changed = false;
8390 unsigned i;
8391 bitmap_iterator bi;
8393 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8395 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8397 /* Earlier gimple_purge_dead_eh_edges could have removed
8398 this basic block already. */
8399 gcc_assert (bb || changed);
8400 if (bb != NULL)
8401 changed |= gimple_purge_dead_eh_edges (bb);
8404 return changed;
8407 /* Purge dead abnormal call edges from basic block BB. */
8409 bool
8410 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8412 bool changed = false;
8413 edge e;
8414 edge_iterator ei;
8415 gimple *stmt = last_stmt (bb);
8417 if (!cfun->has_nonlocal_label
8418 && !cfun->calls_setjmp)
8419 return false;
8421 if (stmt && stmt_can_make_abnormal_goto (stmt))
8422 return false;
8424 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8426 if (e->flags & EDGE_ABNORMAL)
8428 if (e->flags & EDGE_FALLTHRU)
8429 e->flags &= ~EDGE_ABNORMAL;
8430 else
8431 remove_edge_and_dominated_blocks (e);
8432 changed = true;
8434 else
8435 ei_next (&ei);
8438 return changed;
8441 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8443 bool
8444 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8446 bool changed = false;
8447 unsigned i;
8448 bitmap_iterator bi;
8450 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8452 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8454 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8455 this basic block already. */
8456 gcc_assert (bb || changed);
8457 if (bb != NULL)
8458 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8461 return changed;
8464 /* This function is called whenever a new edge is created or
8465 redirected. */
8467 static void
8468 gimple_execute_on_growing_pred (edge e)
8470 basic_block bb = e->dest;
8472 if (!gimple_seq_empty_p (phi_nodes (bb)))
8473 reserve_phi_args_for_new_edge (bb);
8476 /* This function is called immediately before edge E is removed from
8477 the edge vector E->dest->preds. */
8479 static void
8480 gimple_execute_on_shrinking_pred (edge e)
8482 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8483 remove_phi_args (e);
8486 /*---------------------------------------------------------------------------
8487 Helper functions for Loop versioning
8488 ---------------------------------------------------------------------------*/
8490 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8491 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
8492 'new_head' was created by splitting 'second's incoming edge, that edge
8493 received phi arguments from split_edge ().  Later, an additional edge 'e'
8494 was created to connect 'new_head' and 'first'.  This routine now adds to
8495 edge 'e' the phi args that the new_head-to-second edge received as part
8496 of the edge splitting. */
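/* Roughly, the shape handled here (illustrative):

       new_head
       e/    \e2
       v      v
     first  second

   For every phi pair, the argument that SECOND's phi carries for E2 is
   added to FIRST's phi as the argument for E.  */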
8498 static void
8499 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8500 basic_block new_head, edge e)
8502 gphi *phi1, *phi2;
8503 gphi_iterator psi1, psi2;
8504 tree def;
8505 edge e2 = find_edge (new_head, second);
8507 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8508 edge, we should always have an edge from NEW_HEAD to SECOND. */
8509 gcc_assert (e2 != NULL);
8511 /* Walk all of 'second' basic block's phi nodes and add phi args to
8512 edge 'e' for the 'first' head. PHI args are always in the same order. */
8514 for (psi2 = gsi_start_phis (second),
8515 psi1 = gsi_start_phis (first);
8516 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8517 gsi_next (&psi2), gsi_next (&psi1))
8519 phi1 = psi1.phi ();
8520 phi2 = psi2.phi ();
8521 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8522 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8527 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8528 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8529 the destination of the ELSE part. */
8531 static void
8532 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8533 basic_block second_head ATTRIBUTE_UNUSED,
8534 basic_block cond_bb, void *cond_e)
8536 gimple_stmt_iterator gsi;
8537 gimple *new_cond_expr;
8538 tree cond_expr = (tree) cond_e;
8539 edge e0;
8541 /* Build the new conditional expression. */
8542 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8543 NULL_TREE, NULL_TREE);
8545 /* Add new cond in cond_bb. */
8546 gsi = gsi_last_bb (cond_bb);
8547 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8549 /* Adjust edges appropriately to connect new head with first head
8550 as well as second head. */
8551 e0 = single_succ_edge (cond_bb);
8552 e0->flags &= ~EDGE_FALLTHRU;
8553 e0->flags |= EDGE_FALSE_VALUE;
8557 /* Do book-keeping of basic block BB for the profile consistency checker.
8558 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8559 do post-pass accounting. Store the counts in RECORD. */
8560 static void
8561 gimple_account_profile_record (basic_block bb, int after_pass,
8562 struct profile_record *record)
8564 gimple_stmt_iterator i;
8565 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8567 record->size[after_pass]
8568 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8569 if (bb->count.initialized_p ())
8570 record->time[after_pass]
8571 += estimate_num_insns (gsi_stmt (i),
8572 &eni_time_weights) * bb->count.to_gcov_type ();
8573 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8574 record->time[after_pass]
8575 += estimate_num_insns (gsi_stmt (i),
8576 &eni_time_weights) * bb->frequency;
8580 struct cfg_hooks gimple_cfg_hooks = {
8581 "gimple",
8582 gimple_verify_flow_info,
8583 gimple_dump_bb, /* dump_bb */
8584 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8585 create_bb, /* create_basic_block */
8586 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8587 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8588 gimple_can_remove_branch_p, /* can_remove_branch_p */
8589 remove_bb, /* delete_basic_block */
8590 gimple_split_block, /* split_block */
8591 gimple_move_block_after, /* move_block_after */
8592 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8593 gimple_merge_blocks, /* merge_blocks */
8594 gimple_predict_edge, /* predict_edge */
8595 gimple_predicted_by_p, /* predicted_by_p */
8596 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8597 gimple_duplicate_bb, /* duplicate_block */
8598 gimple_split_edge, /* split_edge */
8599 gimple_make_forwarder_block, /* make_forward_block */
8600 NULL, /* tidy_fallthru_edge */
8601 NULL, /* force_nonfallthru */
8602 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8603 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8604 gimple_flow_call_edges_add, /* flow_call_edges_add */
8605 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8606 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8607 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8608 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8609 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8610 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8611 flush_pending_stmts, /* flush_pending_stmts */
8612 gimple_empty_block_p, /* block_empty_p */
8613 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8614 gimple_account_profile_record,
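/* These hooks take effect once a pass calls gimple_register_cfg_hooks
   (as the selftests below do); from then on the generic routines in
   cfghooks.c, such as split_edge and redirect_edge_and_branch, dispatch
   to the gimple implementations listed above.  A hedged sketch:

     gimple_register_cfg_hooks ();
     split_edge (e);   (this now dispatches to gimple_split_edge)  */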
8618 /* Split all critical edges. */
8620 unsigned int
8621 split_critical_edges (void)
8623 basic_block bb;
8624 edge e;
8625 edge_iterator ei;
8627 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8628 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8629 mappings around the calls to split_edge. */
8630 start_recording_case_labels ();
8631 FOR_ALL_BB_FN (bb, cfun)
8633 FOR_EACH_EDGE (e, ei, bb->succs)
8635 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8636 split_edge (e);
8637 /* PRE inserts statements on edges and expects that,
8638 since split_critical_edges was done beforehand, committing edge
8639 insertions will not split more edges. In addition to critical
8640 edges we must split edges whose source block has multiple
8641 successors and ends in a control flow statement, such as RESX.
8642 Go ahead and split them too. This matches the logic in
8643 gimple_find_edge_insert_loc. */
8644 else if ((!single_pred_p (e->dest)
8645 || !gimple_seq_empty_p (phi_nodes (e->dest))
8646 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8647 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8648 && !(e->flags & EDGE_ABNORMAL))
8650 gimple_stmt_iterator gsi;
8652 gsi = gsi_last_bb (e->src);
8653 if (!gsi_end_p (gsi)
8654 && stmt_ends_bb_p (gsi_stmt (gsi))
8655 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8656 && !gimple_call_builtin_p (gsi_stmt (gsi),
8657 BUILT_IN_RETURN)))
8658 split_edge (e);
8662 end_recording_case_labels ();
8663 return 0;
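/* For reference (illustrative): an edge is critical when its source has
   more than one successor and its destination more than one predecessor,
   e.g. the A->D edge in

        A   B
       / \ /
      C   D

   Splitting places a new empty block on such an edge so that statements
   can later be committed to it; EDGE_CRITICAL_P tests this property.  */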
8666 namespace {
8668 const pass_data pass_data_split_crit_edges =
8670 GIMPLE_PASS, /* type */
8671 "crited", /* name */
8672 OPTGROUP_NONE, /* optinfo_flags */
8673 TV_TREE_SPLIT_EDGES, /* tv_id */
8674 PROP_cfg, /* properties_required */
8675 PROP_no_crit_edges, /* properties_provided */
8676 0, /* properties_destroyed */
8677 0, /* todo_flags_start */
8678 0, /* todo_flags_finish */
8681 class pass_split_crit_edges : public gimple_opt_pass
8683 public:
8684 pass_split_crit_edges (gcc::context *ctxt)
8685 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8688 /* opt_pass methods: */
8689 virtual unsigned int execute (function *) { return split_critical_edges (); }
8691 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8692 }; // class pass_split_crit_edges
8694 } // anon namespace
8696 gimple_opt_pass *
8697 make_pass_split_crit_edges (gcc::context *ctxt)
8699 return new pass_split_crit_edges (ctxt);
8703 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8704 in basic block BB, splitting the block as appropriate
8705 and creating a new conditionally executed basic block.
8706 Return the created basic block. */
8707 basic_block
8708 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8710 edge fall = split_block (bb, stmt);
8711 gimple_stmt_iterator iter = gsi_last_bb (bb);
8712 basic_block new_bb;
8714 /* Insert cond statement. */
8715 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8716 if (gsi_end_p (iter))
8717 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8718 else
8719 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8721 /* Create conditionally executed block. */
8722 new_bb = create_empty_bb (bb);
8723 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8724 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8726 /* Fix edge for split bb. */
8727 fall->flags = EDGE_FALSE_VALUE;
8729 /* Update dominance info. */
8730 if (dom_info_available_p (CDI_DOMINATORS))
8732 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8733 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8736 /* Update loop info. */
8737 if (current_loops)
8738 add_bb_to_loop (new_bb, bb->loop_father);
8740 return new_bb;
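/* A hedged usage sketch (names hypothetical): to create a block that
   executes only when A < B, a caller could do

     gcond *cond = gimple_build_cond (LT_EXPR, a, b, NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   and then fill THEN_BB with the guarded statements.  */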
8743 /* Build a ternary operation and gimplify it. Emit code before GSI.
8744 Return the gimple_val holding the result. */
8746 tree
8747 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8748 tree type, tree a, tree b, tree c)
8750 tree ret;
8751 location_t loc = gimple_location (gsi_stmt (*gsi));
8753 ret = fold_build3_loc (loc, code, type, a, b, c);
8754 STRIP_NOPS (ret);
8756 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8757 GSI_SAME_STMT);
8760 /* Build a binary operation and gimplify it. Emit code before GSI.
8761 Return the gimple_val holding the result. */
8763 tree
8764 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8765 tree type, tree a, tree b)
8767 tree ret;
8769 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8770 STRIP_NOPS (ret);
8772 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8773 GSI_SAME_STMT);
8776 /* Build a unary operation and gimplify it. Emit code before GSI.
8777 Return the gimple_val holding the result. */
8779 tree
8780 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8781 tree a)
8783 tree ret;
8785 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8786 STRIP_NOPS (ret);
8788 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8789 GSI_SAME_STMT);
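/* A hedged sketch of chaining these helpers (names hypothetical): to
   compute (a + b) * c before *GSI one could write

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   each call folds, gimplifies, emits statements before *GSI and
   returns a gimple value usable as an operand.  */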
8794 /* Given a basic block B which ends with a conditional and has
8795 precisely two successors, determine which of the edges is taken if
8796 the conditional is true and which is taken if the conditional is
8797 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8799 void
8800 extract_true_false_edges_from_block (basic_block b,
8801 edge *true_edge,
8802 edge *false_edge)
8804 edge e = EDGE_SUCC (b, 0);
8806 if (e->flags & EDGE_TRUE_VALUE)
8808 *true_edge = e;
8809 *false_edge = EDGE_SUCC (b, 1);
8811 else
8813 *false_edge = e;
8814 *true_edge = EDGE_SUCC (b, 1);
8819 /* From a controlling predicate in the immediate dominator DOM of
8820 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8821 predicate evaluates to true and false and store them to
8822 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8823 they are non-NULL. Returns true if the edges can be determined,
8824 else return false. */
8826 bool
8827 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8828 edge *true_controlled_edge,
8829 edge *false_controlled_edge)
8831 basic_block bb = phiblock;
8832 edge true_edge, false_edge, tem;
8833 edge e0 = NULL, e1 = NULL;
8835 /* We have to verify that one edge into the PHI node is dominated
8836 by the true edge of the predicate block and the other edge
8837 dominated by the false edge. This ensures that the PHI argument
8838 we are going to take is completely determined by the path we
8839 take from the predicate block.
8840 We can only use BB dominance checks below if the destination of
8841 the true/false edges are dominated by their edge, thus only
8842 have a single predecessor. */
8843 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8844 tem = EDGE_PRED (bb, 0);
8845 if (tem == true_edge
8846 || (single_pred_p (true_edge->dest)
8847 && (tem->src == true_edge->dest
8848 || dominated_by_p (CDI_DOMINATORS,
8849 tem->src, true_edge->dest))))
8850 e0 = tem;
8851 else if (tem == false_edge
8852 || (single_pred_p (false_edge->dest)
8853 && (tem->src == false_edge->dest
8854 || dominated_by_p (CDI_DOMINATORS,
8855 tem->src, false_edge->dest))))
8856 e1 = tem;
8857 else
8858 return false;
8859 tem = EDGE_PRED (bb, 1);
8860 if (tem == true_edge
8861 || (single_pred_p (true_edge->dest)
8862 && (tem->src == true_edge->dest
8863 || dominated_by_p (CDI_DOMINATORS,
8864 tem->src, true_edge->dest))))
8865 e0 = tem;
8866 else if (tem == false_edge
8867 || (single_pred_p (false_edge->dest)
8868 && (tem->src == false_edge->dest
8869 || dominated_by_p (CDI_DOMINATORS,
8870 tem->src, false_edge->dest))))
8871 e1 = tem;
8872 else
8873 return false;
8874 if (!e0 || !e1)
8875 return false;
8877 if (true_controlled_edge)
8878 *true_controlled_edge = e0;
8879 if (false_controlled_edge)
8880 *false_controlled_edge = e1;
8882 return true;
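/* The shape this matches, roughly (illustrative):

        DOM
       t/  \f
       v    v
       T    F
        \  /
         vv
      PHIBLOCK

   The predecessor edge of PHIBLOCK dominated by DOM's true edge is
   returned as *TRUE_CONTROLLED_EDGE, the one dominated by the false
   edge as *FALSE_CONTROLLED_EDGE.  */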
8887 /* Emit return warnings. */
8889 namespace {
8891 const pass_data pass_data_warn_function_return =
8893 GIMPLE_PASS, /* type */
8894 "*warn_function_return", /* name */
8895 OPTGROUP_NONE, /* optinfo_flags */
8896 TV_NONE, /* tv_id */
8897 PROP_cfg, /* properties_required */
8898 0, /* properties_provided */
8899 0, /* properties_destroyed */
8900 0, /* todo_flags_start */
8901 0, /* todo_flags_finish */
8904 class pass_warn_function_return : public gimple_opt_pass
8906 public:
8907 pass_warn_function_return (gcc::context *ctxt)
8908 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8911 /* opt_pass methods: */
8912 virtual unsigned int execute (function *);
8914 }; // class pass_warn_function_return
8916 unsigned int
8917 pass_warn_function_return::execute (function *fun)
8919 source_location location;
8920 gimple *last;
8921 edge e;
8922 edge_iterator ei;
8924 if (!targetm.warn_func_return (fun->decl))
8925 return 0;
8927 /* If we have a path to EXIT, then we do return. */
8928 if (TREE_THIS_VOLATILE (fun->decl)
8929 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8931 location = UNKNOWN_LOCATION;
8932 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8934 last = last_stmt (e->src);
8935 if ((gimple_code (last) == GIMPLE_RETURN
8936 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8937 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8938 break;
8940 if (location == UNKNOWN_LOCATION)
8941 location = cfun->function_end_locus;
8942 warning_at (location, 0, "%<noreturn%> function does return");
8945 /* If we see "return;" in some basic block, then we do reach the end
8946 without returning a value. */
8947 else if (warn_return_type
8948 && !TREE_NO_WARNING (fun->decl)
8949 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8950 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8952 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8954 gimple *last = last_stmt (e->src);
8955 greturn *return_stmt = dyn_cast <greturn *> (last);
8956 if (return_stmt
8957 && gimple_return_retval (return_stmt) == NULL
8958 && !gimple_no_warning_p (last))
8960 location = gimple_location (last);
8961 if (location == UNKNOWN_LOCATION)
8962 location = fun->function_end_locus;
8963 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8964 TREE_NO_WARNING (fun->decl) = 1;
8965 break;
8969 return 0;
8972 } // anon namespace
8974 gimple_opt_pass *
8975 make_pass_warn_function_return (gcc::context *ctxt)
8977 return new pass_warn_function_return (ctxt);
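/* For example (illustrative), with -Wreturn-type the function

     int f (int x) { if (x) return 1; }

   reaches the end without a value on the x == 0 path and triggers
   "control reaches end of non-void function"; a noreturn function
   with a reachable return triggers the first warning above.  */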
8980 /* Walk a gimplified function and warn about calls whose return value is
8981 ignored although the callee has attribute((warn_unused_result)) set.
8982 This is done before inlining, so we don't have to worry about that. */
8984 static void
8985 do_warn_unused_result (gimple_seq seq)
8987 tree fdecl, ftype;
8988 gimple_stmt_iterator i;
8990 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8992 gimple *g = gsi_stmt (i);
8994 switch (gimple_code (g))
8996 case GIMPLE_BIND:
8997 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8998 break;
8999 case GIMPLE_TRY:
9000 do_warn_unused_result (gimple_try_eval (g));
9001 do_warn_unused_result (gimple_try_cleanup (g));
9002 break;
9003 case GIMPLE_CATCH:
9004 do_warn_unused_result (gimple_catch_handler (
9005 as_a <gcatch *> (g)));
9006 break;
9007 case GIMPLE_EH_FILTER:
9008 do_warn_unused_result (gimple_eh_filter_failure (g));
9009 break;
9011 case GIMPLE_CALL:
9012 if (gimple_call_lhs (g))
9013 break;
9014 if (gimple_call_internal_p (g))
9015 break;
9017 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9018 LHS. All calls whose value is ignored should be
9019 represented like this. Look for the attribute. */
9020 fdecl = gimple_call_fndecl (g);
9021 ftype = gimple_call_fntype (g);
9023 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9025 location_t loc = gimple_location (g);
9027 if (fdecl)
9028 warning_at (loc, OPT_Wunused_result,
9029 "ignoring return value of %qD, "
9030 "declared with attribute warn_unused_result",
9031 fdecl);
9032 else
9033 warning_at (loc, OPT_Wunused_result,
9034 "ignoring return value of function "
9035 "declared with attribute warn_unused_result");
9037 break;
9039 default:
9040 /* Not a container, not a call, or a call whose value is used. */
9041 break;
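/* For example (illustrative):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   warns with -Wunused-result: "ignoring return value of 'f', declared
   with attribute warn_unused_result".  */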
9046 namespace {
9048 const pass_data pass_data_warn_unused_result =
9050 GIMPLE_PASS, /* type */
9051 "*warn_unused_result", /* name */
9052 OPTGROUP_NONE, /* optinfo_flags */
9053 TV_NONE, /* tv_id */
9054 PROP_gimple_any, /* properties_required */
9055 0, /* properties_provided */
9056 0, /* properties_destroyed */
9057 0, /* todo_flags_start */
9058 0, /* todo_flags_finish */
9061 class pass_warn_unused_result : public gimple_opt_pass
9063 public:
9064 pass_warn_unused_result (gcc::context *ctxt)
9065 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9068 /* opt_pass methods: */
9069 virtual bool gate (function *) { return flag_warn_unused_result; }
9070 virtual unsigned int execute (function *)
9072 do_warn_unused_result (gimple_body (current_function_decl));
9073 return 0;
9076 }; // class pass_warn_unused_result
9078 } // anon namespace
9080 gimple_opt_pass *
9081 make_pass_warn_unused_result (gcc::context *ctxt)
9083 return new pass_warn_unused_result (ctxt);
9086 /* IPA passes, compilation of earlier functions or inlining
9087 might have changed some properties, such as marking functions nothrow,
9088 pure, const or noreturn.
9089 Remove redundant edges and basic blocks, and create new ones if necessary.
9091 This pass can't be executed as a standalone pass from the pass manager,
9092 because in between inlining and this fixup verify_flow_info would fail. */
9094 unsigned int
9095 execute_fixup_cfg (void)
9097 basic_block bb;
9098 gimple_stmt_iterator gsi;
9099 int todo = 0;
9100 edge e;
9101 edge_iterator ei;
9102 cgraph_node *node = cgraph_node::get (current_function_decl);
9103 profile_count num = node->count;
9104 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9105 bool scale = num.initialized_p ()
9106 && (den > 0 || num == profile_count::zero ())
9107 && !(num == den);
9109 if (scale)
9111 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9112 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9113 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9115 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9116 e->count = e->count.apply_scale (num, den);
9119 FOR_EACH_BB_FN (bb, cfun)
9121 if (scale)
9122 bb->count = bb->count.apply_scale (num, den);
9123 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9125 gimple *stmt = gsi_stmt (gsi);
9126 tree decl = is_gimple_call (stmt)
9127 ? gimple_call_fndecl (stmt)
9128 : NULL;
9129 if (decl)
9131 int flags = gimple_call_flags (stmt);
9132 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9134 if (gimple_purge_dead_abnormal_call_edges (bb))
9135 todo |= TODO_cleanup_cfg;
9137 if (gimple_in_ssa_p (cfun))
9139 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9140 update_stmt (stmt);
9144 if (flags & ECF_NORETURN
9145 && fixup_noreturn_call (stmt))
9146 todo |= TODO_cleanup_cfg;
9149 /* Remove stores to variables we marked write-only.
9150 Keep the access when the store has side effects, i.e. when the
9151 source is volatile. */
9152 if (gimple_store_p (stmt)
9153 && !gimple_has_side_effects (stmt))
9155 tree lhs = get_base_address (gimple_get_lhs (stmt));
9157 if (VAR_P (lhs)
9158 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9159 && varpool_node::get (lhs)->writeonly)
9161 unlink_stmt_vdef (stmt);
9162 gsi_remove (&gsi, true);
9163 release_defs (stmt);
9164 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9165 continue;
9168 /* For calls we can simply remove LHS when it is known
9169 to be write-only. */
9170 if (is_gimple_call (stmt)
9171 && gimple_get_lhs (stmt))
9173 tree lhs = get_base_address (gimple_get_lhs (stmt));
9175 if (VAR_P (lhs)
9176 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9177 && varpool_node::get (lhs)->writeonly)
9179 gimple_call_set_lhs (stmt, NULL);
9180 update_stmt (stmt);
9181 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9185 if (maybe_clean_eh_stmt (stmt)
9186 && gimple_purge_dead_eh_edges (bb))
9187 todo |= TODO_cleanup_cfg;
9188 gsi_next (&gsi);
9191 if (scale)
9192 FOR_EACH_EDGE (e, ei, bb->succs)
9193 e->count = e->count.apply_scale (num, den);
9195 /* If we have a basic block with no successors that does not
9196 end with a control statement or a noreturn call, end it with
9197 a call to __builtin_unreachable. This situation can occur
9198 when inlining a noreturn call that does in fact return. */
9199 if (EDGE_COUNT (bb->succs) == 0)
9201 gimple *stmt = last_stmt (bb);
9202 if (!stmt
9203 || (!is_ctrl_stmt (stmt)
9204 && (!is_gimple_call (stmt)
9205 || !gimple_call_noreturn_p (stmt))))
9207 if (stmt && is_gimple_call (stmt))
9208 gimple_call_set_ctrl_altering (stmt, false);
9209 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9210 stmt = gimple_build_call (fndecl, 0);
9211 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9212 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9213 if (!cfun->after_inlining)
9215 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9216 int freq
9217 = compute_call_stmt_bb_frequency (current_function_decl,
9218 bb);
9219 node->create_edge (cgraph_node::get_create (fndecl),
9220 call_stmt, bb->count, freq);
9225 if (scale)
9226 compute_function_frequency ();
9228 if (current_loops
9229 && (todo & TODO_cleanup_cfg))
9230 loops_state_set (LOOPS_NEED_FIXUP);
9232 return todo;
9235 namespace {
9237 const pass_data pass_data_fixup_cfg =
9239 GIMPLE_PASS, /* type */
9240 "fixup_cfg", /* name */
9241 OPTGROUP_NONE, /* optinfo_flags */
9242 TV_NONE, /* tv_id */
9243 PROP_cfg, /* properties_required */
9244 0, /* properties_provided */
9245 0, /* properties_destroyed */
9246 0, /* todo_flags_start */
9247 0, /* todo_flags_finish */
9250 class pass_fixup_cfg : public gimple_opt_pass
9252 public:
9253 pass_fixup_cfg (gcc::context *ctxt)
9254 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9257 /* opt_pass methods: */
9258 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9259 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9261 }; // class pass_fixup_cfg
9263 } // anon namespace
9265 gimple_opt_pass *
9266 make_pass_fixup_cfg (gcc::context *ctxt)
9268 return new pass_fixup_cfg (ctxt);
9271 /* Garbage collection support for edge_def. */
9273 extern void gt_ggc_mx (tree&);
9274 extern void gt_ggc_mx (gimple *&);
9275 extern void gt_ggc_mx (rtx&);
9276 extern void gt_ggc_mx (basic_block&);
9278 static void
9279 gt_ggc_mx (rtx_insn *& x)
9281 if (x)
9282 gt_ggc_mx_rtx_def ((void *) x);
9285 void
9286 gt_ggc_mx (edge_def *e)
9288 tree block = LOCATION_BLOCK (e->goto_locus);
9289 gt_ggc_mx (e->src);
9290 gt_ggc_mx (e->dest);
9291 if (current_ir_type () == IR_GIMPLE)
9292 gt_ggc_mx (e->insns.g);
9293 else
9294 gt_ggc_mx (e->insns.r);
9295 gt_ggc_mx (block);
9298 /* PCH support for edge_def. */
9300 extern void gt_pch_nx (tree&);
9301 extern void gt_pch_nx (gimple *&);
9302 extern void gt_pch_nx (rtx&);
9303 extern void gt_pch_nx (basic_block&);
9305 static void
9306 gt_pch_nx (rtx_insn *& x)
9308 if (x)
9309 gt_pch_nx_rtx_def ((void *) x);
9312 void
9313 gt_pch_nx (edge_def *e)
9315 tree block = LOCATION_BLOCK (e->goto_locus);
9316 gt_pch_nx (e->src);
9317 gt_pch_nx (e->dest);
9318 if (current_ir_type () == IR_GIMPLE)
9319 gt_pch_nx (e->insns.g);
9320 else
9321 gt_pch_nx (e->insns.r);
9322 gt_pch_nx (block);
9325 void
9326 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9328 tree block = LOCATION_BLOCK (e->goto_locus);
9329 op (&(e->src), cookie);
9330 op (&(e->dest), cookie);
9331 if (current_ir_type () == IR_GIMPLE)
9332 op (&(e->insns.g), cookie);
9333 else
9334 op (&(e->insns.r), cookie);
9335 op (&(block), cookie);
9338 #if CHECKING_P
9340 namespace selftest {
9342 /* Helper function for CFG selftests: create a dummy function decl
9343 and push it as cfun. */
9345 static tree
9346 push_fndecl (const char *name)
9348 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9349 /* FIXME: this uses input_location: */
9350 tree fndecl = build_fn_decl (name, fn_type);
9351 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9352 NULL_TREE, integer_type_node);
9353 DECL_RESULT (fndecl) = retval;
9354 push_struct_function (fndecl);
9355 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9356 ASSERT_TRUE (fun != NULL);
9357 init_empty_tree_cfg_for_function (fun);
9358 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9359 ASSERT_EQ (0, n_edges_for_fn (fun));
9360 return fndecl;
9363 /* These tests directly create CFGs.
9364 Compare with the static fns within tree-cfg.c:
9365 - build_gimple_cfg
9366 - make_blocks: calls create_basic_block (seq, bb);
9367 - make_edges. */
9369 /* Verify a simple cfg of the form:
9370 ENTRY -> A -> B -> C -> EXIT. */
9372 static void
9373 test_linear_chain ()
9375 gimple_register_cfg_hooks ();
9377 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9378 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9380 /* Create some empty blocks. */
9381 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9382 basic_block bb_b = create_empty_bb (bb_a);
9383 basic_block bb_c = create_empty_bb (bb_b);
9385 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9386 ASSERT_EQ (0, n_edges_for_fn (fun));
9388 /* Create some edges: a simple linear chain of BBs. */
9389 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9390 make_edge (bb_a, bb_b, 0);
9391 make_edge (bb_b, bb_c, 0);
9392 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9394 /* Verify the edges. */
9395 ASSERT_EQ (4, n_edges_for_fn (fun));
9396 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9397 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9398 ASSERT_EQ (1, bb_a->preds->length ());
9399 ASSERT_EQ (1, bb_a->succs->length ());
9400 ASSERT_EQ (1, bb_b->preds->length ());
9401 ASSERT_EQ (1, bb_b->succs->length ());
9402 ASSERT_EQ (1, bb_c->preds->length ());
9403 ASSERT_EQ (1, bb_c->succs->length ());
9404 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9405 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9407 /* Verify the dominance information.
9408 Each BB in our simple chain should be dominated by the one before
9409 it. */
9410 calculate_dominance_info (CDI_DOMINATORS);
9411 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9412 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9413 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9414 ASSERT_EQ (1, dom_by_b.length ());
9415 ASSERT_EQ (bb_c, dom_by_b[0]);
9416 free_dominance_info (CDI_DOMINATORS);
9417 dom_by_b.release ();
9419 /* Similarly for post-dominance: each BB in our chain is post-dominated
9420 by the one after it. */
9421 calculate_dominance_info (CDI_POST_DOMINATORS);
9422 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9423 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9424 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9425 ASSERT_EQ (1, postdom_by_b.length ());
9426 ASSERT_EQ (bb_a, postdom_by_b[0]);
9427 free_dominance_info (CDI_POST_DOMINATORS);
9428 postdom_by_b.release ();
9430 pop_cfun ();
9433 /* Verify a simple CFG of the form:
9434   ENTRY
9435     |
9436     A
9437    / \
9438  /t   \f
9439  B     C
9440   \   /
9441    \ /
9442     D
9443     |
9444   EXIT. */
9446 static void
9447 test_diamond ()
9449 gimple_register_cfg_hooks ();
9451 tree fndecl = push_fndecl ("cfg_test_diamond");
9452 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9454 /* Create some empty blocks. */
9455 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9456 basic_block bb_b = create_empty_bb (bb_a);
9457 basic_block bb_c = create_empty_bb (bb_a);
9458 basic_block bb_d = create_empty_bb (bb_b);
9460 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9461 ASSERT_EQ (0, n_edges_for_fn (fun));
9463 /* Create the edges. */
9464 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9465 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9466 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9467 make_edge (bb_b, bb_d, 0);
9468 make_edge (bb_c, bb_d, 0);
9469 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9471 /* Verify the edges. */
9472 ASSERT_EQ (6, n_edges_for_fn (fun));
9473 ASSERT_EQ (1, bb_a->preds->length ());
9474 ASSERT_EQ (2, bb_a->succs->length ());
9475 ASSERT_EQ (1, bb_b->preds->length ());
9476 ASSERT_EQ (1, bb_b->succs->length ());
9477 ASSERT_EQ (1, bb_c->preds->length ());
9478 ASSERT_EQ (1, bb_c->succs->length ());
9479 ASSERT_EQ (2, bb_d->preds->length ());
9480 ASSERT_EQ (1, bb_d->succs->length ());
9482 /* Verify the dominance information. */
9483 calculate_dominance_info (CDI_DOMINATORS);
9484 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9485 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9486 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9487 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9488 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9489 dom_by_a.release ();
9490 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9491 ASSERT_EQ (0, dom_by_b.length ());
9492 dom_by_b.release ();
9493 free_dominance_info (CDI_DOMINATORS);
9495 /* Similarly for post-dominance. */
9496 calculate_dominance_info (CDI_POST_DOMINATORS);
9497 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9498 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9499 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9500 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9501 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9502 postdom_by_d.release ();
9503 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9504 ASSERT_EQ (0, postdom_by_b.length ());
9505 postdom_by_b.release ();
9506 free_dominance_info (CDI_POST_DOMINATORS);
9508 pop_cfun ();
9511 /* Verify that we can handle a CFG containing a "complete" aka
9512 fully-connected subgraph (where A, B, C and D below all have edges
9513 pointing to every other node, and also to themselves).
9514 e.g.:
9515   ENTRY  EXIT
9516     |    ^
9517     |   /
9518     |  /
9519     | /
9520     |/
9521     A<--->B
9522     ^^   ^^
9523     | \ / |
9524     |  X  |
9525     | / \ |
9526     VV   VV
9527     C<--->D
9528 */
9530 static void
9531 test_fully_connected ()
9533 gimple_register_cfg_hooks ();
9535 tree fndecl = push_fndecl ("cfg_fully_connected");
9536 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9538 const int n = 4;
9540 /* Create some empty blocks. */
9541 auto_vec <basic_block> subgraph_nodes;
9542 for (int i = 0; i < n; i++)
9543 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9545 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9546 ASSERT_EQ (0, n_edges_for_fn (fun));
9548 /* Create the edges. */
9549 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9550 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9551 for (int i = 0; i < n; i++)
9552 for (int j = 0; j < n; j++)
9553 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9555 /* Verify the edges. */
9556 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9557 /* The first one is linked to ENTRY/EXIT as well as itself and
9558 everything else. */
9559 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9560 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9561 /* The other ones in the subgraph are linked to everything in
9562 the subgraph (including themselves). */
9563 for (int i = 1; i < n; i++)
9565 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9566 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9569 /* Verify the dominance information. */
9570 calculate_dominance_info (CDI_DOMINATORS);
9571 /* The initial block in the subgraph should be dominated by ENTRY. */
9572 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9573 get_immediate_dominator (CDI_DOMINATORS,
9574 subgraph_nodes[0]));
9575 /* Every other block in the subgraph should be dominated by the
9576 initial block. */
9577 for (int i = 1; i < n; i++)
9578 ASSERT_EQ (subgraph_nodes[0],
9579 get_immediate_dominator (CDI_DOMINATORS,
9580 subgraph_nodes[i]));
9581 free_dominance_info (CDI_DOMINATORS);
9583 /* Similarly for post-dominance. */
9584 calculate_dominance_info (CDI_POST_DOMINATORS);
9585 /* The initial block in the subgraph should be postdominated by EXIT. */
9586 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9587 get_immediate_dominator (CDI_POST_DOMINATORS,
9588 subgraph_nodes[0]));
9589 /* Every other block in the subgraph should be postdominated by the
9590 initial block, since that leads to EXIT. */
9591 for (int i = 1; i < n; i++)
9592 ASSERT_EQ (subgraph_nodes[0],
9593 get_immediate_dominator (CDI_POST_DOMINATORS,
9594 subgraph_nodes[i]));
9595 free_dominance_info (CDI_POST_DOMINATORS);
9597 pop_cfun ();
9600 /* Run all of the selftests within this file. */
9602 void
9603 tree_cfg_c_tests ()
9605 test_linear_chain ();
9606 test_diamond ();
9607 test_fully_connected ();
9610 } // namespace selftest
9612 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9613 - loop
9614 - nested loops
9615 - switch statement (a block with many out-edges)
9616 - something that jumps to itself
9617 - etc */
9619 #endif /* CHECKING_P */