/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
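
/* For illustration (not part of the original sources): a C loop such as

       #pragma GCC ivdep
       for (i = 0; i < n; i++)
	 a[i] = b[i] + c[i];

   is gimplified so that the block ending in the loop's GIMPLE_COND is
   preceded by something like

       _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
       if (_2 != 0) goto <body>; else goto <exit>;

   The code above records safelen = INT_MAX on the loop and rewrites the
   .ANNOTATE call into the plain copy "_2 = _1".  The SSA names and block
   labels here are illustrative only.  */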

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
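
/* For illustration (not part of the original sources): the GNU C statement

       goto *p;

   reaches here as a GIMPLE_GOTO whose destination is the variable or SSA
   name "p" rather than a LABEL_DECL, so computed_goto_p returns true for
   it, while a plain "goto lab;" yields false.  */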

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
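
/* For illustration (not part of the original sources): given the sequence

       x = foo ();	<-- ECF_RETURNS_TWICE, e.g. a setjmp-like call
       y = x + 1;

   the call both ends its block and can be re-entered abnormally, so the
   LHS rewrite above produces

       <bb 1>: tmp = foo ();
       <bb 2>: x = tmp;	<-- copy starts the new block
	       y = x + 1;

   keeping the old value of "x" available on the abnormal edge.  The names
   "foo", "x", "y" and "tmp" are illustrative only.  */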

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
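
/* For illustration (not part of the original sources): two computed gotos

       <bb 3>: goto *p_1;		<bb 4>: goto *q_2;

   are factored through a single dispatcher block, roughly

       <bb 3>: gotovar = p_1;		<bb 4>: gotovar = q_2;
	       goto <bb D>;			goto <bb D>;
       <bb D>: factored_label:
	       goto *gotovar;

   so only bb D needs an edge to every FORCED_LABEL target instead of
   every computed goto needing one.  Block numbers and SSA names here are
   illustrative only.  */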

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      int freq = 0;
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	  freq += EDGE_FREQUENCY (e);
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
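
/* For illustration (not part of the original sources): for a source line
   such as

       if (p) foo (); else bar ();

   the condition block, the "foo ()" block and the "bar ()" block can all
   carry the same line number, so assign_discriminators below hands out
   successive values (1, 2, ...) from this per-line counter, letting a
   sample-based profiler attribute counts to each block separately.  */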

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
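
/* For illustration (not part of the original sources): if cases 1, 3 and 7
   of a switch all jump to the same block, the loop above leaves them
   chained through their CASE_CHAIN fields under the single edge to that
   block, i.e. edge -> case 7 -> case 3 -> case 1 -> NULL, so redirecting
   that one edge can update all three CASE_LABEL_EXPRs at once.  The case
   values are illustrative only.  */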

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
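
/* For illustration (not part of the original sources): a switch such as

       switch (x) { case 1: case 2: case 3: foo (); break; default: ... }

   arrives here with three separate CASE_LABEL_EXPRs whose values are
   consecutive and whose labels map to the same block, so the loop above
   collapses them into the single range entry "case 1 ... 3:".  */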

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1957 /* Merge block B into block A. */
1959 static void
1960 gimple_merge_blocks (basic_block a, basic_block b)
1962 gimple_stmt_iterator last, gsi;
1963 gphi_iterator psi;
1965 if (dump_file)
1966 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1968 /* Remove all single-valued PHI nodes from block B of the form
1969 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1970 gsi = gsi_last_bb (a);
1971 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1973 gimple *phi = gsi_stmt (psi);
1974 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1975 gimple *copy;
1976 bool may_replace_uses = (virtual_operand_p (def)
1977 || may_propagate_copy (def, use));
1979 /* In case we maintain loop closed ssa form, do not propagate arguments
1980 of loop exit phi nodes. */
1981 if (current_loops
1982 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1983 && !virtual_operand_p (def)
1984 && TREE_CODE (use) == SSA_NAME
1985 && a->loop_father != b->loop_father)
1986 may_replace_uses = false;
1988 if (!may_replace_uses)
1990 gcc_assert (!virtual_operand_p (def));
1992 /* Note that just emitting the copies is fine -- there is no problem
1993 with ordering of phi nodes. This is because A is the single
1994 predecessor of B, therefore results of the phi nodes cannot
1995 appear as arguments of the phi nodes. */
1996 copy = gimple_build_assign (def, use);
1997 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1998 remove_phi_node (&psi, false);
2000 else
2002 /* If we deal with a PHI for virtual operands, we can simply
2003 propagate these without fussing with folding or updating
2004 the stmt. */
2005 if (virtual_operand_p (def))
2007 imm_use_iterator iter;
2008 use_operand_p use_p;
2009 gimple *stmt;
2011 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2012 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2013 SET_USE (use_p, use);
2015 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2016 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2018 else
2019 replace_uses_by (def, use);
2021 remove_phi_node (&psi, true);
2025 /* Ensure that B follows A. */
2026 move_block_after (b, a);
2028 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2029 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2031 /* Remove labels from B and set gimple_bb to A for other statements. */
2032 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2034 gimple *stmt = gsi_stmt (gsi);
2035 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2037 tree label = gimple_label_label (label_stmt);
2038 int lp_nr;
2040 gsi_remove (&gsi, false);
2042 /* Now that we can thread computed gotos, we might have
2043 a situation where we have a forced label in block B.
2044 However, the label at the start of block B might still be
2045 used in other ways (think about the runtime checking for
2046 Fortran assigned gotos). So we cannot just delete the
2047 label. Instead we move the label to the start of block A. */
2048 if (FORCED_LABEL (label))
2050 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2051 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2053 /* Other user labels are kept around in the form of a debug stmt. */
2054 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2056 gimple *dbg = gimple_build_debug_bind (label,
2057 integer_zero_node,
2058 stmt);
2059 gimple_debug_bind_reset_value (dbg);
2060 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2063 lp_nr = EH_LANDING_PAD_NR (label);
2064 if (lp_nr)
2066 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2067 lp->post_landing_pad = NULL;
2070 else
2072 gimple_set_bb (stmt, a);
2073 gsi_next (&gsi);
2077 /* When merging two BBs, if their counts are different, the larger count
2078 is selected as the new bb count. This is to handle inconsistent
2079 profiles. */
2080 if (a->loop_father == b->loop_father)
2082 a->count = a->count.merge (b->count);
2085 /* Merge the sequences. */
2086 last = gsi_last_bb (a);
2087 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2088 set_bb_seq (b, NULL);
2090 if (cfgcleanup_altered_bbs)
2091 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
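/* Minimal sketch of the PHI handling above (hypothetical blocks and
   SSA names):

     <bb A>:
       ...
     <bb B>:
       # v_1 = PHI <v_2 (A)>
       use (v_1);

   becomes "use (v_2);" in the merged block when propagation is
   allowed; otherwise a copy "v_1 = v_2;" is emitted at the end of A
   and the PHI node is removed.  */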
2095 /* Return the one of two successors of BB that is not reachable by a
2096 complex edge, if there is one. Else, return BB. We use
2097 this in optimizations that use post-dominators for their heuristics,
2098 to catch the cases in C++ where function calls are involved. */
2100 basic_block
2101 single_noncomplex_succ (basic_block bb)
2103 edge e0, e1;
2104 if (EDGE_COUNT (bb->succs) != 2)
2105 return bb;
2107 e0 = EDGE_SUCC (bb, 0);
2108 e1 = EDGE_SUCC (bb, 1);
2109 if (e0->flags & EDGE_COMPLEX)
2110 return e1->dest;
2111 if (e1->flags & EDGE_COMPLEX)
2112 return e0->dest;
2114 return bb;
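/* For instance, a block ending in a throwing call can have a normal
   fallthru successor plus an EH successor; the EH edge is marked
   EDGE_COMPLEX, so single_noncomplex_succ returns the fallthru
   destination, while a block without exactly two successors yields
   BB itself.  */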
2117 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2119 void
2120 notice_special_calls (gcall *call)
2122 int flags = gimple_call_flags (call);
2124 if (flags & ECF_MAY_BE_ALLOCA)
2125 cfun->calls_alloca = true;
2126 if (flags & ECF_RETURNS_TWICE)
2127 cfun->calls_setjmp = true;
2131 /* Clear flags set by notice_special_calls. Used by dead code removal
2132 to update the flags. */
2134 void
2135 clear_special_calls (void)
2137 cfun->calls_alloca = false;
2138 cfun->calls_setjmp = false;
2141 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2143 static void
2144 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2146 /* Since this block is no longer reachable, we can just delete all
2147 of its PHI nodes. */
2148 remove_phi_nodes (bb);
2150 /* Remove edges to BB's successors. */
2151 while (EDGE_COUNT (bb->succs) > 0)
2152 remove_edge (EDGE_SUCC (bb, 0));
2156 /* Remove statements of basic block BB. */
2158 static void
2159 remove_bb (basic_block bb)
2161 gimple_stmt_iterator i;
2163 if (dump_file)
2165 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2166 if (dump_flags & TDF_DETAILS)
2168 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2169 fprintf (dump_file, "\n");
2173 if (current_loops)
2175 struct loop *loop = bb->loop_father;
2177 /* If a loop gets removed, clean up the information associated
2178 with it. */
2179 if (loop->latch == bb
2180 || loop->header == bb)
2181 free_numbers_of_iterations_estimates (loop);
2184 /* Remove all the instructions in the block. */
2185 if (bb_seq (bb) != NULL)
2187 /* Walk backwards so as to get a chance to substitute all
2188 released DEFs into debug stmts. See
2189 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2190 details. */
2191 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2193 gimple *stmt = gsi_stmt (i);
2194 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2195 if (label_stmt
2196 && (FORCED_LABEL (gimple_label_label (label_stmt))
2197 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2199 basic_block new_bb;
2200 gimple_stmt_iterator new_gsi;
2202 /* A non-reachable non-local label may still be referenced.
2203 But it no longer needs to carry the extra semantics of
2204 non-locality. */
2205 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2207 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2208 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2211 new_bb = bb->prev_bb;
2212 new_gsi = gsi_start_bb (new_bb);
2213 gsi_remove (&i, false);
2214 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2216 else
2218 /* Release SSA definitions. */
2219 release_defs (stmt);
2220 gsi_remove (&i, true);
2223 if (gsi_end_p (i))
2224 i = gsi_last_bb (bb);
2225 else
2226 gsi_prev (&i);
2230 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2231 bb->il.gimple.seq = NULL;
2232 bb->il.gimple.phi_nodes = NULL;
2236 /* Given a basic block BB ending with COND_EXPR, SWITCH_EXPR or a
2237 computed GOTO, and a predicate VAL, return the edge that will be
2238 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
2240 edge
2241 find_taken_edge (basic_block bb, tree val)
2243 gimple *stmt;
2245 stmt = last_stmt (bb);
2247 gcc_assert (is_ctrl_stmt (stmt));
2249 if (gimple_code (stmt) == GIMPLE_COND)
2250 return find_taken_edge_cond_expr (bb, val);
2252 if (gimple_code (stmt) == GIMPLE_SWITCH)
2253 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2255 if (computed_goto_p (stmt))
2257 /* Only optimize if the argument is a label; if the argument is
2258 not a label, then we cannot construct a proper CFG.
2260 It may be the case that we only need to allow the LABEL_REF to
2261 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2262 appear inside a LABEL_EXPR just to be safe. */
2263 if (val
2264 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2265 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2266 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2267 return NULL;
2270 gcc_unreachable ();
2273 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2274 statement, determine which of the outgoing edges will be taken out of the
2275 block. Return NULL if any edge may be taken. */
2277 static edge
2278 find_taken_edge_computed_goto (basic_block bb, tree val)
2280 basic_block dest;
2281 edge e = NULL;
2283 dest = label_to_block (val);
2284 if (dest)
2286 e = find_edge (bb, dest);
2287 gcc_assert (e != NULL);
2290 return e;
2293 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2294 statement, determine which of the two edges will be taken out of the
2295 block. Return NULL if either edge may be taken. */
2297 static edge
2298 find_taken_edge_cond_expr (basic_block bb, tree val)
2300 edge true_edge, false_edge;
2302 if (val == NULL
2303 || TREE_CODE (val) != INTEGER_CST)
2304 return NULL;
2306 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2308 return (integer_zerop (val) ? false_edge : true_edge);
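/* E.g. for a block ending in "if (x_1 > 2)", VAL == 0 selects the
   false edge and any other INTEGER_CST the true edge; a non-constant
   VAL yields NULL because either edge may be taken.  */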
2311 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2312 statement, determine which edge will be taken out of the block. Return
2313 NULL if any edge may be taken. */
2315 static edge
2316 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2317 tree val)
2319 basic_block dest_bb;
2320 edge e;
2321 tree taken_case;
2323 if (gimple_switch_num_labels (switch_stmt) == 1)
2324 taken_case = gimple_switch_default_label (switch_stmt);
2325 else if (! val || TREE_CODE (val) != INTEGER_CST)
2326 return NULL;
2327 else
2328 taken_case = find_case_label_for_value (switch_stmt, val);
2329 dest_bb = label_to_block (CASE_LABEL (taken_case));
2331 e = find_edge (bb, dest_bb);
2332 gcc_assert (e);
2333 return e;
2337 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2338 We can make optimal use here of the fact that the case labels are
2339 sorted: We can do a binary search for a case matching VAL. */
2341 static tree
2342 find_case_label_for_value (gswitch *switch_stmt, tree val)
2344 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2345 tree default_case = gimple_switch_default_label (switch_stmt);
2347 for (low = 0, high = n; high - low > 1; )
2349 size_t i = (high + low) / 2;
2350 tree t = gimple_switch_label (switch_stmt, i);
2351 int cmp;
2353 /* Cache the result of comparing CASE_LOW and val. */
2354 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2356 if (cmp > 0)
2357 high = i;
2358 else
2359 low = i;
2361 if (CASE_HIGH (t) == NULL)
2363 /* A single-valued case label. */
2364 if (cmp == 0)
2365 return t;
2367 else
2369 /* A case range. We can only handle integer ranges. */
2370 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2371 return t;
2375 return default_case;
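/* Sketch of the search above on hypothetical labels

     index:  0 (default)   1 (case 1)   2 (case 5 ... 9)   3 (case 12)

   For VAL == 7, probing index 2 gives CASE_LOW <= VAL <= CASE_HIGH and
   the range label is returned directly; for VAL == 3 the window narrows
   to [1, 2) with no match, so the default case is returned.  */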
2379 /* Dump a basic block on stderr. */
2381 void
2382 gimple_debug_bb (basic_block bb)
2384 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2388 /* Dump basic block with index N on stderr. */
2390 basic_block
2391 gimple_debug_bb_n (int n)
2393 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2394 return BASIC_BLOCK_FOR_FN (cfun, n);
2398 /* Dump the CFG on stderr.
2400 FLAGS are the same used by the tree dumping functions
2401 (see TDF_* in dumpfile.h). */
2403 void
2404 gimple_debug_cfg (dump_flags_t flags)
2406 gimple_dump_cfg (stderr, flags);
2410 /* Dump the program showing basic block boundaries on the given FILE.
2412 FLAGS are the same used by the tree dumping functions (see TDF_* in
2413 tree.h). */
2415 void
2416 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2418 if (flags & TDF_DETAILS)
2420 dump_function_header (file, current_function_decl, flags);
2421 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2422 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2423 last_basic_block_for_fn (cfun));
2425 brief_dump_cfg (file, flags);
2426 fprintf (file, "\n");
2429 if (flags & TDF_STATS)
2430 dump_cfg_stats (file);
2432 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2436 /* Dump CFG statistics on FILE. */
2438 void
2439 dump_cfg_stats (FILE *file)
2441 static long max_num_merged_labels = 0;
2442 unsigned long size, total = 0;
2443 long num_edges;
2444 basic_block bb;
2445 const char * const fmt_str = "%-30s%-13s%12s\n";
2446 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2447 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2448 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2449 const char *funcname = current_function_name ();
2451 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2453 fprintf (file, "---------------------------------------------------------\n");
2454 fprintf (file, fmt_str, "", " Number of ", "Memory");
2455 fprintf (file, fmt_str, "", " instances ", "used ");
2456 fprintf (file, "---------------------------------------------------------\n");
2458 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2459 total += size;
2460 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2461 SCALE (size), LABEL (size));
2463 num_edges = 0;
2464 FOR_EACH_BB_FN (bb, cfun)
2465 num_edges += EDGE_COUNT (bb->succs);
2466 size = num_edges * sizeof (struct edge_def);
2467 total += size;
2468 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2470 fprintf (file, "---------------------------------------------------------\n");
2471 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2472 LABEL (total));
2473 fprintf (file, "---------------------------------------------------------\n");
2474 fprintf (file, "\n");
2476 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2477 max_num_merged_labels = cfg_stats.num_merged_labels;
2479 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2480 cfg_stats.num_merged_labels, max_num_merged_labels);
2482 fprintf (file, "\n");
2486 /* Dump CFG statistics on stderr. Keep extern so that it's always
2487 linked in the final executable. */
2489 DEBUG_FUNCTION void
2490 debug_cfg_stats (void)
2492 dump_cfg_stats (stderr);
2495 /*---------------------------------------------------------------------------
2496 Miscellaneous helpers
2497 ---------------------------------------------------------------------------*/
2499 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2500 flow. Transfers of control flow associated with EH are excluded. */
2502 static bool
2503 call_can_make_abnormal_goto (gimple *t)
2505 /* If the function has no non-local labels and does not call setjmp,
2506 then a call cannot make an abnormal transfer of control. */
2507 if (!cfun->has_nonlocal_label
2508 && !cfun->calls_setjmp)
2509 return false;
2511 /* Likewise if the call has no side effects. */
2512 if (!gimple_has_side_effects (t))
2513 return false;
2515 /* Likewise if the called function is leaf. */
2516 if (gimple_call_flags (t) & ECF_LEAF)
2517 return false;
2519 return true;
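/* Hypothetical example: in a function that calls setjmp, a subsequent
   call "foo ();" may be returned to abnormally via longjmp, so it can
   make an abnormal goto -- unless foo is ECF_LEAF or the call has no
   side effects, in which case the checks above rule that out.  */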
2523 /* Return true if T can make an abnormal transfer of control flow.
2524 Transfers of control flow associated with EH are excluded. */
2526 bool
2527 stmt_can_make_abnormal_goto (gimple *t)
2529 if (computed_goto_p (t))
2530 return true;
2531 if (is_gimple_call (t))
2532 return call_can_make_abnormal_goto (t);
2533 return false;
2537 /* Return true if T represents a stmt that always transfers control. */
2539 bool
2540 is_ctrl_stmt (gimple *t)
2542 switch (gimple_code (t))
2544 case GIMPLE_COND:
2545 case GIMPLE_SWITCH:
2546 case GIMPLE_GOTO:
2547 case GIMPLE_RETURN:
2548 case GIMPLE_RESX:
2549 return true;
2550 default:
2551 return false;
2556 /* Return true if T is a statement that may alter the flow of control
2557 (e.g., a call to a non-returning function). */
2559 bool
2560 is_ctrl_altering_stmt (gimple *t)
2562 gcc_assert (t);
2564 switch (gimple_code (t))
2566 case GIMPLE_CALL:
2567 /* The per-stmt call flag indicates whether the call could alter
2568 control flow. */
2569 if (gimple_call_ctrl_altering_p (t))
2570 return true;
2571 break;
2573 case GIMPLE_EH_DISPATCH:
2574 /* EH_DISPATCH branches to the individual catch handlers at
2575 this level of a try or allowed-exceptions region. It can
2576 fallthru to the next statement as well. */
2577 return true;
2579 case GIMPLE_ASM:
2580 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2581 return true;
2582 break;
2584 CASE_GIMPLE_OMP:
2585 /* OpenMP directives alter control flow. */
2586 return true;
2588 case GIMPLE_TRANSACTION:
2589 /* A transaction start alters control flow. */
2590 return true;
2592 default:
2593 break;
2596 /* If a statement can throw, it alters control flow. */
2597 return stmt_can_throw_internal (t);
2601 /* Return true if T is a simple local goto. */
2603 bool
2604 simple_goto_p (gimple *t)
2606 return (gimple_code (t) == GIMPLE_GOTO
2607 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2611 /* Return true if STMT should start a new basic block. PREV_STMT is
2612 the statement preceding STMT. It is used when STMT is a label or a
2613 case label. Labels should only start a new basic block if their
2614 previous statement wasn't a label. Otherwise, sequences of labels
2615 would generate unnecessary basic blocks that only contain a single
2616 label. */
2618 static inline bool
2619 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2621 if (stmt == NULL)
2622 return false;
2624 /* Labels start a new basic block only if the preceding statement
2625 wasn't a label of the same type. This prevents the creation of
2626 consecutive blocks that have nothing but a single label. */
2627 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2629 /* Nonlocal and computed GOTO targets always start a new block. */
2630 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2631 || FORCED_LABEL (gimple_label_label (label_stmt)))
2632 return true;
2634 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2636 if (DECL_NONLOCAL (gimple_label_label (
2637 as_a <glabel *> (prev_stmt))))
2638 return true;
2640 cfg_stats.num_merged_labels++;
2641 return false;
2643 else
2644 return true;
2646 else if (gimple_code (stmt) == GIMPLE_CALL)
2648 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2649 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2650 start a new block. */
2651 return true;
2652 if (gimple_call_internal_p (stmt, IFN_PHI)
2653 && prev_stmt
2654 && gimple_code (prev_stmt) != GIMPLE_LABEL
2655 && (gimple_code (prev_stmt) != GIMPLE_CALL
2656 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2657 /* PHI nodes start a new block unless preceded by a label
2658 or another PHI. */
2659 return true;
2662 return false;
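/* For instance, in the hypothetical sequence

     L1:
     L2:
       x_1 = 1;

   L2 does not start a new block; it is merged with L1 and counted in
   cfg_stats.num_merged_labels. A nonlocal or forced label, a
   returns-twice call such as setjmp, or an IFN_PHI call following an
   ordinary statement would each start a fresh block.  */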
2666 /* Return true if T should end a basic block. */
2668 bool
2669 stmt_ends_bb_p (gimple *t)
2671 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2674 /* Remove block annotations and other data structures. */
2676 void
2677 delete_tree_cfg_annotations (struct function *fn)
2679 vec_free (label_to_block_map_for_fn (fn));
2682 /* Return the virtual phi in BB. */
2684 gphi *
2685 get_virtual_phi (basic_block bb)
2687 for (gphi_iterator gsi = gsi_start_phis (bb);
2688 !gsi_end_p (gsi);
2689 gsi_next (&gsi))
2691 gphi *phi = gsi.phi ();
2693 if (virtual_operand_p (PHI_RESULT (phi)))
2694 return phi;
2697 return NULL;
2700 /* Return the first statement in basic block BB. */
2702 gimple *
2703 first_stmt (basic_block bb)
2705 gimple_stmt_iterator i = gsi_start_bb (bb);
2706 gimple *stmt = NULL;
2708 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2710 gsi_next (&i);
2711 stmt = NULL;
2713 return stmt;
2716 /* Return the first non-label statement in basic block BB. */
2718 static gimple *
2719 first_non_label_stmt (basic_block bb)
2721 gimple_stmt_iterator i = gsi_start_bb (bb);
2722 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2723 gsi_next (&i);
2724 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2727 /* Return the last statement in basic block BB. */
2729 gimple *
2730 last_stmt (basic_block bb)
2732 gimple_stmt_iterator i = gsi_last_bb (bb);
2733 gimple *stmt = NULL;
2735 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2737 gsi_prev (&i);
2738 stmt = NULL;
2740 return stmt;
2743 /* Return the last statement of an otherwise empty block. Return NULL
2744 if the block is totally empty, or if it contains more than one
2745 statement. */
2747 gimple *
2748 last_and_only_stmt (basic_block bb)
2750 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2751 gimple *last, *prev;
2753 if (gsi_end_p (i))
2754 return NULL;
2756 last = gsi_stmt (i);
2757 gsi_prev_nondebug (&i);
2758 if (gsi_end_p (i))
2759 return last;
2761 /* Empty statements should no longer appear in the instruction stream.
2762 Everything that might have appeared before should be deleted by
2763 remove_useless_stmts, and the optimizers should just gsi_remove
2764 instead of smashing with build_empty_stmt.
2766 Thus the only thing that should appear here in a block containing
2767 one executable statement is a label. */
2768 prev = gsi_stmt (i);
2769 if (gimple_code (prev) == GIMPLE_LABEL)
2770 return last;
2771 else
2772 return NULL;
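/* For instance, a block containing only "L1:" and "return x_1;" yields
   the GIMPLE_RETURN (debug stmts are ignored on the way), whereas a
   block with two executable statements yields NULL.  */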
2775 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2777 static void
2778 reinstall_phi_args (edge new_edge, edge old_edge)
2780 edge_var_map *vm;
2781 int i;
2782 gphi_iterator phis;
2784 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2785 if (!v)
2786 return;
2788 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2789 v->iterate (i, &vm) && !gsi_end_p (phis);
2790 i++, gsi_next (&phis))
2792 gphi *phi = phis.phi ();
2793 tree result = redirect_edge_var_map_result (vm);
2794 tree arg = redirect_edge_var_map_def (vm);
2796 gcc_assert (result == gimple_phi_result (phi));
2798 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2801 redirect_edge_var_map_clear (old_edge);
2804 /* Returns the basic block after which the new basic block created
2805 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2806 near its "logical" location. This is of most help to humans looking
2807 at debugging dumps. */
2809 basic_block
2810 split_edge_bb_loc (edge edge_in)
2812 basic_block dest = edge_in->dest;
2813 basic_block dest_prev = dest->prev_bb;
2815 if (dest_prev)
2817 edge e = find_edge (dest_prev, dest);
2818 if (e && !(e->flags & EDGE_COMPLEX))
2819 return edge_in->src;
2821 return dest_prev;
2824 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2825 Abort on abnormal edges. */
2827 static basic_block
2828 gimple_split_edge (edge edge_in)
2830 basic_block new_bb, after_bb, dest;
2831 edge new_edge, e;
2833 /* Abnormal edges cannot be split. */
2834 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2836 dest = edge_in->dest;
2838 after_bb = split_edge_bb_loc (edge_in);
2840 new_bb = create_empty_bb (after_bb);
2841 new_bb->count = edge_in->count ();
2843 e = redirect_edge_and_branch (edge_in, new_bb);
2844 gcc_assert (e == edge_in);
2846 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2847 reinstall_phi_args (new_edge, e);
2849 return new_bb;
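/* Sketch: with edges bb2->bb3, bb2->bb4 and bb3->bb4, the edge
   bb2->bb4 is critical. Splitting it creates an empty bb5 so that
   bb2->bb5->bb4, where bb5->bb4 is the fallthru edge onto which the
   PHI arguments queued on the old edge are reinstalled.  */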
2853 /* Verify properties of the address expression T with base object BASE. */
2855 static tree
2856 verify_address (tree t, tree base)
2858 bool old_constant;
2859 bool old_side_effects;
2860 bool new_constant;
2861 bool new_side_effects;
2863 old_constant = TREE_CONSTANT (t);
2864 old_side_effects = TREE_SIDE_EFFECTS (t);
2866 recompute_tree_invariant_for_addr_expr (t);
2867 new_side_effects = TREE_SIDE_EFFECTS (t);
2868 new_constant = TREE_CONSTANT (t);
2870 if (old_constant != new_constant)
2872 error ("constant not recomputed when ADDR_EXPR changed");
2873 return t;
2875 if (old_side_effects != new_side_effects)
2877 error ("side effects not recomputed when ADDR_EXPR changed");
2878 return t;
2881 if (!(VAR_P (base)
2882 || TREE_CODE (base) == PARM_DECL
2883 || TREE_CODE (base) == RESULT_DECL))
2884 return NULL_TREE;
2886 if (DECL_GIMPLE_REG_P (base))
2888 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2889 return base;
2892 return NULL_TREE;
2895 /* Callback for walk_tree, check that all elements with address taken are
2896 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2897 inside a PHI node. */
2899 static tree
2900 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2902 tree t = *tp, x;
2904 if (TYPE_P (t))
2905 *walk_subtrees = 0;
2907 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2908 #define CHECK_OP(N, MSG) \
2909 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2910 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2912 switch (TREE_CODE (t))
2914 case SSA_NAME:
2915 if (SSA_NAME_IN_FREE_LIST (t))
2917 error ("SSA name in freelist but still referenced");
2918 return *tp;
2920 break;
2922 case PARM_DECL:
2923 case VAR_DECL:
2924 case RESULT_DECL:
2926 tree context = decl_function_context (t);
2927 if (context != cfun->decl
2928 && !SCOPE_FILE_SCOPE_P (context)
2929 && !TREE_STATIC (t)
2930 && !DECL_EXTERNAL (t))
2932 error ("Local declaration from a different function");
2933 return t;
2936 break;
2938 case INDIRECT_REF:
2939 error ("INDIRECT_REF in gimple IL");
2940 return t;
2942 case MEM_REF:
2943 x = TREE_OPERAND (t, 0);
2944 if (!POINTER_TYPE_P (TREE_TYPE (x))
2945 || !is_gimple_mem_ref_addr (x))
2947 error ("invalid first operand of MEM_REF");
2948 return x;
2950 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2951 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2953 error ("invalid offset operand of MEM_REF");
2954 return TREE_OPERAND (t, 1);
2956 if (TREE_CODE (x) == ADDR_EXPR)
2958 tree va = verify_address (x, TREE_OPERAND (x, 0));
2959 if (va)
2960 return va;
2961 x = TREE_OPERAND (x, 0);
2963 walk_tree (&x, verify_expr, data, NULL);
2964 *walk_subtrees = 0;
2965 break;
2967 case ASSERT_EXPR:
2968 x = fold (ASSERT_EXPR_COND (t));
2969 if (x == boolean_false_node)
2971 error ("ASSERT_EXPR with an always-false condition");
2972 return *tp;
2974 break;
2976 case MODIFY_EXPR:
2977 error ("MODIFY_EXPR not expected while having tuples");
2978 return *tp;
2980 case ADDR_EXPR:
2982 tree tem;
2984 gcc_assert (is_gimple_address (t));
2986 /* Skip any references (they will be checked when we recurse down the
2987 tree) and ensure that any variable used as a prefix is marked
2988 addressable. */
2989 for (x = TREE_OPERAND (t, 0);
2990 handled_component_p (x);
2991 x = TREE_OPERAND (x, 0))
2994 if ((tem = verify_address (t, x)))
2995 return tem;
2997 if (!(VAR_P (x)
2998 || TREE_CODE (x) == PARM_DECL
2999 || TREE_CODE (x) == RESULT_DECL))
3000 return NULL;
3002 if (!TREE_ADDRESSABLE (x))
3004 error ("address taken, but ADDRESSABLE bit not set");
3005 return x;
3008 break;
3011 case COND_EXPR:
3012 x = COND_EXPR_COND (t);
3013 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3015 error ("non-integral used in condition");
3016 return x;
3018 if (!is_gimple_condexpr (x))
3020 error ("invalid conditional operand");
3021 return x;
3023 break;
3025 case NON_LVALUE_EXPR:
3026 case TRUTH_NOT_EXPR:
3027 gcc_unreachable ();
3029 CASE_CONVERT:
3030 case FIX_TRUNC_EXPR:
3031 case FLOAT_EXPR:
3032 case NEGATE_EXPR:
3033 case ABS_EXPR:
3034 case BIT_NOT_EXPR:
3035 CHECK_OP (0, "invalid operand to unary operator");
3036 break;
3038 case REALPART_EXPR:
3039 case IMAGPART_EXPR:
3040 case BIT_FIELD_REF:
3041 if (!is_gimple_reg_type (TREE_TYPE (t)))
3043 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3044 return t;
3047 if (TREE_CODE (t) == BIT_FIELD_REF)
3049 tree t0 = TREE_OPERAND (t, 0);
3050 tree t1 = TREE_OPERAND (t, 1);
3051 tree t2 = TREE_OPERAND (t, 2);
3052 if (!tree_fits_uhwi_p (t1)
3053 || !tree_fits_uhwi_p (t2)
3054 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3055 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3057 error ("invalid position or size operand to BIT_FIELD_REF");
3058 return t;
3060 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3061 && (TYPE_PRECISION (TREE_TYPE (t))
3062 != tree_to_uhwi (t1)))
3064 error ("integral result type precision does not match "
3065 "field size of BIT_FIELD_REF");
3066 return t;
3068 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3069 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3070 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3071 != tree_to_uhwi (t1)))
3073 error ("mode size of non-integral result does not "
3074 "match field size of BIT_FIELD_REF");
3075 return t;
3077 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3078 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3079 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3081 error ("position plus size exceeds size of referenced object in "
3082 "BIT_FIELD_REF");
3083 return t;
3086 t = TREE_OPERAND (t, 0);
3088 /* Fall-through. */
3089 case COMPONENT_REF:
3090 case ARRAY_REF:
3091 case ARRAY_RANGE_REF:
3092 case VIEW_CONVERT_EXPR:
3093 /* We have a nest of references. Verify that each of the operands
3094 that determine where to reference is either a constant or a variable,
3095 verify that the base is valid, and then show we've already checked
3096 the subtrees. */
3097 while (handled_component_p (t))
3099 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3100 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3101 else if (TREE_CODE (t) == ARRAY_REF
3102 || TREE_CODE (t) == ARRAY_RANGE_REF)
3104 CHECK_OP (1, "invalid array index");
3105 if (TREE_OPERAND (t, 2))
3106 CHECK_OP (2, "invalid array lower bound");
3107 if (TREE_OPERAND (t, 3))
3108 CHECK_OP (3, "invalid array stride");
3110 else if (TREE_CODE (t) == BIT_FIELD_REF
3111 || TREE_CODE (t) == REALPART_EXPR
3112 || TREE_CODE (t) == IMAGPART_EXPR)
3114 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3115 "REALPART_EXPR");
3116 return t;
3119 t = TREE_OPERAND (t, 0);
3122 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3124 error ("invalid reference prefix");
3125 return t;
3127 walk_tree (&t, verify_expr, data, NULL);
3128 *walk_subtrees = 0;
3129 break;
3130 case PLUS_EXPR:
3131 case MINUS_EXPR:
3132 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done
3133 using POINTER_PLUS_EXPR. */
3134 if (POINTER_TYPE_P (TREE_TYPE (t)))
3136 error ("invalid operand to plus/minus, type is a pointer");
3137 return t;
3139 CHECK_OP (0, "invalid operand to binary operator");
3140 CHECK_OP (1, "invalid operand to binary operator");
3141 break;
3143 case POINTER_PLUS_EXPR:
3144 /* Check to make sure the first operand is a pointer or reference type. */
3145 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3147 error ("invalid operand to pointer plus, first operand is not a pointer");
3148 return t;
3150 /* Check to make sure the second operand is a ptrofftype. */
3151 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3153 error ("invalid operand to pointer plus, second operand is not an "
3154 "integer type of appropriate width");
3155 return t;
3157 /* FALLTHROUGH */
3158 case LT_EXPR:
3159 case LE_EXPR:
3160 case GT_EXPR:
3161 case GE_EXPR:
3162 case EQ_EXPR:
3163 case NE_EXPR:
3164 case UNORDERED_EXPR:
3165 case ORDERED_EXPR:
3166 case UNLT_EXPR:
3167 case UNLE_EXPR:
3168 case UNGT_EXPR:
3169 case UNGE_EXPR:
3170 case UNEQ_EXPR:
3171 case LTGT_EXPR:
3172 case MULT_EXPR:
3173 case TRUNC_DIV_EXPR:
3174 case CEIL_DIV_EXPR:
3175 case FLOOR_DIV_EXPR:
3176 case ROUND_DIV_EXPR:
3177 case TRUNC_MOD_EXPR:
3178 case CEIL_MOD_EXPR:
3179 case FLOOR_MOD_EXPR:
3180 case ROUND_MOD_EXPR:
3181 case RDIV_EXPR:
3182 case EXACT_DIV_EXPR:
3183 case MIN_EXPR:
3184 case MAX_EXPR:
3185 case LSHIFT_EXPR:
3186 case RSHIFT_EXPR:
3187 case LROTATE_EXPR:
3188 case RROTATE_EXPR:
3189 case BIT_IOR_EXPR:
3190 case BIT_XOR_EXPR:
3191 case BIT_AND_EXPR:
3192 CHECK_OP (0, "invalid operand to binary operator");
3193 CHECK_OP (1, "invalid operand to binary operator");
3194 break;
3196 case CONSTRUCTOR:
3197 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3198 *walk_subtrees = 0;
3199 break;
3201 case CASE_LABEL_EXPR:
3202 if (CASE_CHAIN (t))
3204 error ("invalid CASE_CHAIN");
3205 return t;
3207 break;
3209 default:
3210 break;
3212 return NULL;
3214 #undef CHECK_OP
3218 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3219 Returns true if there is an error, otherwise false. */
3221 static bool
3222 verify_types_in_gimple_min_lval (tree expr)
3224 tree op;
3226 if (is_gimple_id (expr))
3227 return false;
3229 if (TREE_CODE (expr) != TARGET_MEM_REF
3230 && TREE_CODE (expr) != MEM_REF)
3232 error ("invalid expression for min lvalue");
3233 return true;
3236 /* TARGET_MEM_REFs are strange beasts. */
3237 if (TREE_CODE (expr) == TARGET_MEM_REF)
3238 return false;
3240 op = TREE_OPERAND (expr, 0);
3241 if (!is_gimple_val (op))
3243 error ("invalid operand in indirect reference");
3244 debug_generic_stmt (op);
3245 return true;
3247 /* Memory references now generally can involve a value conversion. */
3249 return false;
3252 /* Verify if EXPR is a valid GIMPLE reference expression. If
3253 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3254 if there is an error, otherwise false. */
3256 static bool
3257 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3259 while (handled_component_p (expr))
3261 tree op = TREE_OPERAND (expr, 0);
3263 if (TREE_CODE (expr) == ARRAY_REF
3264 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3266 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3267 || (TREE_OPERAND (expr, 2)
3268 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3269 || (TREE_OPERAND (expr, 3)
3270 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3272 error ("invalid operands to array reference");
3273 debug_generic_stmt (expr);
3274 return true;
3278 /* Verify if the reference array element types are compatible. */
3279 if (TREE_CODE (expr) == ARRAY_REF
3280 && !useless_type_conversion_p (TREE_TYPE (expr),
3281 TREE_TYPE (TREE_TYPE (op))))
3283 error ("type mismatch in array reference");
3284 debug_generic_stmt (TREE_TYPE (expr));
3285 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3286 return true;
3288 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3289 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3290 TREE_TYPE (TREE_TYPE (op))))
3292 error ("type mismatch in array range reference");
3293 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3294 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3295 return true;
3298 if ((TREE_CODE (expr) == REALPART_EXPR
3299 || TREE_CODE (expr) == IMAGPART_EXPR)
3300 && !useless_type_conversion_p (TREE_TYPE (expr),
3301 TREE_TYPE (TREE_TYPE (op))))
3303 error ("type mismatch in real/imagpart reference");
3304 debug_generic_stmt (TREE_TYPE (expr));
3305 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3306 return true;
3309 if (TREE_CODE (expr) == COMPONENT_REF
3310 && !useless_type_conversion_p (TREE_TYPE (expr),
3311 TREE_TYPE (TREE_OPERAND (expr, 1))))
3313 error ("type mismatch in component reference");
3314 debug_generic_stmt (TREE_TYPE (expr));
3315 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3316 return true;
3319 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3321 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3322 that their operand is not an SSA name or an invariant when
3323 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3324 bug). Otherwise there is nothing to verify; gross mismatches at
3325 most invoke undefined behavior. */
3326 if (require_lvalue
3327 && (TREE_CODE (op) == SSA_NAME
3328 || is_gimple_min_invariant (op)))
3330 error ("conversion of an SSA_NAME on the left hand side");
3331 debug_generic_stmt (expr);
3332 return true;
3334 else if (TREE_CODE (op) == SSA_NAME
3335 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3337 error ("conversion of register to a different size");
3338 debug_generic_stmt (expr);
3339 return true;
3341 else if (!handled_component_p (op))
3342 return false;
3345 expr = op;
3348 if (TREE_CODE (expr) == MEM_REF)
3350 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3352 error ("invalid address operand in MEM_REF");
3353 debug_generic_stmt (expr);
3354 return true;
3356 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3357 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3359 error ("invalid offset operand in MEM_REF");
3360 debug_generic_stmt (expr);
3361 return true;
3364 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3366 if (!TMR_BASE (expr)
3367 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3369 error ("invalid address operand in TARGET_MEM_REF");
3370 return true;
3372 if (!TMR_OFFSET (expr)
3373 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3374 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3376 error ("invalid offset operand in TARGET_MEM_REF");
3377 debug_generic_stmt (expr);
3378 return true;
3382 return ((require_lvalue || !is_gimple_min_invariant (expr))
3383 && verify_types_in_gimple_min_lval (expr));
3386 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3387 list of pointer-to types that is trivially convertible to DEST. */
3389 static bool
3390 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3392 tree src;
3394 if (!TYPE_POINTER_TO (src_obj))
3395 return true;
3397 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3398 if (useless_type_conversion_p (dest, src))
3399 return true;
3401 return false;
3404 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3405 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3407 static bool
3408 valid_fixed_convert_types_p (tree type1, tree type2)
3410 return (FIXED_POINT_TYPE_P (type1)
3411 && (INTEGRAL_TYPE_P (type2)
3412 || SCALAR_FLOAT_TYPE_P (type2)
3413 || FIXED_POINT_TYPE_P (type2)));
3416 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3417 is a problem, otherwise false. */
3419 static bool
3420 verify_gimple_call (gcall *stmt)
3422 tree fn = gimple_call_fn (stmt);
3423 tree fntype, fndecl;
3424 unsigned i;
3426 if (gimple_call_internal_p (stmt))
3428 if (fn)
3430 error ("gimple call has two targets");
3431 debug_generic_stmt (fn);
3432 return true;
3434 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3435 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3437 return false;
3440 else
3442 if (!fn)
3444 error ("gimple call has no target");
3445 return true;
3449 if (fn && !is_gimple_call_addr (fn))
3451 error ("invalid function in gimple call");
3452 debug_generic_stmt (fn);
3453 return true;
3456 if (fn
3457 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3458 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3459 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3461 error ("non-function in gimple call");
3462 return true;
3465 fndecl = gimple_call_fndecl (stmt);
3466 if (fndecl
3467 && TREE_CODE (fndecl) == FUNCTION_DECL
3468 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3469 && !DECL_PURE_P (fndecl)
3470 && !TREE_READONLY (fndecl))
3472 error ("invalid pure const state for function");
3473 return true;
3476 tree lhs = gimple_call_lhs (stmt);
3477 if (lhs
3478 && (!is_gimple_lvalue (lhs)
3479 || verify_types_in_gimple_reference (lhs, true)))
3481 error ("invalid LHS in gimple call");
3482 return true;
3485 if (gimple_call_ctrl_altering_p (stmt)
3486 && gimple_call_noreturn_p (stmt)
3487 && should_remove_lhs_p (lhs))
3489 error ("LHS in noreturn call");
3490 return true;
3493 fntype = gimple_call_fntype (stmt);
3494 if (fntype
3495 && lhs
3496 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3497 /* ??? At least C++ misses conversions at assignments from
3498 void * call results.
3499 For now simply allow arbitrary pointer type conversions. */
3500 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3501 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3503 error ("invalid conversion in gimple call");
3504 debug_generic_stmt (TREE_TYPE (lhs));
3505 debug_generic_stmt (TREE_TYPE (fntype));
3506 return true;
3509 if (gimple_call_chain (stmt)
3510 && !is_gimple_val (gimple_call_chain (stmt)))
3512 error ("invalid static chain in gimple call");
3513 debug_generic_stmt (gimple_call_chain (stmt));
3514 return true;
3517 /* If there is a static chain argument, the call should either be
3518 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3519 if (gimple_call_chain (stmt)
3520 && fndecl
3521 && !DECL_STATIC_CHAIN (fndecl))
3523 error ("static chain with function that doesn%'t use one");
3524 return true;
3527 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3529 switch (DECL_FUNCTION_CODE (fndecl))
3531 case BUILT_IN_UNREACHABLE:
3532 case BUILT_IN_TRAP:
3533 if (gimple_call_num_args (stmt) > 0)
3535 /* Built-in unreachable with parameters might not be caught by
3536 undefined behavior sanitizer. Front ends do check that users do not
3537 call them that way, but we also produce calls to
3538 __builtin_unreachable internally, for example when IPA figures
3539 out a call cannot happen in a legal program. In such cases,
3540 we must make sure arguments are stripped off. */
3541 error ("__builtin_unreachable or __builtin_trap call with "
3542 "arguments");
3543 return true;
3545 break;
3546 default:
3547 break;
3551 /* ??? The C frontend passes unpromoted arguments in case it
3552 didn't see a function declaration before the call. So for now
3553 leave the call arguments mostly unverified. Once we gimplify
3554 unit-at-a-time we have a chance to fix this. */
3556 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3558 tree arg = gimple_call_arg (stmt, i);
3559 if ((is_gimple_reg_type (TREE_TYPE (arg))
3560 && !is_gimple_val (arg))
3561 || (!is_gimple_reg_type (TREE_TYPE (arg))
3562 && !is_gimple_lvalue (arg)))
3564 error ("invalid argument to gimple call");
3565 debug_generic_expr (arg);
3566 return true;
3570 return false;
3573 /* Verifies the gimple comparison with the result type TYPE, the
3574 operands OP0 and OP1, and the comparison code CODE. */
3576 static bool
3577 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3579 tree op0_type = TREE_TYPE (op0);
3580 tree op1_type = TREE_TYPE (op1);
3582 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3584 error ("invalid operands in gimple comparison");
3585 return true;
3588 /* For comparisons we do not have the operation's type as the
3589 effective type the comparison is carried out in. Instead
3590 we require that either the first operand is trivially
3591 convertible into the second, or the other way around.
3592 Because we special-case pointers to void we allow
3593 comparisons of pointers with the same mode as well. */
3594 if (!useless_type_conversion_p (op0_type, op1_type)
3595 && !useless_type_conversion_p (op1_type, op0_type)
3596 && (!POINTER_TYPE_P (op0_type)
3597 || !POINTER_TYPE_P (op1_type)
3598 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3600 error ("mismatching comparison operand types");
3601 debug_generic_expr (op0_type);
3602 debug_generic_expr (op1_type);
3603 return true;
3606 /* The resulting type of a comparison may be an effective boolean type. */
3607 if (INTEGRAL_TYPE_P (type)
3608 && (TREE_CODE (type) == BOOLEAN_TYPE
3609 || TYPE_PRECISION (type) == 1))
3611 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3612 || TREE_CODE (op1_type) == VECTOR_TYPE)
3613 && code != EQ_EXPR && code != NE_EXPR
3614 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3615 && !VECTOR_INTEGER_TYPE_P (op0_type))
3617 error ("unsupported operation or type for vector comparison"
3618 " returning a boolean");
3619 debug_generic_expr (op0_type);
3620 debug_generic_expr (op1_type);
3621 return true;
3624 /* Or a boolean vector type with the same element count
3625 as the comparison operand types. */
3626 else if (TREE_CODE (type) == VECTOR_TYPE
3627 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3629 if (TREE_CODE (op0_type) != VECTOR_TYPE
3630 || TREE_CODE (op1_type) != VECTOR_TYPE)
3632 error ("non-vector operands in vector comparison");
3633 debug_generic_expr (op0_type);
3634 debug_generic_expr (op1_type);
3635 return true;
3638 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3640 error ("invalid vector comparison resulting type");
3641 debug_generic_expr (type);
3642 return true;
3645 else
3647 error ("bogus comparison result type");
3648 debug_generic_expr (type);
3649 return true;
3652 return false;
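/* For example, "_Bool _1 = a_2 < b_3" verifies, as does a vector
   comparison producing a boolean vector with as many subparts as the
   operands; "int _1 = a_2 < b_3" with a full-precision int result is
   rejected as a bogus comparison result type.  */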
3655 /* Verify a gimple assignment statement STMT with an unary rhs.
3656 Returns true if anything is wrong. */
3658 static bool
3659 verify_gimple_assign_unary (gassign *stmt)
3661 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3662 tree lhs = gimple_assign_lhs (stmt);
3663 tree lhs_type = TREE_TYPE (lhs);
3664 tree rhs1 = gimple_assign_rhs1 (stmt);
3665 tree rhs1_type = TREE_TYPE (rhs1);
3667 if (!is_gimple_reg (lhs))
3669 error ("non-register as LHS of unary operation");
3670 return true;
3673 if (!is_gimple_val (rhs1))
3675 error ("invalid operand in unary operation");
3676 return true;
3679 /* First handle conversions. */
3680 switch (rhs_code)
3682 CASE_CONVERT:
3684 /* Allow conversions from pointer type to integral type only if
3685 there is no sign or zero extension involved.
3686 For targets where the precision of ptrofftype doesn't match that
3687 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3688 if ((POINTER_TYPE_P (lhs_type)
3689 && INTEGRAL_TYPE_P (rhs1_type))
3690 || (POINTER_TYPE_P (rhs1_type)
3691 && INTEGRAL_TYPE_P (lhs_type)
3692 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3693 || ptrofftype_p (sizetype))))
3694 return false;
3696 /* Allow conversion from integral to offset type and vice versa. */
3697 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3698 && INTEGRAL_TYPE_P (rhs1_type))
3699 || (INTEGRAL_TYPE_P (lhs_type)
3700 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3701 return false;
3703 /* Otherwise assert we are converting between types of the
3704 same kind. */
3705 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3707 error ("invalid types in nop conversion");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 return true;
3713 return false;
3716 case ADDR_SPACE_CONVERT_EXPR:
3718 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3719 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3720 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3722 error ("invalid types in address space conversion");
3723 debug_generic_expr (lhs_type);
3724 debug_generic_expr (rhs1_type);
3725 return true;
3728 return false;
3731 case FIXED_CONVERT_EXPR:
3733 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3734 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3736 error ("invalid types in fixed-point conversion");
3737 debug_generic_expr (lhs_type);
3738 debug_generic_expr (rhs1_type);
3739 return true;
3742 return false;
3745 case FLOAT_EXPR:
3747 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3748 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3749 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3751 error ("invalid types in conversion to floating point");
3752 debug_generic_expr (lhs_type);
3753 debug_generic_expr (rhs1_type);
3754 return true;
3757 return false;
3760 case FIX_TRUNC_EXPR:
3762 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3763 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3764 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3766 error ("invalid types in conversion to integer");
3767 debug_generic_expr (lhs_type);
3768 debug_generic_expr (rhs1_type);
3769 return true;
3772 return false;
3774 case REDUC_MAX_EXPR:
3775 case REDUC_MIN_EXPR:
3776 case REDUC_PLUS_EXPR:
3777 if (!VECTOR_TYPE_P (rhs1_type)
3778 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3780 error ("reduction should convert from vector to element type");
3781 debug_generic_expr (lhs_type);
3782 debug_generic_expr (rhs1_type);
3783 return true;
3785 return false;
3787 case VEC_UNPACK_HI_EXPR:
3788 case VEC_UNPACK_LO_EXPR:
3789 case VEC_UNPACK_FLOAT_HI_EXPR:
3790 case VEC_UNPACK_FLOAT_LO_EXPR:
3791 /* FIXME. */
3792 return false;
3794 case NEGATE_EXPR:
3795 case ABS_EXPR:
3796 case BIT_NOT_EXPR:
3797 case PAREN_EXPR:
3798 case CONJ_EXPR:
3799 break;
3801 default:
3802 gcc_unreachable ();
3805 /* For the remaining codes assert there is no conversion involved. */
3806 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3808 error ("non-trivial conversion in unary operation");
3809 debug_generic_expr (lhs_type);
3810 debug_generic_expr (rhs1_type);
3811 return true;
3814 return false;
3817 /* Verify a gimple assignment statement STMT with a binary rhs.
3818 Returns true if anything is wrong. */
3820 static bool
3821 verify_gimple_assign_binary (gassign *stmt)
3823 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3824 tree lhs = gimple_assign_lhs (stmt);
3825 tree lhs_type = TREE_TYPE (lhs);
3826 tree rhs1 = gimple_assign_rhs1 (stmt);
3827 tree rhs1_type = TREE_TYPE (rhs1);
3828 tree rhs2 = gimple_assign_rhs2 (stmt);
3829 tree rhs2_type = TREE_TYPE (rhs2);
3831 if (!is_gimple_reg (lhs))
3833 error ("non-register as LHS of binary operation");
3834 return true;
3837 if (!is_gimple_val (rhs1)
3838 || !is_gimple_val (rhs2))
3840 error ("invalid operands in binary operation");
3841 return true;
3844 /* First handle operations that involve different types. */
3845 switch (rhs_code)
3847 case COMPLEX_EXPR:
3849 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3850 || !(INTEGRAL_TYPE_P (rhs1_type)
3851 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3852 || !(INTEGRAL_TYPE_P (rhs2_type)
3853 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3855 error ("type mismatch in complex expression");
3856 debug_generic_expr (lhs_type);
3857 debug_generic_expr (rhs1_type);
3858 debug_generic_expr (rhs2_type);
3859 return true;
3862 return false;
3865 case LSHIFT_EXPR:
3866 case RSHIFT_EXPR:
3867 case LROTATE_EXPR:
3868 case RROTATE_EXPR:
3870 /* Shifts and rotates are ok on integral types, fixed-point
3871 types and integer vector types. */
3872 if ((!INTEGRAL_TYPE_P (rhs1_type)
3873 && !FIXED_POINT_TYPE_P (rhs1_type)
3874 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3875 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3876 || (!INTEGRAL_TYPE_P (rhs2_type)
3877 /* Vector shifts of vectors are also ok. */
3878 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3879 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3880 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3881 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3882 || !useless_type_conversion_p (lhs_type, rhs1_type))
3884 error ("type mismatch in shift expression");
3885 debug_generic_expr (lhs_type);
3886 debug_generic_expr (rhs1_type);
3887 debug_generic_expr (rhs2_type);
3888 return true;
3891 return false;
3894 case WIDEN_LSHIFT_EXPR:
3896 if (!INTEGRAL_TYPE_P (lhs_type)
3897 || !INTEGRAL_TYPE_P (rhs1_type)
3898 || TREE_CODE (rhs2) != INTEGER_CST
3899 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3901 error ("type mismatch in widening vector shift expression");
3902 debug_generic_expr (lhs_type);
3903 debug_generic_expr (rhs1_type);
3904 debug_generic_expr (rhs2_type);
3905 return true;
3908 return false;
3911 case VEC_WIDEN_LSHIFT_HI_EXPR:
3912 case VEC_WIDEN_LSHIFT_LO_EXPR:
3914 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3915 || TREE_CODE (lhs_type) != VECTOR_TYPE
3916 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3917 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3918 || TREE_CODE (rhs2) != INTEGER_CST
3919 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3920 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3922 error ("type mismatch in widening vector shift expression");
3923 debug_generic_expr (lhs_type);
3924 debug_generic_expr (rhs1_type);
3925 debug_generic_expr (rhs2_type);
3926 return true;
3929 return false;
3932 case PLUS_EXPR:
3933 case MINUS_EXPR:
3935 tree lhs_etype = lhs_type;
3936 tree rhs1_etype = rhs1_type;
3937 tree rhs2_etype = rhs2_type;
3938 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3940 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3941 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3943 error ("invalid non-vector operands to vector valued plus");
3944 return true;
3946 lhs_etype = TREE_TYPE (lhs_type);
3947 rhs1_etype = TREE_TYPE (rhs1_type);
3948 rhs2_etype = TREE_TYPE (rhs2_type);
3950 if (POINTER_TYPE_P (lhs_etype)
3951 || POINTER_TYPE_P (rhs1_etype)
3952 || POINTER_TYPE_P (rhs2_etype))
3954 error ("invalid (pointer) operands to plus/minus");
3955 return true;
3958 /* Continue with generic binary expression handling. */
3959 break;
3962 case POINTER_PLUS_EXPR:
3964 if (!POINTER_TYPE_P (rhs1_type)
3965 || !useless_type_conversion_p (lhs_type, rhs1_type)
3966 || !ptrofftype_p (rhs2_type))
3968 error ("type mismatch in pointer plus expression");
3969 debug_generic_stmt (lhs_type);
3970 debug_generic_stmt (rhs1_type);
3971 debug_generic_stmt (rhs2_type);
3972 return true;
3975 return false;
3978 case TRUTH_ANDIF_EXPR:
3979 case TRUTH_ORIF_EXPR:
3980 case TRUTH_AND_EXPR:
3981 case TRUTH_OR_EXPR:
3982 case TRUTH_XOR_EXPR:
3984 gcc_unreachable ();
3986 case LT_EXPR:
3987 case LE_EXPR:
3988 case GT_EXPR:
3989 case GE_EXPR:
3990 case EQ_EXPR:
3991 case NE_EXPR:
3992 case UNORDERED_EXPR:
3993 case ORDERED_EXPR:
3994 case UNLT_EXPR:
3995 case UNLE_EXPR:
3996 case UNGT_EXPR:
3997 case UNGE_EXPR:
3998 case UNEQ_EXPR:
3999 case LTGT_EXPR:
4000 /* Comparisons are also binary, but the result type is not
4001 connected to the operand types. */
4002 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4004 case WIDEN_MULT_EXPR:
4005 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4006 return true;
4007 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4008 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4010 case WIDEN_SUM_EXPR:
4012 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4013 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4014 && ((!INTEGRAL_TYPE_P (rhs1_type)
4015 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4016 || (!INTEGRAL_TYPE_P (lhs_type)
4017 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4018 || !useless_type_conversion_p (lhs_type, rhs2_type)
4019 || (GET_MODE_SIZE (element_mode (rhs2_type))
4020 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4022 error ("type mismatch in widening sum reduction");
4023 debug_generic_expr (lhs_type);
4024 debug_generic_expr (rhs1_type);
4025 debug_generic_expr (rhs2_type);
4026 return true;
4028 return false;
4031 case VEC_WIDEN_MULT_HI_EXPR:
4032 case VEC_WIDEN_MULT_LO_EXPR:
4033 case VEC_WIDEN_MULT_EVEN_EXPR:
4034 case VEC_WIDEN_MULT_ODD_EXPR:
4036 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4037 || TREE_CODE (lhs_type) != VECTOR_TYPE
4038 || !types_compatible_p (rhs1_type, rhs2_type)
4039 || (GET_MODE_SIZE (element_mode (lhs_type))
4040 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4042 error ("type mismatch in vector widening multiplication");
4043 debug_generic_expr (lhs_type);
4044 debug_generic_expr (rhs1_type);
4045 debug_generic_expr (rhs2_type);
4046 return true;
4048 return false;
4051 case VEC_PACK_TRUNC_EXPR:
4052 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4053 vector boolean types. */
4054 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4055 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4056 && types_compatible_p (rhs1_type, rhs2_type)
4057 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4058 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4059 return false;
4061 /* Fallthru. */
4062 case VEC_PACK_SAT_EXPR:
4063 case VEC_PACK_FIX_TRUNC_EXPR:
4065 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4066 || TREE_CODE (lhs_type) != VECTOR_TYPE
4067 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4068 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4069 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4070 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4071 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4072 || !types_compatible_p (rhs1_type, rhs2_type)
4073 || (GET_MODE_SIZE (element_mode (rhs1_type))
4074 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4076 error ("type mismatch in vector pack expression");
4077 debug_generic_expr (lhs_type);
4078 debug_generic_expr (rhs1_type);
4079 debug_generic_expr (rhs2_type);
4080 return true;
4083 return false;
4086 case MULT_EXPR:
4087 case MULT_HIGHPART_EXPR:
4088 case TRUNC_DIV_EXPR:
4089 case CEIL_DIV_EXPR:
4090 case FLOOR_DIV_EXPR:
4091 case ROUND_DIV_EXPR:
4092 case TRUNC_MOD_EXPR:
4093 case CEIL_MOD_EXPR:
4094 case FLOOR_MOD_EXPR:
4095 case ROUND_MOD_EXPR:
4096 case RDIV_EXPR:
4097 case EXACT_DIV_EXPR:
4098 case MIN_EXPR:
4099 case MAX_EXPR:
4100 case BIT_IOR_EXPR:
4101 case BIT_XOR_EXPR:
4102 case BIT_AND_EXPR:
4103 /* Continue with generic binary expression handling. */
4104 break;
4106 default:
4107 gcc_unreachable ();
4110 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4111 || !useless_type_conversion_p (lhs_type, rhs2_type))
4113 error ("type mismatch in binary expression");
4114 debug_generic_stmt (lhs_type);
4115 debug_generic_stmt (rhs1_type);
4116 debug_generic_stmt (rhs2_type);
4117 return true;
4120 return false;
4123 /* Verify a gimple assignment statement STMT with a ternary rhs.
4124 Returns true if anything is wrong. */
4126 static bool
4127 verify_gimple_assign_ternary (gassign *stmt)
4129 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4130 tree lhs = gimple_assign_lhs (stmt);
4131 tree lhs_type = TREE_TYPE (lhs);
4132 tree rhs1 = gimple_assign_rhs1 (stmt);
4133 tree rhs1_type = TREE_TYPE (rhs1);
4134 tree rhs2 = gimple_assign_rhs2 (stmt);
4135 tree rhs2_type = TREE_TYPE (rhs2);
4136 tree rhs3 = gimple_assign_rhs3 (stmt);
4137 tree rhs3_type = TREE_TYPE (rhs3);
4139 if (!is_gimple_reg (lhs))
4141 error ("non-register as LHS of ternary operation");
4142 return true;
4145 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4146 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4147 || !is_gimple_val (rhs2)
4148 || !is_gimple_val (rhs3))
4150 error ("invalid operands in ternary operation");
4151 return true;
4154 /* First handle operations that involve different types. */
4155 switch (rhs_code)
4157 case WIDEN_MULT_PLUS_EXPR:
4158 case WIDEN_MULT_MINUS_EXPR:
4159 if ((!INTEGRAL_TYPE_P (rhs1_type)
4160 && !FIXED_POINT_TYPE_P (rhs1_type))
4161 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4162 || !useless_type_conversion_p (lhs_type, rhs3_type)
4163 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4164 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4166 error ("type mismatch in widening multiply-accumulate expression");
4167 debug_generic_expr (lhs_type);
4168 debug_generic_expr (rhs1_type);
4169 debug_generic_expr (rhs2_type);
4170 debug_generic_expr (rhs3_type);
4171 return true;
4173 break;
4175 case FMA_EXPR:
4176 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4177 || !useless_type_conversion_p (lhs_type, rhs2_type)
4178 || !useless_type_conversion_p (lhs_type, rhs3_type))
4180 error ("type mismatch in fused multiply-add expression");
4181 debug_generic_expr (lhs_type);
4182 debug_generic_expr (rhs1_type);
4183 debug_generic_expr (rhs2_type);
4184 debug_generic_expr (rhs3_type);
4185 return true;
4187 break;
4189 case VEC_COND_EXPR:
4190 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4191 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4192 != TYPE_VECTOR_SUBPARTS (lhs_type))
4194 error ("the first argument of a VEC_COND_EXPR must be of a "
4195 "boolean vector type of the same number of elements "
4196 "as the result");
4197 debug_generic_expr (lhs_type);
4198 debug_generic_expr (rhs1_type);
4199 return true;
4201 /* Fallthrough. */
4202 case COND_EXPR:
4203 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4204 || !useless_type_conversion_p (lhs_type, rhs3_type))
4206 error ("type mismatch in conditional expression");
4207 debug_generic_expr (lhs_type);
4208 debug_generic_expr (rhs2_type);
4209 debug_generic_expr (rhs3_type);
4210 return true;
4212 break;
4214 case VEC_PERM_EXPR:
4215 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4216 || !useless_type_conversion_p (lhs_type, rhs2_type))
4218 error ("type mismatch in vector permute expression");
4219 debug_generic_expr (lhs_type);
4220 debug_generic_expr (rhs1_type);
4221 debug_generic_expr (rhs2_type);
4222 debug_generic_expr (rhs3_type);
4223 return true;
4226 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4227 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4228 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4230 error ("vector types expected in vector permute expression");
4231 debug_generic_expr (lhs_type);
4232 debug_generic_expr (rhs1_type);
4233 debug_generic_expr (rhs2_type);
4234 debug_generic_expr (rhs3_type);
4235 return true;
4238 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4239 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4240 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4241 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4242 != TYPE_VECTOR_SUBPARTS (lhs_type))
4244 error ("vectors with different element number found "
4245 "in vector permute expression");
4246 debug_generic_expr (lhs_type);
4247 debug_generic_expr (rhs1_type);
4248 debug_generic_expr (rhs2_type);
4249 debug_generic_expr (rhs3_type);
4250 return true;
4253 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4254 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4255 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4257 error ("invalid mask type in vector permute expression");
4258 debug_generic_expr (lhs_type);
4259 debug_generic_expr (rhs1_type);
4260 debug_generic_expr (rhs2_type);
4261 debug_generic_expr (rhs3_type);
4262 return true;
4265 return false;
4267 case SAD_EXPR:
4268 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4269 || !useless_type_conversion_p (lhs_type, rhs3_type)
4270 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4271 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4273 error ("type mismatch in sad expression");
4274 debug_generic_expr (lhs_type);
4275 debug_generic_expr (rhs1_type);
4276 debug_generic_expr (rhs2_type);
4277 debug_generic_expr (rhs3_type);
4278 return true;
4281 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4282 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4283 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4285 error ("vector types expected in sad expression");
4286 debug_generic_expr (lhs_type);
4287 debug_generic_expr (rhs1_type);
4288 debug_generic_expr (rhs2_type);
4289 debug_generic_expr (rhs3_type);
4290 return true;
4293 return false;
4295 case BIT_INSERT_EXPR:
4296 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4298 error ("type mismatch in BIT_INSERT_EXPR");
4299 debug_generic_expr (lhs_type);
4300 debug_generic_expr (rhs1_type);
4301 return true;
4303 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4304 && INTEGRAL_TYPE_P (rhs2_type))
4305 || (VECTOR_TYPE_P (rhs1_type)
4306 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4308 error ("not allowed type combination in BIT_INSERT_EXPR");
4309 debug_generic_expr (rhs1_type);
4310 debug_generic_expr (rhs2_type);
4311 return true;
4313 if (! tree_fits_uhwi_p (rhs3)
4314 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4315 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4317 error ("invalid position or size in BIT_INSERT_EXPR");
4318 return true;
4320 if (INTEGRAL_TYPE_P (rhs1_type))
4322 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4323 if (bitpos >= TYPE_PRECISION (rhs1_type)
4324 || (bitpos + TYPE_PRECISION (rhs2_type)
4325 > TYPE_PRECISION (rhs1_type)))
4327 error ("insertion out of range in BIT_INSERT_EXPR");
4328 return true;
4331 else if (VECTOR_TYPE_P (rhs1_type))
4333 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4334 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4335 if (bitpos % bitsize != 0)
4337 error ("vector insertion not at element boundary");
4338 return true;
4341 return false;
4343 case DOT_PROD_EXPR:
4345 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4346 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4347 && ((!INTEGRAL_TYPE_P (rhs1_type)
4348 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4349 || (!INTEGRAL_TYPE_P (lhs_type)
4350 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4351 || !types_compatible_p (rhs1_type, rhs2_type)
4352 || !useless_type_conversion_p (lhs_type, rhs3_type)
4353 || (GET_MODE_SIZE (element_mode (rhs3_type))
4354 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4356 error ("type mismatch in dot product reduction");
4357 debug_generic_expr (lhs_type);
4358 debug_generic_expr (rhs1_type);
4359 debug_generic_expr (rhs2_type);
4360 return true;
4362 return false;
4365 case REALIGN_LOAD_EXPR:
4366 /* FIXME. */
4367 return false;
4369 default:
4370 gcc_unreachable ();
4372 return false;
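/* Editorial note, not from the GCC sources: a minimal sketch of the
   ternary-RHS shapes the verifier above accepts, written in GIMPLE dump
   notation with illustrative SSA names.

     x_5 = a_1 < b_2 ? c_3 : d_4;            COND_EXPR: rhs1 is the
                                             condition; rhs2 and rhs3
                                             must match the LHS type.
     v_6 = VEC_PERM_EXPR <u_1, u_2, m_3>;    all three operands are
                                             vectors with equal element
                                             counts; m_3 has integer
                                             elements as wide as the
                                             data elements.
     f_7 = FMA_EXPR <f_1, f_2, f_3>;         all operand types must be
                                             trivially convertible to
                                             the LHS type.  */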
4375 /* Verify a gimple assignment statement STMT with a single rhs.
4376 Returns true if anything is wrong. */
4378 static bool
4379 verify_gimple_assign_single (gassign *stmt)
4381 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4382 tree lhs = gimple_assign_lhs (stmt);
4383 tree lhs_type = TREE_TYPE (lhs);
4384 tree rhs1 = gimple_assign_rhs1 (stmt);
4385 tree rhs1_type = TREE_TYPE (rhs1);
4386 bool res = false;
4388 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4390 error ("non-trivial conversion at assignment");
4391 debug_generic_expr (lhs_type);
4392 debug_generic_expr (rhs1_type);
4393 return true;
4396 if (gimple_clobber_p (stmt)
4397 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4399 error ("non-decl/MEM_REF LHS in clobber statement");
4400 debug_generic_expr (lhs);
4401 return true;
4404 if (handled_component_p (lhs)
4405 || TREE_CODE (lhs) == MEM_REF
4406 || TREE_CODE (lhs) == TARGET_MEM_REF)
4407 res |= verify_types_in_gimple_reference (lhs, true);
4409 /* Special codes we cannot handle via their class. */
4410 switch (rhs_code)
4412 case ADDR_EXPR:
4414 tree op = TREE_OPERAND (rhs1, 0);
4415 if (!is_gimple_addressable (op))
4417 error ("invalid operand in unary expression");
4418 return true;
4421 /* Technically there is no longer a need for matching types, but
4422 gimple hygiene asks for this check. In LTO we can end up
4423 combining incompatible units and thus with addresses of globals
4424 that change their type to a common one. */
4425 if (!in_lto_p
4426 && !types_compatible_p (TREE_TYPE (op),
4427 TREE_TYPE (TREE_TYPE (rhs1)))
4428 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4429 TREE_TYPE (op)))
4431 error ("type mismatch in address expression");
4432 debug_generic_stmt (TREE_TYPE (rhs1));
4433 debug_generic_stmt (TREE_TYPE (op));
4434 return true;
4437 return verify_types_in_gimple_reference (op, true);
4440 /* tcc_reference */
4441 case INDIRECT_REF:
4442 error ("INDIRECT_REF in gimple IL");
4443 return true;
4445 case COMPONENT_REF:
4446 case BIT_FIELD_REF:
4447 case ARRAY_REF:
4448 case ARRAY_RANGE_REF:
4449 case VIEW_CONVERT_EXPR:
4450 case REALPART_EXPR:
4451 case IMAGPART_EXPR:
4452 case TARGET_MEM_REF:
4453 case MEM_REF:
4454 if (!is_gimple_reg (lhs)
4455 && is_gimple_reg_type (TREE_TYPE (lhs)))
4457 error ("invalid rhs for gimple memory store");
4458 debug_generic_stmt (lhs);
4459 debug_generic_stmt (rhs1);
4460 return true;
4462 return res || verify_types_in_gimple_reference (rhs1, false);
4464 /* tcc_constant */
4465 case SSA_NAME:
4466 case INTEGER_CST:
4467 case REAL_CST:
4468 case FIXED_CST:
4469 case COMPLEX_CST:
4470 case VECTOR_CST:
4471 case STRING_CST:
4472 return res;
4474 /* tcc_declaration */
4475 case CONST_DECL:
4476 return res;
4477 case VAR_DECL:
4478 case PARM_DECL:
4479 if (!is_gimple_reg (lhs)
4480 && !is_gimple_reg (rhs1)
4481 && is_gimple_reg_type (TREE_TYPE (lhs)))
4483 error ("invalid rhs for gimple memory store");
4484 debug_generic_stmt (lhs);
4485 debug_generic_stmt (rhs1);
4486 return true;
4488 return res;
4490 case CONSTRUCTOR:
4491 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4493 unsigned int i;
4494 tree elt_i, elt_v, elt_t = NULL_TREE;
4496 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4497 return res;
4498 /* For vector CONSTRUCTORs we require that either it is an empty
4499 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4500 (then the element count must be correct to cover the whole
4501 outer vector and the index must be NULL on all elements), or it is
4502 a CONSTRUCTOR of scalar elements, where we as an exception allow
4503 a smaller number of elements (assuming zero filling) and
4504 consecutive indexes as compared to NULL indexes (such
4505 CONSTRUCTORs can appear in the IL from FEs). */
4506 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4508 if (elt_t == NULL_TREE)
4510 elt_t = TREE_TYPE (elt_v);
4511 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4513 tree elt_t = TREE_TYPE (elt_v);
4514 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4515 TREE_TYPE (elt_t)))
4517 error ("incorrect type of vector CONSTRUCTOR"
4518 " elements");
4519 debug_generic_stmt (rhs1);
4520 return true;
4522 else if (CONSTRUCTOR_NELTS (rhs1)
4523 * TYPE_VECTOR_SUBPARTS (elt_t)
4524 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4526 error ("incorrect number of vector CONSTRUCTOR"
4527 " elements");
4528 debug_generic_stmt (rhs1);
4529 return true;
4532 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4533 elt_t))
4535 error ("incorrect type of vector CONSTRUCTOR elements");
4536 debug_generic_stmt (rhs1);
4537 return true;
4539 else if (CONSTRUCTOR_NELTS (rhs1)
4540 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4542 error ("incorrect number of vector CONSTRUCTOR elements");
4543 debug_generic_stmt (rhs1);
4544 return true;
4547 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4549 error ("incorrect type of vector CONSTRUCTOR elements");
4550 debug_generic_stmt (rhs1);
4551 return true;
4553 if (elt_i != NULL_TREE
4554 && (TREE_CODE (elt_t) == VECTOR_TYPE
4555 || TREE_CODE (elt_i) != INTEGER_CST
4556 || compare_tree_int (elt_i, i) != 0))
4558 error ("vector CONSTRUCTOR with non-NULL element index");
4559 debug_generic_stmt (rhs1);
4560 return true;
4562 if (!is_gimple_val (elt_v))
4564 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4565 debug_generic_stmt (rhs1);
4566 return true;
4570 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4572 error ("non-vector CONSTRUCTOR with elements");
4573 debug_generic_stmt (rhs1);
4574 return true;
4576 return res;
4577 case OBJ_TYPE_REF:
4578 case ASSERT_EXPR:
4579 case WITH_SIZE_EXPR:
4580 /* FIXME. */
4581 return res;
4583 default:;
4586 return res;
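/* Editorial note, not from the GCC sources: examples of single-RHS
   assignments in GIMPLE dump notation, with illustrative names, showing
   what the checks above accept.

     p_1 = &a;                  ADDR_EXPR: the operand must be
                                addressable.
     x_2 = MEM[(int *)p_1];     load through a MEM_REF.
     MEM[(int *)p_1] = x_2;     store: the RHS of a store into memory
                                must not itself be a memory reference
                                of register type ("invalid rhs for
                                gimple memory store" above).
     v_3 = {1, 2, 3, 4};        vector CONSTRUCTOR of scalar elements
                                with NULL, i.e. implicitly consecutive,
                                indexes.  */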
4589 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4590 is a problem, otherwise false. */
4592 static bool
4593 verify_gimple_assign (gassign *stmt)
4595 switch (gimple_assign_rhs_class (stmt))
4597 case GIMPLE_SINGLE_RHS:
4598 return verify_gimple_assign_single (stmt);
4600 case GIMPLE_UNARY_RHS:
4601 return verify_gimple_assign_unary (stmt);
4603 case GIMPLE_BINARY_RHS:
4604 return verify_gimple_assign_binary (stmt);
4606 case GIMPLE_TERNARY_RHS:
4607 return verify_gimple_assign_ternary (stmt);
4609 default:
4610 gcc_unreachable ();
4614 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4615 is a problem, otherwise false. */
4617 static bool
4618 verify_gimple_return (greturn *stmt)
4620 tree op = gimple_return_retval (stmt);
4621 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4623 /* We cannot test for the presence of return values, as we do not fix
4624 up missing return values from the original source. */
4625 if (op == NULL)
4626 return false;
4628 if (!is_gimple_val (op)
4629 && TREE_CODE (op) != RESULT_DECL)
4631 error ("invalid operand in return statement");
4632 debug_generic_stmt (op);
4633 return true;
4636 if ((TREE_CODE (op) == RESULT_DECL
4637 && DECL_BY_REFERENCE (op))
4638 || (TREE_CODE (op) == SSA_NAME
4639 && SSA_NAME_VAR (op)
4640 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4641 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4642 op = TREE_TYPE (op);
4644 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4646 error ("invalid conversion in return statement");
4647 debug_generic_stmt (restype);
4648 debug_generic_stmt (TREE_TYPE (op));
4649 return true;
4652 return false;
4656 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4657 is a problem, otherwise false. */
4659 static bool
4660 verify_gimple_goto (ggoto *stmt)
4662 tree dest = gimple_goto_dest (stmt);
4664 /* ??? We have two canonical forms of direct goto destinations, a
4665 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4666 if (TREE_CODE (dest) != LABEL_DECL
4667 && (!is_gimple_val (dest)
4668 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4670 error ("goto destination is neither a label nor a pointer");
4671 return true;
4674 return false;
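/* Editorial note, not from the GCC sources: the pointer-typed
   destination form arises from GNU C computed gotos, e.g.

     void
     dispatch (int i)
     {
       static void *tbl[] = { &&l0, &&l1 };   <- ADDR_EXPRs of LABEL_DECLs
       goto *tbl[i & 1];                      <- pointer-typed destination
      l0: return;
      l1: return;
     }

   while a direct "goto l0;" carries the bare LABEL_DECL.  */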
4677 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4678 is a problem, otherwise false. */
4680 static bool
4681 verify_gimple_switch (gswitch *stmt)
4683 unsigned int i, n;
4684 tree elt, prev_upper_bound = NULL_TREE;
4685 tree index_type, elt_type = NULL_TREE;
4687 if (!is_gimple_val (gimple_switch_index (stmt)))
4689 error ("invalid operand to switch statement");
4690 debug_generic_stmt (gimple_switch_index (stmt));
4691 return true;
4694 index_type = TREE_TYPE (gimple_switch_index (stmt));
4695 if (! INTEGRAL_TYPE_P (index_type))
4697 error ("non-integral type switch statement");
4698 debug_generic_expr (index_type);
4699 return true;
4702 elt = gimple_switch_label (stmt, 0);
4703 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4705 error ("invalid default case label in switch statement");
4706 debug_generic_expr (elt);
4707 return true;
4710 n = gimple_switch_num_labels (stmt);
4711 for (i = 1; i < n; i++)
4713 elt = gimple_switch_label (stmt, i);
4715 if (! CASE_LOW (elt))
4717 error ("invalid case label in switch statement");
4718 debug_generic_expr (elt);
4719 return true;
4721 if (CASE_HIGH (elt)
4722 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4724 error ("invalid case range in switch statement");
4725 debug_generic_expr (elt);
4726 return true;
4729 if (elt_type)
4731 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4732 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4734 error ("type mismatch for case label in switch statement");
4735 debug_generic_expr (elt);
4736 return true;
4739 else
4741 elt_type = TREE_TYPE (CASE_LOW (elt));
4742 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4744 error ("type precision mismatch in switch statement");
4745 return true;
4749 if (prev_upper_bound)
4751 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4753 error ("case labels not sorted in switch statement");
4754 return true;
4758 prev_upper_bound = CASE_HIGH (elt);
4759 if (! prev_upper_bound)
4760 prev_upper_bound = CASE_LOW (elt);
4763 return false;
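/* Editorial note, not from the GCC sources: in source terms the checks
   above allow, e.g.

     switch (i)            <- index must have integral type
       {
       default: ...        <- default label first, with neither CASE_LOW
                              nor CASE_HIGH
       case 1: ...         <- CASE_LOW only
       case 4 ... 7: ...   <- GNU case range, CASE_LOW < CASE_HIGH
       }

   and require that, after the default label, the case vector is sorted
   by CASE_LOW with non-overlapping ranges, all labels sharing one type
   whose precision does not exceed that of the index type.  */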
4766 /* Verify a gimple debug statement STMT.
4767 Returns true if anything is wrong. */
4769 static bool
4770 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4772 /* There isn't much that could be wrong in a gimple debug stmt. A
4773 gimple debug bind stmt, for example, maps a tree (usually a
4774 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4775 or member of an aggregate type) to another tree that can be an
4776 arbitrary expression. These stmts expand into debug
4777 insns, and are converted to debug notes by var-tracking.c. */
4778 return false;
4781 /* Verify a gimple label statement STMT.
4782 Returns true if anything is wrong. */
4784 static bool
4785 verify_gimple_label (glabel *stmt)
4787 tree decl = gimple_label_label (stmt);
4788 int uid;
4789 bool err = false;
4791 if (TREE_CODE (decl) != LABEL_DECL)
4792 return true;
4793 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4794 && DECL_CONTEXT (decl) != current_function_decl)
4796 error ("label's context is not the current function decl");
4797 err |= true;
4800 uid = LABEL_DECL_UID (decl);
4801 if (cfun->cfg
4802 && (uid == -1
4803 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4805 error ("incorrect entry in label_to_block_map");
4806 err |= true;
4809 uid = EH_LANDING_PAD_NR (decl);
4810 if (uid)
4812 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4813 if (decl != lp->post_landing_pad)
4815 error ("incorrect setting of landing pad number");
4816 err |= true;
4820 return err;
4823 /* Verify a gimple cond statement STMT.
4824 Returns true if anything is wrong. */
4826 static bool
4827 verify_gimple_cond (gcond *stmt)
4829 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4831 error ("invalid comparison code in gimple cond");
4832 return true;
4834 if (!(!gimple_cond_true_label (stmt)
4835 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4836 || !(!gimple_cond_false_label (stmt)
4837 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4839 error ("invalid labels in gimple cond");
4840 return true;
4843 return verify_gimple_comparison (boolean_type_node,
4844 gimple_cond_lhs (stmt),
4845 gimple_cond_rhs (stmt),
4846 gimple_cond_code (stmt));
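/* Editorial note, not from the GCC sources: a sketch of building a
   gcond that satisfies the checks above, assuming integer SSA names
   lhs and rhs are in scope:

     gcond *c = gimple_build_cond (LT_EXPR, lhs, rhs,
                                   NULL_TREE, NULL_TREE);

   The code must be a tcc_comparison code, the true/false labels (here
   NULL) may only be LABEL_DECLs, and the comparison must be valid for
   a boolean_type_node result.  */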
4849 /* Verify the GIMPLE statement STMT. Returns true if there is an
4850 error, otherwise false. */
4852 static bool
4853 verify_gimple_stmt (gimple *stmt)
4855 switch (gimple_code (stmt))
4857 case GIMPLE_ASSIGN:
4858 return verify_gimple_assign (as_a <gassign *> (stmt));
4860 case GIMPLE_LABEL:
4861 return verify_gimple_label (as_a <glabel *> (stmt));
4863 case GIMPLE_CALL:
4864 return verify_gimple_call (as_a <gcall *> (stmt));
4866 case GIMPLE_COND:
4867 return verify_gimple_cond (as_a <gcond *> (stmt));
4869 case GIMPLE_GOTO:
4870 return verify_gimple_goto (as_a <ggoto *> (stmt));
4872 case GIMPLE_SWITCH:
4873 return verify_gimple_switch (as_a <gswitch *> (stmt));
4875 case GIMPLE_RETURN:
4876 return verify_gimple_return (as_a <greturn *> (stmt));
4878 case GIMPLE_ASM:
4879 return false;
4881 case GIMPLE_TRANSACTION:
4882 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4884 /* Tuples that do not have tree operands. */
4885 case GIMPLE_NOP:
4886 case GIMPLE_PREDICT:
4887 case GIMPLE_RESX:
4888 case GIMPLE_EH_DISPATCH:
4889 case GIMPLE_EH_MUST_NOT_THROW:
4890 return false;
4892 CASE_GIMPLE_OMP:
4893 /* OpenMP directives are validated by the FE and never operated
4894 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4895 non-gimple expressions when the main index variable has had
4896 its address taken. This does not affect the loop itself
4897 because the header of a GIMPLE_OMP_FOR is merely used to determine
4898 how to set up the parallel iteration. */
4899 return false;
4901 case GIMPLE_DEBUG:
4902 return verify_gimple_debug (stmt);
4904 default:
4905 gcc_unreachable ();
4909 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4910 and false otherwise. */
4912 static bool
4913 verify_gimple_phi (gimple *phi)
4915 bool err = false;
4916 unsigned i;
4917 tree phi_result = gimple_phi_result (phi);
4918 bool virtual_p;
4920 if (!phi_result)
4922 error ("invalid PHI result");
4923 return true;
4926 virtual_p = virtual_operand_p (phi_result);
4927 if (TREE_CODE (phi_result) != SSA_NAME
4928 || (virtual_p
4929 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4931 error ("invalid PHI result");
4932 err = true;
4935 for (i = 0; i < gimple_phi_num_args (phi); i++)
4937 tree t = gimple_phi_arg_def (phi, i);
4939 if (!t)
4941 error ("missing PHI def");
4942 err |= true;
4943 continue;
4945 /* Addressable variables do have SSA_NAMEs but they
4946 are not considered gimple values. */
4947 else if ((TREE_CODE (t) == SSA_NAME
4948 && virtual_p != virtual_operand_p (t))
4949 || (virtual_p
4950 && (TREE_CODE (t) != SSA_NAME
4951 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4952 || (!virtual_p
4953 && !is_gimple_val (t)))
4955 error ("invalid PHI argument");
4956 debug_generic_expr (t);
4957 err |= true;
4959 #ifdef ENABLE_TYPES_CHECKING
4960 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4962 error ("incompatible types in PHI argument %u", i);
4963 debug_generic_stmt (TREE_TYPE (phi_result));
4964 debug_generic_stmt (TREE_TYPE (t));
4965 err |= true;
4967 #endif
4970 return err;
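/* Editorial note, not from the GCC sources: the shapes being verified,
   in GIMPLE dump notation with illustrative names:

     # x_3 = PHI <x_1(2), x_2(4)>             ordinary PHI: the result
                                              and every argument must be
                                              GIMPLE values.
     # .MEM_5 = PHI <.MEM_4(2), .MEM_1(4)>    virtual PHI: the result
                                              and every argument must be
                                              names of the one virtual
                                              operand.  */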
4973 /* Verify the GIMPLE statements inside the sequence STMTS. */
4975 static bool
4976 verify_gimple_in_seq_2 (gimple_seq stmts)
4978 gimple_stmt_iterator ittr;
4979 bool err = false;
4981 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4983 gimple *stmt = gsi_stmt (ittr);
4985 switch (gimple_code (stmt))
4987 case GIMPLE_BIND:
4988 err |= verify_gimple_in_seq_2 (
4989 gimple_bind_body (as_a <gbind *> (stmt)));
4990 break;
4992 case GIMPLE_TRY:
4993 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4994 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4995 break;
4997 case GIMPLE_EH_FILTER:
4998 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4999 break;
5001 case GIMPLE_EH_ELSE:
5003 geh_else *eh_else = as_a <geh_else *> (stmt);
5004 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5005 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5007 break;
5009 case GIMPLE_CATCH:
5010 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5011 as_a <gcatch *> (stmt)));
5012 break;
5014 case GIMPLE_TRANSACTION:
5015 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5016 break;
5018 default:
5020 bool err2 = verify_gimple_stmt (stmt);
5021 if (err2)
5022 debug_gimple_stmt (stmt);
5023 err |= err2;
5028 return err;
5031 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5032 is a problem, otherwise false. */
5034 static bool
5035 verify_gimple_transaction (gtransaction *stmt)
5037 tree lab;
5039 lab = gimple_transaction_label_norm (stmt);
5040 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5041 return true;
5042 lab = gimple_transaction_label_uninst (stmt);
5043 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5044 return true;
5045 lab = gimple_transaction_label_over (stmt);
5046 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5047 return true;
5049 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5053 /* Verify the GIMPLE statements inside the statement list STMTS. */
5055 DEBUG_FUNCTION void
5056 verify_gimple_in_seq (gimple_seq stmts)
5058 timevar_push (TV_TREE_STMT_VERIFY);
5059 if (verify_gimple_in_seq_2 (stmts))
5060 internal_error ("verify_gimple failed");
5061 timevar_pop (TV_TREE_STMT_VERIFY);
5064 /* Return true when T can be shared. */
5066 static bool
5067 tree_node_can_be_shared (tree t)
5069 if (IS_TYPE_OR_DECL_P (t)
5070 || is_gimple_min_invariant (t)
5071 || TREE_CODE (t) == SSA_NAME
5072 || t == error_mark_node
5073 || TREE_CODE (t) == IDENTIFIER_NODE)
5074 return true;
5076 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5077 return true;
5079 if (DECL_P (t))
5080 return true;
5082 return false;
5085 /* Called via walk_tree. Verify tree sharing. */
5087 static tree
5088 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5090 hash_set<void *> *visited = (hash_set<void *> *) data;
5092 if (tree_node_can_be_shared (*tp))
5094 *walk_subtrees = false;
5095 return NULL;
5098 if (visited->add (*tp))
5099 return *tp;
5101 return NULL;
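/* Editorial note, not from the GCC sources: a sketch of how this
   callback is driven, assuming a tree EXPR to be checked:

     hash_set<void *> visited;
     tree dup = walk_tree (&expr, verify_node_sharing_1, &visited, NULL);
     if (dup)
       debug_generic_expr (dup);   <- first node reached twice that is
                                      not allowed to be shared

   verify_gimple_in_cfg below uses exactly this pattern for PHI
   arguments, and walk_gimple_op with verify_node_sharing for the
   operands of ordinary statements.  */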
5104 /* Called via walk_gimple_stmt. Verify tree sharing. */
5106 static tree
5107 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5109 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5110 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5113 static bool eh_error_found;
5114 bool
5115 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5116 hash_set<gimple *> *visited)
5118 if (!visited->contains (stmt))
5120 error ("dead STMT in EH table");
5121 debug_gimple_stmt (stmt);
5122 eh_error_found = true;
5124 return true;
5127 /* Verify that the block of location LOC is in BLOCKS. */
5129 static bool
5130 verify_location (hash_set<tree> *blocks, location_t loc)
5132 tree block = LOCATION_BLOCK (loc);
5133 if (block != NULL_TREE
5134 && !blocks->contains (block))
5136 error ("location references block not in block tree");
5137 return true;
5139 if (block != NULL_TREE)
5140 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5141 return false;
5144 /* Called via walk_tree. Verify that expressions have no blocks. */
5146 static tree
5147 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5149 if (!EXPR_P (*tp))
5151 *walk_subtrees = false;
5152 return NULL;
5155 location_t loc = EXPR_LOCATION (*tp);
5156 if (LOCATION_BLOCK (loc) != NULL)
5157 return *tp;
5159 return NULL;
5162 /* Called via walk_tree. Verify locations of expressions. */
5164 static tree
5165 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5167 hash_set<tree> *blocks = (hash_set<tree> *) data;
5169 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5171 tree t = DECL_DEBUG_EXPR (*tp);
5172 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5173 if (addr)
5174 return addr;
5176 if ((VAR_P (*tp)
5177 || TREE_CODE (*tp) == PARM_DECL
5178 || TREE_CODE (*tp) == RESULT_DECL)
5179 && DECL_HAS_VALUE_EXPR_P (*tp))
5181 tree t = DECL_VALUE_EXPR (*tp);
5182 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5183 if (addr)
5184 return addr;
5187 if (!EXPR_P (*tp))
5189 *walk_subtrees = false;
5190 return NULL;
5193 location_t loc = EXPR_LOCATION (*tp);
5194 if (verify_location (blocks, loc))
5195 return *tp;
5197 return NULL;
5200 /* Called via walk_gimple_op. Verify locations of expressions. */
5202 static tree
5203 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5205 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5206 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5209 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5211 static void
5212 collect_subblocks (hash_set<tree> *blocks, tree block)
5214 tree t;
5215 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5217 blocks->add (t);
5218 collect_subblocks (blocks, t);
5222 /* Verify the GIMPLE statements in the CFG of FN. */
5224 DEBUG_FUNCTION void
5225 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5227 basic_block bb;
5228 bool err = false;
5230 timevar_push (TV_TREE_STMT_VERIFY);
5231 hash_set<void *> visited;
5232 hash_set<gimple *> visited_stmts;
5234 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5235 hash_set<tree> blocks;
5236 if (DECL_INITIAL (fn->decl))
5238 blocks.add (DECL_INITIAL (fn->decl));
5239 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5242 FOR_EACH_BB_FN (bb, fn)
5244 gimple_stmt_iterator gsi;
5246 for (gphi_iterator gpi = gsi_start_phis (bb);
5247 !gsi_end_p (gpi);
5248 gsi_next (&gpi))
5250 gphi *phi = gpi.phi ();
5251 bool err2 = false;
5252 unsigned i;
5254 visited_stmts.add (phi);
5256 if (gimple_bb (phi) != bb)
5258 error ("gimple_bb (phi) is set to a wrong basic block");
5259 err2 = true;
5262 err2 |= verify_gimple_phi (phi);
5264 /* Only PHI arguments have locations. */
5265 if (gimple_location (phi) != UNKNOWN_LOCATION)
5267 error ("PHI node with location");
5268 err2 = true;
5271 for (i = 0; i < gimple_phi_num_args (phi); i++)
5273 tree arg = gimple_phi_arg_def (phi, i);
5274 tree addr = walk_tree (&arg, verify_node_sharing_1,
5275 &visited, NULL);
5276 if (addr)
5278 error ("incorrect sharing of tree nodes");
5279 debug_generic_expr (addr);
5280 err2 |= true;
5282 location_t loc = gimple_phi_arg_location (phi, i);
5283 if (virtual_operand_p (gimple_phi_result (phi))
5284 && loc != UNKNOWN_LOCATION)
5286 error ("virtual PHI with argument locations");
5287 err2 = true;
5289 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5290 if (addr)
5292 debug_generic_expr (addr);
5293 err2 = true;
5295 err2 |= verify_location (&blocks, loc);
5298 if (err2)
5299 debug_gimple_stmt (phi);
5300 err |= err2;
5303 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5305 gimple *stmt = gsi_stmt (gsi);
5306 bool err2 = false;
5307 struct walk_stmt_info wi;
5308 tree addr;
5309 int lp_nr;
5311 visited_stmts.add (stmt);
5313 if (gimple_bb (stmt) != bb)
5315 error ("gimple_bb (stmt) is set to a wrong basic block");
5316 err2 = true;
5319 err2 |= verify_gimple_stmt (stmt);
5320 err2 |= verify_location (&blocks, gimple_location (stmt));
5322 memset (&wi, 0, sizeof (wi));
5323 wi.info = (void *) &visited;
5324 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5325 if (addr)
5327 error ("incorrect sharing of tree nodes");
5328 debug_generic_expr (addr);
5329 err2 |= true;
5332 memset (&wi, 0, sizeof (wi));
5333 wi.info = (void *) &blocks;
5334 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5335 if (addr)
5337 debug_generic_expr (addr);
5338 err2 |= true;
5341 /* ??? Instead of not checking these stmts at all the walker
5342 should know its context via wi. */
5343 if (!is_gimple_debug (stmt)
5344 && !is_gimple_omp (stmt))
5346 memset (&wi, 0, sizeof (wi));
5347 addr = walk_gimple_op (stmt, verify_expr, &wi);
5348 if (addr)
5350 debug_generic_expr (addr);
5351 inform (gimple_location (stmt), "in statement");
5352 err2 |= true;
5356 /* If the statement is marked as part of an EH region, then it is
5357 expected that the statement could throw. Verify that when we
5358 have optimizations that simplify statements such that we prove
5359 that they cannot throw, that we update other data structures
5360 to match. */
5361 lp_nr = lookup_stmt_eh_lp (stmt);
5362 if (lp_nr > 0)
5364 if (!stmt_could_throw_p (stmt))
5366 if (verify_nothrow)
5368 error ("statement marked for throw, but doesn%'t");
5369 err2 |= true;
5372 else if (!gsi_one_before_end_p (gsi))
5374 error ("statement marked for throw in middle of block");
5375 err2 |= true;
5379 if (err2)
5380 debug_gimple_stmt (stmt);
5381 err |= err2;
5385 eh_error_found = false;
5386 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5387 if (eh_table)
5388 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5389 (&visited_stmts);
5391 if (err || eh_error_found)
5392 internal_error ("verify_gimple failed");
5394 verify_histograms ();
5395 timevar_pop (TV_TREE_STMT_VERIFY);
5399 /* Verifies that the flow information is OK. */
5401 static int
5402 gimple_verify_flow_info (void)
5404 int err = 0;
5405 basic_block bb;
5406 gimple_stmt_iterator gsi;
5407 gimple *stmt;
5408 edge e;
5409 edge_iterator ei;
5411 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5412 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5414 error ("ENTRY_BLOCK has IL associated with it");
5415 err = 1;
5418 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5419 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5421 error ("EXIT_BLOCK has IL associated with it");
5422 err = 1;
5425 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5426 if (e->flags & EDGE_FALLTHRU)
5428 error ("fallthru to exit from bb %d", e->src->index);
5429 err = 1;
5432 FOR_EACH_BB_FN (bb, cfun)
5434 bool found_ctrl_stmt = false;
5436 stmt = NULL;
5438 /* Skip labels on the start of basic block. */
5439 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5441 tree label;
5442 gimple *prev_stmt = stmt;
5444 stmt = gsi_stmt (gsi);
5446 if (gimple_code (stmt) != GIMPLE_LABEL)
5447 break;
5449 label = gimple_label_label (as_a <glabel *> (stmt));
5450 if (prev_stmt && DECL_NONLOCAL (label))
5452 error ("nonlocal label ");
5453 print_generic_expr (stderr, label);
5454 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5455 bb->index);
5456 err = 1;
5459 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5461 error ("EH landing pad label ");
5462 print_generic_expr (stderr, label);
5463 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5464 bb->index);
5465 err = 1;
5468 if (label_to_block (label) != bb)
5470 error ("label ");
5471 print_generic_expr (stderr, label);
5472 fprintf (stderr, " to block does not match in bb %d",
5473 bb->index);
5474 err = 1;
5477 if (decl_function_context (label) != current_function_decl)
5479 error ("label ");
5480 print_generic_expr (stderr, label);
5481 fprintf (stderr, " has incorrect context in bb %d",
5482 bb->index);
5483 err = 1;
5487 /* Verify that body of basic block BB is free of control flow. */
5488 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5490 gimple *stmt = gsi_stmt (gsi);
5492 if (found_ctrl_stmt)
5494 error ("control flow in the middle of basic block %d",
5495 bb->index);
5496 err = 1;
5499 if (stmt_ends_bb_p (stmt))
5500 found_ctrl_stmt = true;
5502 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5504 error ("label ");
5505 print_generic_expr (stderr, gimple_label_label (label_stmt));
5506 fprintf (stderr, " in the middle of basic block %d", bb->index);
5507 err = 1;
5511 gsi = gsi_last_bb (bb);
5512 if (gsi_end_p (gsi))
5513 continue;
5515 stmt = gsi_stmt (gsi);
5517 if (gimple_code (stmt) == GIMPLE_LABEL)
5518 continue;
5520 err |= verify_eh_edges (stmt);
5522 if (is_ctrl_stmt (stmt))
5524 FOR_EACH_EDGE (e, ei, bb->succs)
5525 if (e->flags & EDGE_FALLTHRU)
5527 error ("fallthru edge after a control statement in bb %d",
5528 bb->index);
5529 err = 1;
5533 if (gimple_code (stmt) != GIMPLE_COND)
5535 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
5536 after anything other than a GIMPLE_COND statement. */
5537 FOR_EACH_EDGE (e, ei, bb->succs)
5538 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5540 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5541 bb->index);
5542 err = 1;
5546 switch (gimple_code (stmt))
5548 case GIMPLE_COND:
5550 edge true_edge;
5551 edge false_edge;
5553 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5555 if (!true_edge
5556 || !false_edge
5557 || !(true_edge->flags & EDGE_TRUE_VALUE)
5558 || !(false_edge->flags & EDGE_FALSE_VALUE)
5559 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5560 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5561 || EDGE_COUNT (bb->succs) >= 3)
5563 error ("wrong outgoing edge flags at end of bb %d",
5564 bb->index);
5565 err = 1;
5568 break;
5570 case GIMPLE_GOTO:
5571 if (simple_goto_p (stmt))
5573 error ("explicit goto at end of bb %d", bb->index);
5574 err = 1;
5576 else
5578 /* FIXME. We should double check that the labels in the
5579 destination blocks have their address taken. */
5580 FOR_EACH_EDGE (e, ei, bb->succs)
5581 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5582 | EDGE_FALSE_VALUE))
5583 || !(e->flags & EDGE_ABNORMAL))
5585 error ("wrong outgoing edge flags at end of bb %d",
5586 bb->index);
5587 err = 1;
5590 break;
5592 case GIMPLE_CALL:
5593 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5594 break;
5595 /* fallthru */
5596 case GIMPLE_RETURN:
5597 if (!single_succ_p (bb)
5598 || (single_succ_edge (bb)->flags
5599 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5600 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5602 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5603 err = 1;
5605 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5607 error ("return edge does not point to exit in bb %d",
5608 bb->index);
5609 err = 1;
5611 break;
5613 case GIMPLE_SWITCH:
5615 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5616 tree prev;
5617 edge e;
5618 size_t i, n;
5620 n = gimple_switch_num_labels (switch_stmt);
5622 /* Mark all the destination basic blocks. */
5623 for (i = 0; i < n; ++i)
5625 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5626 basic_block label_bb = label_to_block (lab);
5627 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5628 label_bb->aux = (void *)1;
5631 /* Verify that the case labels are sorted. */
5632 prev = gimple_switch_label (switch_stmt, 0);
5633 for (i = 1; i < n; ++i)
5635 tree c = gimple_switch_label (switch_stmt, i);
5636 if (!CASE_LOW (c))
5638 error ("found default case not at the start of "
5639 "case vector");
5640 err = 1;
5641 continue;
5643 if (CASE_LOW (prev)
5644 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5646 error ("case labels not sorted: ");
5647 print_generic_expr (stderr, prev);
5648 fprintf (stderr," is greater than ");
5649 print_generic_expr (stderr, c);
5650 fprintf (stderr," but comes before it.\n");
5651 err = 1;
5653 prev = c;
5655 /* VRP will remove the default case if it can prove it will
5656 never be executed. So do not verify there always exists
5657 a default case here. */
5659 FOR_EACH_EDGE (e, ei, bb->succs)
5661 if (!e->dest->aux)
5663 error ("extra outgoing edge %d->%d",
5664 bb->index, e->dest->index);
5665 err = 1;
5668 e->dest->aux = (void *)2;
5669 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5670 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5672 error ("wrong outgoing edge flags at end of bb %d",
5673 bb->index);
5674 err = 1;
5678 /* Check that we have all of them. */
5679 for (i = 0; i < n; ++i)
5681 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5682 basic_block label_bb = label_to_block (lab);
5684 if (label_bb->aux != (void *)2)
5686 error ("missing edge %i->%i", bb->index, label_bb->index);
5687 err = 1;
5691 FOR_EACH_EDGE (e, ei, bb->succs)
5692 e->dest->aux = (void *)0;
5694 break;
5696 case GIMPLE_EH_DISPATCH:
5697 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5698 break;
5700 default:
5701 break;
5705 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5706 verify_dominators (CDI_DOMINATORS);
5708 return err;
5712 /* Updates phi nodes after creating a forwarder block joined
5713 by edge FALLTHRU. */
5715 static void
5716 gimple_make_forwarder_block (edge fallthru)
5718 edge e;
5719 edge_iterator ei;
5720 basic_block dummy, bb;
5721 tree var;
5722 gphi_iterator gsi;
5724 dummy = fallthru->src;
5725 bb = fallthru->dest;
5727 if (single_pred_p (bb))
5728 return;
5730 /* If we redirected a branch we must create new PHI nodes at the
5731 start of BB. */
5732 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5734 gphi *phi, *new_phi;
5736 phi = gsi.phi ();
5737 var = gimple_phi_result (phi);
5738 new_phi = create_phi_node (var, bb);
5739 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5740 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5741 UNKNOWN_LOCATION);
5744 /* Add the arguments we have stored on edges. */
5745 FOR_EACH_EDGE (e, ei, bb->preds)
5747 if (e == fallthru)
5748 continue;
5750 flush_pending_stmts (e);
5755 /* Return a non-special label at the head of basic block BB.
5756 Create one if it doesn't exist. */
5758 tree
5759 gimple_block_label (basic_block bb)
5761 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5762 bool first = true;
5763 tree label;
5764 glabel *stmt;
5766 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5768 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5769 if (!stmt)
5770 break;
5771 label = gimple_label_label (stmt);
5772 if (!DECL_NONLOCAL (label))
5774 if (!first)
5775 gsi_move_before (&i, &s);
5776 return label;
5780 label = create_artificial_label (UNKNOWN_LOCATION);
5781 stmt = gimple_build_label (label);
5782 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5783 return label;
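/* Editorial note, not from the GCC sources: a typical use, as in the
   switch redirection further down, is to obtain an explicit jump
   target for a control statement:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;   <- retarget one case to DEST

   reusing an existing non-special label when DEST already starts with
   one and creating an artificial label otherwise.  */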
5787 /* Attempt to perform edge redirection by replacing a possibly complex
5788 jump instruction by a goto or by removing the jump completely.
5789 This can apply only if all edges now point to the same block. The
5790 parameters and return values are equivalent to
5791 redirect_edge_and_branch. */
5793 static edge
5794 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5796 basic_block src = e->src;
5797 gimple_stmt_iterator i;
5798 gimple *stmt;
5800 /* We can replace or remove a complex jump only when we have exactly
5801 two edges. */
5802 if (EDGE_COUNT (src->succs) != 2
5803 /* Verify that all targets will be TARGET. Specifically, the
5804 edge that is not E must also go to TARGET. */
5805 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5806 return NULL;
5808 i = gsi_last_bb (src);
5809 if (gsi_end_p (i))
5810 return NULL;
5812 stmt = gsi_stmt (i);
5814 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5816 gsi_remove (&i, true);
5817 e = ssa_redirect_edge (e, target);
5818 e->flags = EDGE_FALLTHRU;
5819 return e;
5822 return NULL;
5826 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5827 edge representing the redirected branch. */
5829 static edge
5830 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5832 basic_block bb = e->src;
5833 gimple_stmt_iterator gsi;
5834 edge ret;
5835 gimple *stmt;
5837 if (e->flags & EDGE_ABNORMAL)
5838 return NULL;
5840 if (e->dest == dest)
5841 return NULL;
5843 if (e->flags & EDGE_EH)
5844 return redirect_eh_edge (e, dest);
5846 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5848 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5849 if (ret)
5850 return ret;
5853 gsi = gsi_last_bb (bb);
5854 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5856 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5858 case GIMPLE_COND:
5859 /* For COND_EXPR, we only need to redirect the edge. */
5860 break;
5862 case GIMPLE_GOTO:
5863 /* No non-abnormal edges should lead from a non-simple goto, and
5864 simple ones should be represented implicitly. */
5865 gcc_unreachable ();
5867 case GIMPLE_SWITCH:
5869 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5870 tree label = gimple_block_label (dest);
5871 tree cases = get_cases_for_edge (e, switch_stmt);
5873 /* If we have a list of cases associated with E, then use it
5874 as it's a lot faster than walking the entire case vector. */
5875 if (cases)
5877 edge e2 = find_edge (e->src, dest);
5878 tree last, first;
5880 first = cases;
5881 while (cases)
5883 last = cases;
5884 CASE_LABEL (cases) = label;
5885 cases = CASE_CHAIN (cases);
5888 /* If there was already an edge in the CFG, then we need
5889 to move all the cases associated with E to E2. */
5890 if (e2)
5892 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5894 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5895 CASE_CHAIN (cases2) = first;
5897 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5899 else
5901 size_t i, n = gimple_switch_num_labels (switch_stmt);
5903 for (i = 0; i < n; i++)
5905 tree elt = gimple_switch_label (switch_stmt, i);
5906 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5907 CASE_LABEL (elt) = label;
5911 break;
5913 case GIMPLE_ASM:
5915 gasm *asm_stmt = as_a <gasm *> (stmt);
5916 int i, n = gimple_asm_nlabels (asm_stmt);
5917 tree label = NULL;
5919 for (i = 0; i < n; ++i)
5921 tree cons = gimple_asm_label_op (asm_stmt, i);
5922 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5924 if (!label)
5925 label = gimple_block_label (dest);
5926 TREE_VALUE (cons) = label;
5930 /* If we didn't find any label matching the former edge in the
5931 asm labels, we must be redirecting the fallthrough
5932 edge. */
5933 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5935 break;
5937 case GIMPLE_RETURN:
5938 gsi_remove (&gsi, true);
5939 e->flags |= EDGE_FALLTHRU;
5940 break;
5942 case GIMPLE_OMP_RETURN:
5943 case GIMPLE_OMP_CONTINUE:
5944 case GIMPLE_OMP_SECTIONS_SWITCH:
5945 case GIMPLE_OMP_FOR:
5946 /* The edges from OMP constructs can be simply redirected. */
5947 break;
5949 case GIMPLE_EH_DISPATCH:
5950 if (!(e->flags & EDGE_FALLTHRU))
5951 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5952 break;
5954 case GIMPLE_TRANSACTION:
5955 if (e->flags & EDGE_TM_ABORT)
5956 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5957 gimple_block_label (dest));
5958 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5959 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5960 gimple_block_label (dest));
5961 else
5962 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5963 gimple_block_label (dest));
5964 break;
5966 default:
5967 /* Otherwise it must be a fallthru edge, and we don't need to
5968 do anything besides redirecting it. */
5969 gcc_assert (e->flags & EDGE_FALLTHRU);
5970 break;
5973 /* Update/insert PHI nodes as necessary. */
5975 /* Now update the edges in the CFG. */
5976 e = ssa_redirect_edge (e, dest);
5978 return e;
5981 /* Returns true if it is possible to remove edge E by redirecting
5982 it to the destination of the other edge from E->src. */
5984 static bool
5985 gimple_can_remove_branch_p (const_edge e)
5987 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5988 return false;
5990 return true;
5993 /* Simple wrapper, as we can always redirect fallthru edges. */
5995 static basic_block
5996 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5998 e = gimple_redirect_edge_and_branch (e, dest);
5999 gcc_assert (e);
6001 return NULL;
6005 /* Splits basic block BB after statement STMT (but at least after the
6006 labels). If STMT is NULL, BB is split just after the labels. */
6008 static basic_block
6009 gimple_split_block (basic_block bb, void *stmt)
6011 gimple_stmt_iterator gsi;
6012 gimple_stmt_iterator gsi_tgt;
6013 gimple_seq list;
6014 basic_block new_bb;
6015 edge e;
6016 edge_iterator ei;
6018 new_bb = create_empty_bb (bb);
6020 /* Redirect the outgoing edges. */
6021 new_bb->succs = bb->succs;
6022 bb->succs = NULL;
6023 FOR_EACH_EDGE (e, ei, new_bb->succs)
6024 e->src = new_bb;
6026 /* Get a stmt iterator pointing to the first stmt to move. */
6027 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6028 gsi = gsi_after_labels (bb);
6029 else
6031 gsi = gsi_for_stmt ((gimple *) stmt);
6032 gsi_next (&gsi);
6035 /* Move everything from GSI to the new basic block. */
6036 if (gsi_end_p (gsi))
6037 return new_bb;
6039 /* Split the statement list; avoid re-creating containers, as that
6040 brings ugly quadratic memory consumption in the inliner.
6041 (We are still quadratic since we need to update stmt BB pointers,
6042 sadly.) */
6043 gsi_split_seq_before (&gsi, &list);
6044 set_bb_seq (new_bb, list);
6045 for (gsi_tgt = gsi_start (list);
6046 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6047 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6049 return new_bb;
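/* Editorial note, not from the GCC sources: callers normally go
   through the CFG hook split_block, which wraps this function and
   returns the connecting edge:

     edge e = split_block (bb, stmt);   <- BB now ends just after STMT
     basic_block rest = e->dest;        <- the freshly created block

   gimple_split_block_before_cond_jump below is one such caller.  */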
6053 /* Moves basic block BB after block AFTER. */
6055 static bool
6056 gimple_move_block_after (basic_block bb, basic_block after)
6058 if (bb->prev_bb == after)
6059 return true;
6061 unlink_block (bb);
6062 link_block (bb, after);
6064 return true;
6068 /* Return TRUE if block BB has no executable statements, otherwise return
6069 FALSE. */
6071 static bool
6072 gimple_empty_block_p (basic_block bb)
6074 /* BB must have no executable statements. */
6075 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6076 if (phi_nodes (bb))
6077 return false;
6078 if (gsi_end_p (gsi))
6079 return true;
6080 if (is_gimple_debug (gsi_stmt (gsi)))
6081 gsi_next_nondebug (&gsi);
6082 return gsi_end_p (gsi);
6086 /* Split a basic block if it ends with a conditional branch and if the
6087 other part of the block is not empty. */
6089 static basic_block
6090 gimple_split_block_before_cond_jump (basic_block bb)
6092 gimple *last, *split_point;
6093 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6094 if (gsi_end_p (gsi))
6095 return NULL;
6096 last = gsi_stmt (gsi);
6097 if (gimple_code (last) != GIMPLE_COND
6098 && gimple_code (last) != GIMPLE_SWITCH)
6099 return NULL;
6100 gsi_prev (&gsi);
6101 split_point = gsi_stmt (gsi);
6102 return split_block (bb, split_point)->dest;
6106 /* Return true if basic_block can be duplicated. */
6108 static bool
6109 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6111 return true;
6114 /* Create a duplicate of the basic block BB. NOTE: This does not
6115 preserve SSA form. */
6117 static basic_block
6118 gimple_duplicate_bb (basic_block bb)
6120 basic_block new_bb;
6121 gimple_stmt_iterator gsi_tgt;
6123 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6125 /* Copy the PHI nodes. We ignore PHI node arguments here because
6126 the incoming edges have not been setup yet. */
6127 for (gphi_iterator gpi = gsi_start_phis (bb);
6128 !gsi_end_p (gpi);
6129 gsi_next (&gpi))
6131 gphi *phi, *copy;
6132 phi = gpi.phi ();
6133 copy = create_phi_node (NULL_TREE, new_bb);
6134 create_new_def_for (gimple_phi_result (phi), copy,
6135 gimple_phi_result_ptr (copy));
6136 gimple_set_uid (copy, gimple_uid (phi));
6139 gsi_tgt = gsi_start_bb (new_bb);
6140 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6141 !gsi_end_p (gsi);
6142 gsi_next (&gsi))
6144 def_operand_p def_p;
6145 ssa_op_iter op_iter;
6146 tree lhs;
6147 gimple *stmt, *copy;
6149 stmt = gsi_stmt (gsi);
6150 if (gimple_code (stmt) == GIMPLE_LABEL)
6151 continue;
6153 /* Don't duplicate label debug stmts. */
6154 if (gimple_debug_bind_p (stmt)
6155 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6156 == LABEL_DECL)
6157 continue;
6159 /* Create a new copy of STMT and duplicate STMT's virtual
6160 operands. */
6161 copy = gimple_copy (stmt);
6162 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6164 maybe_duplicate_eh_stmt (copy, stmt);
6165 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6167 /* When copying around a stmt writing into a local non-user
6168 aggregate, make sure it won't share stack slot with other
6169 vars. */
6170 lhs = gimple_get_lhs (stmt);
6171 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6173 tree base = get_base_address (lhs);
6174 if (base
6175 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6176 && DECL_IGNORED_P (base)
6177 && !TREE_STATIC (base)
6178 && !DECL_EXTERNAL (base)
6179 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6180 DECL_NONSHAREABLE (base) = 1;
6183 /* Create new names for all the definitions created by COPY and
6184 add replacement mappings for each new name. */
6185 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6186 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6189 return new_bb;
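/* Editorial note, not from the GCC sources: since gimple_duplicate_bb
   leaves the PHI nodes of the copy without arguments, a caller is
   expected to wire up the incoming edges and complete the SSA update
   itself, roughly:

     basic_block copy = duplicate_block (bb, NULL, NULL);  <- CFG-hook
                                                              wrapper
     add_phi_args_after_copy_bb (copy);                    <- see below
     update_ssa (TODO_update_ssa);

   with the exact sequence depending on how the surrounding region is
   being copied.  */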
6192 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6194 static void
6195 add_phi_args_after_copy_edge (edge e_copy)
6197 basic_block bb, bb_copy = e_copy->src, dest;
6198 edge e;
6199 edge_iterator ei;
6200 gphi *phi, *phi_copy;
6201 tree def;
6202 gphi_iterator psi, psi_copy;
6204 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6205 return;
6207 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6209 if (e_copy->dest->flags & BB_DUPLICATED)
6210 dest = get_bb_original (e_copy->dest);
6211 else
6212 dest = e_copy->dest;
6214 e = find_edge (bb, dest);
6215 if (!e)
6217 /* During loop unrolling the target of the latch edge is copied.
6218 In this case we are not looking for the edge to DEST, but for
6219 the edge to the duplicated block whose original was DEST. */
6220 FOR_EACH_EDGE (e, ei, bb->succs)
6222 if ((e->dest->flags & BB_DUPLICATED)
6223 && get_bb_original (e->dest) == dest)
6224 break;
6227 gcc_assert (e != NULL);
6230 for (psi = gsi_start_phis (e->dest),
6231 psi_copy = gsi_start_phis (e_copy->dest);
6232 !gsi_end_p (psi);
6233 gsi_next (&psi), gsi_next (&psi_copy))
6235 phi = psi.phi ();
6236 phi_copy = psi_copy.phi ();
6237 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6238 add_phi_arg (phi_copy, def, e_copy,
6239 gimple_phi_arg_location_from_edge (phi, e));
6244 /* Basic block BB_COPY was created by code duplication. Add phi node
6245 arguments for edges going out of BB_COPY. The blocks that were
6246 duplicated have BB_DUPLICATED set. */
6248 void
6249 add_phi_args_after_copy_bb (basic_block bb_copy)
6251 edge e_copy;
6252 edge_iterator ei;
6254 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6256 add_phi_args_after_copy_edge (e_copy);
6260 /* Blocks in REGION_COPY array of length N_REGION were created by
6261 duplication of basic blocks. Add phi node arguments for edges
6262 going from these blocks. If E_COPY is not NULL, also add
6263 phi node arguments for its destination. */
6265 void
6266 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6267 edge e_copy)
6269 unsigned i;
6271 for (i = 0; i < n_region; i++)
6272 region_copy[i]->flags |= BB_DUPLICATED;
6274 for (i = 0; i < n_region; i++)
6275 add_phi_args_after_copy_bb (region_copy[i]);
6276 if (e_copy)
6277 add_phi_args_after_copy_edge (e_copy);
6279 for (i = 0; i < n_region; i++)
6280 region_copy[i]->flags &= ~BB_DUPLICATED;
6283 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6284 important exit edge EXIT. By important we mean that no SSA name defined
6285 inside region is live over the other exit edges of the region. All entry
6286 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6287 to the duplicate of the region. Dominance and loop information is
6288 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6289 UPDATE_DOMINANCE is false then we assume that the caller will update the
6290 dominance information after calling this function. The new basic
6291 blocks are stored to REGION_COPY in the same order as in REGION,
6292 provided that REGION_COPY is not NULL.
6293 The function returns false if it is unable to copy the region,
6294 true otherwise. */
6296 bool
6297 gimple_duplicate_sese_region (edge entry, edge exit,
6298 basic_block *region, unsigned n_region,
6299 basic_block *region_copy,
6300 bool update_dominance)
6302 unsigned i;
6303 bool free_region_copy = false, copying_header = false;
6304 struct loop *loop = entry->dest->loop_father;
6305 edge exit_copy;
6306 vec<basic_block> doms = vNULL;
6307 edge redirected;
6308 profile_count total_count = profile_count::uninitialized ();
6309 profile_count entry_count = profile_count::uninitialized ();
6311 if (!can_copy_bbs_p (region, n_region))
6312 return false;
6314 /* Some sanity checking. Note that we do not check for all possible
6315 misuses of the functions. I.e. if you ask to copy something weird,
6316 it will work, but the state of structures probably will not be
6317 correct. */
6318 for (i = 0; i < n_region; i++)
6320 /* We do not handle subloops, i.e. all the blocks must belong to the
6321 same loop. */
6322 if (region[i]->loop_father != loop)
6323 return false;
6325 if (region[i] != entry->dest
6326 && region[i] == loop->header)
6327 return false;
6330 /* In case the function is used for loop header copying (which is the primary
6331 use), ensure that EXIT and its copy will be new latch and entry edges. */
6332 if (loop->header == entry->dest)
6334 copying_header = true;
6336 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6337 return false;
6339 for (i = 0; i < n_region; i++)
6340 if (region[i] != exit->src
6341 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6342 return false;
6345 initialize_original_copy_tables ();
6347 if (copying_header)
6348 set_loop_copy (loop, loop_outer (loop));
6349 else
6350 set_loop_copy (loop, loop);
6352 if (!region_copy)
6354 region_copy = XNEWVEC (basic_block, n_region);
6355 free_region_copy = true;
6358 /* Record blocks outside the region that are dominated by something
6359 inside. */
6360 if (update_dominance)
6362 doms.create (0);
6363 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6366 if (entry->dest->count.initialized_p ())
6368 total_count = entry->dest->count;
6369 entry_count = entry->count ();
6370 /* Fix up corner cases to avoid division by zero or the creation of
6371 negative frequencies. */
6372 if (entry_count > total_count)
6373 entry_count = total_count;
6376 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6377 split_edge_bb_loc (entry), update_dominance);
6378 if (total_count.initialized_p () && entry_count.initialized_p ())
6380 scale_bbs_frequencies_profile_count (region, n_region,
6381 total_count - entry_count,
6382 total_count);
6383 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6384 total_count);
6387 if (copying_header)
6389 loop->header = exit->dest;
6390 loop->latch = exit->src;
6393 /* Redirect the entry and add the phi node arguments. */
6394 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6395 gcc_assert (redirected != NULL);
6396 flush_pending_stmts (entry);
6398 /* Concerning updating of dominators: We must recount dominators
6399 for entry block and its copy. Anything that is outside of the
6400 region, but was dominated by something inside needs recounting as
6401 well. */
6402 if (update_dominance)
6404 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6405 doms.safe_push (get_bb_original (entry->dest));
6406 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6407 doms.release ();
6410 /* Add the other PHI node arguments. */
6411 add_phi_args_after_copy (region_copy, n_region, NULL);
6413 if (free_region_copy)
6414 free (region_copy);
6416 free_original_copy_tables ();
6417 return true;
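/* A hedged sketch (not part of the original file): how a
   loop-header-copying caller might drive gimple_duplicate_sese_region.
   LOOP, EXIT, BBS and N stand for state such a caller has already
   computed; the helper name is made up.  */

static bool
duplicate_loop_header_sketch (struct loop *loop, edge exit,
			      basic_block *bbs, unsigned n)
{
  basic_block *copies = XNEWVEC (basic_block, n);
  /* All entry edges must reach ENTRY->dest, so the preheader edge is
     the natural choice of ENTRY for header copying.  */
  edge entry = loop_preheader_edge (loop);
  bool ok = gimple_duplicate_sese_region (entry, exit, bbs, n,
					  copies, true);
  if (ok)
    /* The duplication updates dominators but not the SSA web; that is
       explicitly left to the caller.  */
    update_ssa (TODO_update_ssa);
  free (copies);
  return ok;
}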
6420 /* Checks if BB is part of the region defined by N_REGION BBS. */
6421 static bool
6422 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6424 unsigned int n;
6426 for (n = 0; n < n_region; n++)
6428 if (bb == bbs[n])
6429 return true;
6431 return false;
6434 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6435 are stored to REGION_COPY in the same order in which they appear
6436 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6437 the region, EXIT an exit from it. The condition guarding EXIT
6438 is moved to ENTRY. Returns true if duplication succeeds, false
6439 otherwise.
6441 For example,
6443 some_code;
6444 if (cond)
6445 ...;
6446 else
6447 ...;
6449 is transformed to
6451 if (cond)
6452 {
6453 some_code;
6454 ...;
6455 }
6456 else
6457 {
6458 some_code;
6459 ...;
6460 }
6461 */
6463 bool
6464 gimple_duplicate_sese_tail (edge entry, edge exit,
6465 basic_block *region, unsigned n_region,
6466 basic_block *region_copy)
6468 unsigned i;
6469 bool free_region_copy = false;
6470 struct loop *loop = exit->dest->loop_father;
6471 struct loop *orig_loop = entry->dest->loop_father;
6472 basic_block switch_bb, entry_bb, nentry_bb;
6473 vec<basic_block> doms;
6474 profile_count total_count = profile_count::uninitialized (),
6475 exit_count = profile_count::uninitialized ();
6476 edge exits[2], nexits[2], e;
6477 gimple_stmt_iterator gsi;
6478 gimple *cond_stmt;
6479 edge sorig, snew;
6480 basic_block exit_bb;
6481 gphi_iterator psi;
6482 gphi *phi;
6483 tree def;
6484 struct loop *target, *aloop, *cloop;
6486 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6487 exits[0] = exit;
6488 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6490 if (!can_copy_bbs_p (region, n_region))
6491 return false;
6493 initialize_original_copy_tables ();
6494 set_loop_copy (orig_loop, loop);
6496 target = loop;
6497 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6499 if (bb_part_of_region_p (aloop->header, region, n_region))
6501 cloop = duplicate_loop (aloop, target);
6502 duplicate_subloops (aloop, cloop);
6506 if (!region_copy)
6508 region_copy = XNEWVEC (basic_block, n_region);
6509 free_region_copy = true;
6512 gcc_assert (!need_ssa_update_p (cfun));
6514 /* Record blocks outside the region that are dominated by something
6515 inside. */
6516 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6518 total_count = exit->src->count;
6519 exit_count = exit->count ();
6520 /* Fix up corner cases, to avoid division by zero or creation of negative
6521 frequencies. */
6522 if (exit_count > total_count)
6523 exit_count = total_count;
6525 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6526 split_edge_bb_loc (exit), true);
6527 if (total_count.initialized_p () && exit_count.initialized_p ())
6529 scale_bbs_frequencies_profile_count (region, n_region,
6530 total_count - exit_count,
6531 total_count);
6532 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6533 total_count);
6536 /* Create the switch block, and put the exit condition to it. */
6537 entry_bb = entry->dest;
6538 nentry_bb = get_bb_copy (entry_bb);
6539 if (!last_stmt (entry->src)
6540 || !stmt_ends_bb_p (last_stmt (entry->src)))
6541 switch_bb = entry->src;
6542 else
6543 switch_bb = split_edge (entry);
6544 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6546 gsi = gsi_last_bb (switch_bb);
6547 cond_stmt = last_stmt (exit->src);
6548 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6549 cond_stmt = gimple_copy (cond_stmt);
6551 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6553 sorig = single_succ_edge (switch_bb);
6554 sorig->flags = exits[1]->flags;
6555 sorig->probability = exits[1]->probability;
6556 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6557 snew->probability = exits[0]->probability;
6560 /* Register the new edge from SWITCH_BB in loop exit lists. */
6561 rescan_loop_exit (snew, true, false);
6563 /* Add the PHI node arguments. */
6564 add_phi_args_after_copy (region_copy, n_region, snew);
6566 /* Get rid of now superfluous conditions and associated edges (and phi node
6567 arguments). */
6568 exit_bb = exit->dest;
6570 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6571 PENDING_STMT (e) = NULL;
6573 /* The latch of ORIG_LOOP was copied, and so was the backedge
6574 to the original header. We redirect this backedge to EXIT_BB. */
6575 for (i = 0; i < n_region; i++)
6576 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6578 gcc_assert (single_succ_edge (region_copy[i]));
6579 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6580 PENDING_STMT (e) = NULL;
6581 for (psi = gsi_start_phis (exit_bb);
6582 !gsi_end_p (psi);
6583 gsi_next (&psi))
6585 phi = psi.phi ();
6586 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6587 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6590 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6591 PENDING_STMT (e) = NULL;
6593 /* Anything that is outside of the region, but was dominated by something
6594 inside needs to update dominance info. */
6595 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6596 doms.release ();
6597 /* Update the SSA web. */
6598 update_ssa (TODO_update_ssa);
6600 if (free_region_copy)
6601 free (region_copy);
6603 free_original_copy_tables ();
6604 return true;
6607 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6608 adding blocks when the dominator traversal reaches EXIT. This
6609 function silently assumes that ENTRY strictly dominates EXIT. */
6611 void
6612 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6613 vec<basic_block> *bbs_p)
6615 basic_block son;
6617 for (son = first_dom_son (CDI_DOMINATORS, entry);
6618 son;
6619 son = next_dom_son (CDI_DOMINATORS, son))
6621 bbs_p->safe_push (son);
6622 if (son != exit)
6623 gather_blocks_in_sese_region (son, exit, bbs_p);
6627 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6628 The duplicates are recorded in VARS_MAP. */
6630 static void
6631 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6632 tree to_context)
6634 tree t = *tp, new_t;
6635 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6637 if (DECL_CONTEXT (t) == to_context)
6638 return;
6640 bool existed;
6641 tree &loc = vars_map->get_or_insert (t, &existed);
6643 if (!existed)
6645 if (SSA_VAR_P (t))
6647 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6648 add_local_decl (f, new_t);
6650 else
6652 gcc_assert (TREE_CODE (t) == CONST_DECL);
6653 new_t = copy_node (t);
6655 DECL_CONTEXT (new_t) = to_context;
6657 loc = new_t;
6659 else
6660 new_t = loc;
6662 *tp = new_t;
6666 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6667 VARS_MAP maps old ssa names and var_decls to the new ones. */
6669 static tree
6670 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6671 tree to_context)
6673 tree new_name;
6675 gcc_assert (!virtual_operand_p (name));
6677 tree *loc = vars_map->get (name);
6679 if (!loc)
6681 tree decl = SSA_NAME_VAR (name);
6682 if (decl)
6684 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6685 replace_by_duplicate_decl (&decl, vars_map, to_context);
6686 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6687 decl, SSA_NAME_DEF_STMT (name));
6689 else
6690 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6691 name, SSA_NAME_DEF_STMT (name));
6693 /* Now that we've used the def stmt to define new_name, make sure it
6694 doesn't define name anymore. */
6695 SSA_NAME_DEF_STMT (name) = NULL;
6697 vars_map->put (name, new_name);
6699 else
6700 new_name = *loc;
6702 return new_name;
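/* A hedged sketch of the memoization contract above (all identifiers
   are illustrative): requesting a replacement for the same name twice
   against one map must yield the same SSA name, because the first
   result is stored in the map.  */

static void
replace_ssa_name_sketch (tree name, tree dest_fn_decl)
{
  hash_map<tree, tree> map;
  tree n1 = replace_ssa_name (name, &map, dest_fn_decl);
  tree n2 = replace_ssa_name (name, &map, dest_fn_decl);
  gcc_assert (n1 == n2);
}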
6705 struct move_stmt_d
6707 tree orig_block;
6708 tree new_block;
6709 tree from_context;
6710 tree to_context;
6711 hash_map<tree, tree> *vars_map;
6712 htab_t new_label_map;
6713 hash_map<void *, void *> *eh_map;
6714 bool remap_decls_p;
6717 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6718 contained in *TP if it was previously ORIG_BLOCK, and change the
6719 DECL_CONTEXT of every local variable referenced in *TP. */
6721 static tree
6722 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6724 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6725 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6726 tree t = *tp;
6728 if (EXPR_P (t))
6730 tree block = TREE_BLOCK (t);
6731 if (block == NULL_TREE)
6733 else if (block == p->orig_block
6734 || p->orig_block == NULL_TREE)
6735 TREE_SET_BLOCK (t, p->new_block);
6736 else if (flag_checking)
6738 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6739 block = BLOCK_SUPERCONTEXT (block);
6740 gcc_assert (block == p->orig_block);
6743 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6745 if (TREE_CODE (t) == SSA_NAME)
6746 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6747 else if (TREE_CODE (t) == PARM_DECL
6748 && gimple_in_ssa_p (cfun))
6749 *tp = *(p->vars_map->get (t));
6750 else if (TREE_CODE (t) == LABEL_DECL)
6752 if (p->new_label_map)
6754 struct tree_map in, *out;
6755 in.base.from = t;
6756 out = (struct tree_map *)
6757 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6758 if (out)
6759 *tp = t = out->to;
6762 /* For FORCED_LABELs we can end up with references from other
6763 functions if some SESE regions are outlined. It is UB to
6764 jump in between them, but they could be used just for printing
6765 addresses etc. In that case, DECL_CONTEXT on the label should
6766 be the function containing the glabel stmt with that LABEL_DECL,
6767 rather than whichever function a reference to the label was last
6768 seen in. */
6769 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6770 DECL_CONTEXT (t) = p->to_context;
6772 else if (p->remap_decls_p)
6774 /* Replace T with its duplicate. T should no longer appear in the
6775 parent function, so this looks wasteful; however, it may appear
6776 in referenced_vars, and more importantly, as virtual operands of
6777 statements, and in alias lists of other variables. It would be
6778 quite difficult to expunge it from all those places. ??? It might
6779 suffice to do this for addressable variables. */
6780 if ((VAR_P (t) && !is_global_var (t))
6781 || TREE_CODE (t) == CONST_DECL)
6782 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6784 *walk_subtrees = 0;
6786 else if (TYPE_P (t))
6787 *walk_subtrees = 0;
6789 return NULL_TREE;
6792 /* Helper for move_stmt_r. Given an EH region number for the source
6793 function, map that to the duplicate EH region number in the dest. */
6795 static int
6796 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6798 eh_region old_r, new_r;
6800 old_r = get_eh_region_from_number (old_nr);
6801 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6803 return new_r->index;
6806 /* Similar, but operate on INTEGER_CSTs. */
6808 static tree
6809 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6811 int old_nr, new_nr;
6813 old_nr = tree_to_shwi (old_t_nr);
6814 new_nr = move_stmt_eh_region_nr (old_nr, p);
6816 return build_int_cst (integer_type_node, new_nr);
6819 /* Like move_stmt_op, but for gimple statements.
6821 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6822 contained in the current statement in *GSI_P and change the
6823 DECL_CONTEXT of every local variable referenced in the current
6824 statement. */
6826 static tree
6827 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6828 struct walk_stmt_info *wi)
6830 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6831 gimple *stmt = gsi_stmt (*gsi_p);
6832 tree block = gimple_block (stmt);
6834 if (block == p->orig_block
6835 || (p->orig_block == NULL_TREE
6836 && block != NULL_TREE))
6837 gimple_set_block (stmt, p->new_block);
6839 switch (gimple_code (stmt))
6841 case GIMPLE_CALL:
6842 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6844 tree r, fndecl = gimple_call_fndecl (stmt);
6845 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6846 switch (DECL_FUNCTION_CODE (fndecl))
6848 case BUILT_IN_EH_COPY_VALUES:
6849 r = gimple_call_arg (stmt, 1);
6850 r = move_stmt_eh_region_tree_nr (r, p);
6851 gimple_call_set_arg (stmt, 1, r);
6852 /* FALLTHRU */
6854 case BUILT_IN_EH_POINTER:
6855 case BUILT_IN_EH_FILTER:
6856 r = gimple_call_arg (stmt, 0);
6857 r = move_stmt_eh_region_tree_nr (r, p);
6858 gimple_call_set_arg (stmt, 0, r);
6859 break;
6861 default:
6862 break;
6865 break;
6867 case GIMPLE_RESX:
6869 gresx *resx_stmt = as_a <gresx *> (stmt);
6870 int r = gimple_resx_region (resx_stmt);
6871 r = move_stmt_eh_region_nr (r, p);
6872 gimple_resx_set_region (resx_stmt, r);
6874 break;
6876 case GIMPLE_EH_DISPATCH:
6878 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6879 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6880 r = move_stmt_eh_region_nr (r, p);
6881 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6883 break;
6885 case GIMPLE_OMP_RETURN:
6886 case GIMPLE_OMP_CONTINUE:
6887 break;
6889 case GIMPLE_LABEL:
6891 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6892 so that such labels can be referenced from other regions.
6893 Make sure to update it when seeing a GIMPLE_LABEL though,
6894 that is the owner of the label. */
6895 walk_gimple_op (stmt, move_stmt_op, wi);
6896 *handled_ops_p = true;
6897 tree label = gimple_label_label (as_a <glabel *> (stmt));
6898 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6899 DECL_CONTEXT (label) = p->to_context;
6901 break;
6903 default:
6904 if (is_gimple_omp (stmt))
6906 /* Do not remap variables inside OMP directives. Variables
6907 referenced in clauses and directive header belong to the
6908 parent function and should not be moved into the child
6909 function. */
6910 bool save_remap_decls_p = p->remap_decls_p;
6911 p->remap_decls_p = false;
6912 *handled_ops_p = true;
6914 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6915 move_stmt_op, wi);
6917 p->remap_decls_p = save_remap_decls_p;
6919 break;
6922 return NULL_TREE;
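/* A hedged sketch (mirroring the per-statement loop in
   move_block_to_fn below): remap one statement at GSI with the two
   walkers above, given an already-populated descriptor D.  */

static void
move_one_stmt_sketch (gimple_stmt_iterator *gsi, struct move_stmt_d *d)
{
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = d;
  walk_gimple_stmt (gsi, move_stmt_r, move_stmt_op, &wi);
}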
6925 /* Move basic block BB from function CFUN to function DEST_FN. The
6926 block is moved out of the original linked list and placed after
6927 block AFTER in the new list. Also, the block is removed from the
6928 original array of blocks and placed in DEST_FN's array of blocks.
6929 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6930 updated to reflect the moved edges.
6932 The local variables are remapped to new instances, VARS_MAP is used
6933 to record the mapping. */
6935 static void
6936 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6937 basic_block after, bool update_edge_count_p,
6938 struct move_stmt_d *d)
6940 struct control_flow_graph *cfg;
6941 edge_iterator ei;
6942 edge e;
6943 gimple_stmt_iterator si;
6944 unsigned old_len, new_len;
6946 /* Remove BB from dominance structures. */
6947 delete_from_dominance_info (CDI_DOMINATORS, bb);
6949 /* Move BB from its current loop to the copy in the new function. */
6950 if (current_loops)
6952 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6953 if (new_loop)
6954 bb->loop_father = new_loop;
6957 /* Link BB to the new linked list. */
6958 move_block_after (bb, after);
6960 /* Update the edge count in the corresponding flowgraphs. */
6961 if (update_edge_count_p)
6962 FOR_EACH_EDGE (e, ei, bb->succs)
6964 cfun->cfg->x_n_edges--;
6965 dest_cfun->cfg->x_n_edges++;
6968 /* Remove BB from the original basic block array. */
6969 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6970 cfun->cfg->x_n_basic_blocks--;
6972 /* Grow DEST_CFUN's basic block array if needed. */
6973 cfg = dest_cfun->cfg;
6974 cfg->x_n_basic_blocks++;
6975 if (bb->index >= cfg->x_last_basic_block)
6976 cfg->x_last_basic_block = bb->index + 1;
6978 old_len = vec_safe_length (cfg->x_basic_block_info);
6979 if ((unsigned) cfg->x_last_basic_block >= old_len)
6981 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6982 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6985 (*cfg->x_basic_block_info)[bb->index] = bb;
6987 /* Remap the variables in phi nodes. */
6988 for (gphi_iterator psi = gsi_start_phis (bb);
6989 !gsi_end_p (psi); )
6991 gphi *phi = psi.phi ();
6992 use_operand_p use;
6993 tree op = PHI_RESULT (phi);
6994 ssa_op_iter oi;
6995 unsigned i;
6997 if (virtual_operand_p (op))
6999 /* Remove the phi nodes for virtual operands (alias analysis will be
7000 run for the new function, anyway). */
7001 remove_phi_node (&psi, true);
7002 continue;
7005 SET_PHI_RESULT (phi,
7006 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7007 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7009 op = USE_FROM_PTR (use);
7010 if (TREE_CODE (op) == SSA_NAME)
7011 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7014 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7016 location_t locus = gimple_phi_arg_location (phi, i);
7017 tree block = LOCATION_BLOCK (locus);
7019 if (locus == UNKNOWN_LOCATION)
7020 continue;
7021 if (d->orig_block == NULL_TREE || block == d->orig_block)
7023 locus = set_block (locus, d->new_block);
7024 gimple_phi_arg_set_location (phi, i, locus);
7028 gsi_next (&psi);
7031 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7033 gimple *stmt = gsi_stmt (si);
7034 struct walk_stmt_info wi;
7036 memset (&wi, 0, sizeof (wi));
7037 wi.info = d;
7038 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7040 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7042 tree label = gimple_label_label (label_stmt);
7043 int uid = LABEL_DECL_UID (label);
7045 gcc_assert (uid > -1);
7047 old_len = vec_safe_length (cfg->x_label_to_block_map);
7048 if (old_len <= (unsigned) uid)
7050 new_len = 3 * uid / 2 + 1;
7051 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7054 (*cfg->x_label_to_block_map)[uid] = bb;
7055 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7057 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7059 if (uid >= dest_cfun->cfg->last_label_uid)
7060 dest_cfun->cfg->last_label_uid = uid + 1;
7063 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7064 remove_stmt_from_eh_lp_fn (cfun, stmt);
7066 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7067 gimple_remove_stmt_histograms (cfun, stmt);
7069 /* We cannot leave any operands allocated from the operand caches of
7070 the current function. */
7071 free_stmt_operands (cfun, stmt);
7072 push_cfun (dest_cfun);
7073 update_stmt (stmt);
7074 pop_cfun ();
7077 FOR_EACH_EDGE (e, ei, bb->succs)
7078 if (e->goto_locus != UNKNOWN_LOCATION)
7080 tree block = LOCATION_BLOCK (e->goto_locus);
7081 if (d->orig_block == NULL_TREE
7082 || block == d->orig_block)
7083 e->goto_locus = set_block (e->goto_locus, d->new_block);
7087 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7088 the outermost EH region. Use REGION as the incoming base EH region. */
7090 static eh_region
7091 find_outermost_region_in_block (struct function *src_cfun,
7092 basic_block bb, eh_region region)
7094 gimple_stmt_iterator si;
7096 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7098 gimple *stmt = gsi_stmt (si);
7099 eh_region stmt_region;
7100 int lp_nr;
7102 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7103 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7104 if (stmt_region)
7106 if (region == NULL)
7107 region = stmt_region;
7108 else if (stmt_region != region)
7110 region = eh_region_outermost (src_cfun, stmt_region, region);
7111 gcc_assert (region != NULL);
7116 return region;
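/* Callback passed to duplicate_eh_regions when outlining an SESE
   region (see move_sese_region_to_fn below): map label DECL to a
   fresh artificial label in the current function, recording the
   pair in the tree_map hash table passed as DATA.  */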
7119 static tree
7120 new_label_mapper (tree decl, void *data)
7122 htab_t hash = (htab_t) data;
7123 struct tree_map *m;
7124 void **slot;
7126 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7128 m = XNEW (struct tree_map);
7129 m->hash = DECL_UID (decl);
7130 m->base.from = decl;
7131 m->to = create_artificial_label (UNKNOWN_LOCATION);
7132 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7133 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7134 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7136 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7137 gcc_assert (*slot == NULL);
7139 *slot = m;
7141 return m->to;
7144 /* Tree walker to replace the decls used inside value expressions by
7145 duplicates. */
7147 static tree
7148 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7150 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7152 switch (TREE_CODE (*tp))
7154 case VAR_DECL:
7155 case PARM_DECL:
7156 case RESULT_DECL:
7157 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7158 break;
7159 default:
7160 break;
7163 if (IS_TYPE_OR_DECL_P (*tp))
7164 *walk_subtrees = false;
7166 return NULL;
7169 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7170 subblocks. */
7172 static void
7173 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7174 tree to_context)
7176 tree *tp, t;
7178 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7180 t = *tp;
7181 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7182 continue;
7183 replace_by_duplicate_decl (&t, vars_map, to_context);
7184 if (t != *tp)
7186 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7188 tree x = DECL_VALUE_EXPR (*tp);
7189 struct replace_decls_d rd = { vars_map, to_context };
7190 x = unshare_expr (x);
7191 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7192 SET_DECL_VALUE_EXPR (t, x);
7193 DECL_HAS_VALUE_EXPR_P (t) = 1;
7195 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7196 *tp = t;
7200 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7201 replace_block_vars_by_duplicates (block, vars_map, to_context);
7204 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7205 from FN1 to FN2. */
7207 static void
7208 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7209 struct loop *loop)
7211 /* Discard it from the old loop array. */
7212 (*get_loops (fn1))[loop->num] = NULL;
7214 /* Place it in the new loop array, assigning it a new number. */
7215 loop->num = number_of_loops (fn2);
7216 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7218 /* Recurse to children. */
7219 for (loop = loop->inner; loop; loop = loop->next)
7220 fixup_loop_arrays_after_move (fn1, fn2, loop);
7223 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7224 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7226 DEBUG_FUNCTION void
7227 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7229 basic_block bb;
7230 edge_iterator ei;
7231 edge e;
7232 bitmap bbs = BITMAP_ALLOC (NULL);
7233 int i;
7235 gcc_assert (entry != NULL);
7236 gcc_assert (entry != exit);
7237 gcc_assert (bbs_p != NULL);
7239 gcc_assert (bbs_p->length () > 0);
7241 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7242 bitmap_set_bit (bbs, bb->index);
7244 gcc_assert (bitmap_bit_p (bbs, entry->index));
7245 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7247 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7249 if (bb == entry)
7251 gcc_assert (single_pred_p (entry));
7252 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7254 else
7255 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7257 e = ei_edge (ei);
7258 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7261 if (bb == exit)
7263 gcc_assert (single_succ_p (exit));
7264 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7266 else
7267 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7269 e = ei_edge (ei);
7270 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7274 BITMAP_FREE (bbs);
7277 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7279 bool
7280 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7282 bitmap release_names = (bitmap)data;
7284 if (TREE_CODE (from) != SSA_NAME)
7285 return true;
7287 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7288 return true;
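/* A hedged sketch of the intended pattern (used for real near the end
   of move_sese_region_to_fn below): collect every source-side SSA name
   recorded in VARS_MAP into a bitmap, then release the names in
   ascending version order.  */

static void
release_mapped_ssa_names_sketch (hash_map<tree, tree> *vars_map)
{
  bitmap release_names = BITMAP_ALLOC (NULL);
  vars_map->traverse<void *, gather_ssa_name_hash_map_from> (release_names);
  bitmap_iterator bi;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
    release_ssa_name (ssa_name (i));
  BITMAP_FREE (release_names);
}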
7291 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7292 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7293 single basic block in the original CFG and the new basic block is
7294 returned. DEST_CFUN must not have a CFG yet.
7296 Note that the region need not be a pure SESE region. Blocks inside
7297 the region may contain calls to abort/exit. The only restriction
7298 is that ENTRY_BB should be the only entry point and it must
7299 dominate EXIT_BB.
7301 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7302 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7303 to the new function.
7305 All local variables referenced in the region are assumed to be in
7306 the corresponding BLOCK_VARS and unexpanded variable lists
7307 associated with DEST_CFUN.
7309 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7310 reimplement move_sese_region_to_fn by duplicating the region rather than
7311 moving it. */
7313 basic_block
7314 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7315 basic_block exit_bb, tree orig_block)
7317 vec<basic_block> bbs, dom_bbs;
7318 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7319 basic_block after, bb, *entry_pred, *exit_succ, abb;
7320 struct function *saved_cfun = cfun;
7321 int *entry_flag, *exit_flag;
7322 profile_probability *entry_prob, *exit_prob;
7323 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7324 edge e;
7325 edge_iterator ei;
7326 htab_t new_label_map;
7327 hash_map<void *, void *> *eh_map;
7328 struct loop *loop = entry_bb->loop_father;
7329 struct loop *loop0 = get_loop (saved_cfun, 0);
7330 struct move_stmt_d d;
7332 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7333 region. */
7334 gcc_assert (entry_bb != exit_bb
7335 && (!exit_bb
7336 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7338 /* Collect all the blocks in the region. Manually add ENTRY_BB
7339 because it won't be added by dfs_enumerate_from. */
7340 bbs.create (0);
7341 bbs.safe_push (entry_bb);
7342 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7344 if (flag_checking)
7345 verify_sese (entry_bb, exit_bb, &bbs);
7347 /* The blocks that used to be dominated by something in BBS will now be
7348 dominated by the new block. */
7349 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7350 bbs.address (),
7351 bbs.length ());
7353 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7354 the predecessor edges to ENTRY_BB and the successor edges to
7355 EXIT_BB so that we can re-attach them to the new basic block that
7356 will replace the region. */
7357 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7358 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7359 entry_flag = XNEWVEC (int, num_entry_edges);
7360 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7361 i = 0;
7362 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7364 entry_prob[i] = e->probability;
7365 entry_flag[i] = e->flags;
7366 entry_pred[i++] = e->src;
7367 remove_edge (e);
7370 if (exit_bb)
7372 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7373 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7374 exit_flag = XNEWVEC (int, num_exit_edges);
7375 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7376 i = 0;
7377 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7379 exit_prob[i] = e->probability;
7380 exit_flag[i] = e->flags;
7381 exit_succ[i++] = e->dest;
7382 remove_edge (e);
7385 else
7387 num_exit_edges = 0;
7388 exit_succ = NULL;
7389 exit_flag = NULL;
7390 exit_prob = NULL;
7393 /* Switch context to the child function to initialize DEST_FN's CFG. */
7394 gcc_assert (dest_cfun->cfg == NULL);
7395 push_cfun (dest_cfun);
7397 init_empty_tree_cfg ();
7399 /* Initialize EH information for the new function. */
7400 eh_map = NULL;
7401 new_label_map = NULL;
7402 if (saved_cfun->eh)
7404 eh_region region = NULL;
7406 FOR_EACH_VEC_ELT (bbs, i, bb)
7407 region = find_outermost_region_in_block (saved_cfun, bb, region);
7409 init_eh_for_function ();
7410 if (region != NULL)
7412 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7413 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7414 new_label_mapper, new_label_map);
7418 /* Initialize an empty loop tree. */
7419 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7420 init_loops_structure (dest_cfun, loops, 1);
7421 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7422 set_loops_for_fn (dest_cfun, loops);
7424 /* Move the outlined loop tree part. */
7425 num_nodes = bbs.length ();
7426 FOR_EACH_VEC_ELT (bbs, i, bb)
7428 if (bb->loop_father->header == bb)
7430 struct loop *this_loop = bb->loop_father;
7431 struct loop *outer = loop_outer (this_loop);
7432 if (outer == loop
7433 /* If the SESE region contains some bbs ending with
7434 a noreturn call, those are considered to belong
7435 to the outermost loop in saved_cfun, rather than
7436 the entry_bb's loop_father. */
7437 || outer == loop0)
7439 if (outer != loop)
7440 num_nodes -= this_loop->num_nodes;
7441 flow_loop_tree_node_remove (bb->loop_father);
7442 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7443 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7446 else if (bb->loop_father == loop0 && loop0 != loop)
7447 num_nodes--;
7449 /* Remove loop exits from the outlined region. */
7450 if (loops_for_fn (saved_cfun)->exits)
7451 FOR_EACH_EDGE (e, ei, bb->succs)
7453 struct loops *l = loops_for_fn (saved_cfun);
7454 loop_exit **slot
7455 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7456 NO_INSERT);
7457 if (slot)
7458 l->exits->clear_slot (slot);
7463 /* Adjust the number of blocks in the tree root of the outlined part. */
7464 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7466 /* Setup a mapping to be used by move_block_to_fn. */
7467 loop->aux = current_loops->tree_root;
7468 loop0->aux = current_loops->tree_root;
7470 pop_cfun ();
7472 /* Move blocks from BBS into DEST_CFUN. */
7473 gcc_assert (bbs.length () >= 2);
7474 after = dest_cfun->cfg->x_entry_block_ptr;
7475 hash_map<tree, tree> vars_map;
7477 memset (&d, 0, sizeof (d));
7478 d.orig_block = orig_block;
7479 d.new_block = DECL_INITIAL (dest_cfun->decl);
7480 d.from_context = cfun->decl;
7481 d.to_context = dest_cfun->decl;
7482 d.vars_map = &vars_map;
7483 d.new_label_map = new_label_map;
7484 d.eh_map = eh_map;
7485 d.remap_decls_p = true;
7487 if (gimple_in_ssa_p (cfun))
7488 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7490 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7491 set_ssa_default_def (dest_cfun, arg, narg);
7492 vars_map.put (arg, narg);
7495 FOR_EACH_VEC_ELT (bbs, i, bb)
7497 /* No need to update edge counts on the last block. It has
7498 already been updated earlier when we detached the region from
7499 the original CFG. */
7500 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7501 after = bb;
7504 loop->aux = NULL;
7505 loop0->aux = NULL;
7506 /* Loop sizes are no longer correct, fix them up. */
7507 loop->num_nodes -= num_nodes;
7508 for (struct loop *outer = loop_outer (loop);
7509 outer; outer = loop_outer (outer))
7510 outer->num_nodes -= num_nodes;
7511 loop0->num_nodes -= bbs.length () - num_nodes;
7513 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7515 struct loop *aloop;
7516 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7517 if (aloop != NULL)
7519 if (aloop->simduid)
7521 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7522 d.to_context);
7523 dest_cfun->has_simduid_loops = true;
7525 if (aloop->force_vectorize)
7526 dest_cfun->has_force_vectorize_loops = true;
7530 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7531 if (orig_block)
7533 tree block;
7534 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7535 == NULL_TREE);
7536 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7537 = BLOCK_SUBBLOCKS (orig_block);
7538 for (block = BLOCK_SUBBLOCKS (orig_block);
7539 block; block = BLOCK_CHAIN (block))
7540 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7541 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7544 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7545 &vars_map, dest_cfun->decl);
7547 if (new_label_map)
7548 htab_delete (new_label_map);
7549 if (eh_map)
7550 delete eh_map;
7552 if (gimple_in_ssa_p (cfun))
7554 /* We need to release ssa-names in a defined order, so first find them,
7555 and then iterate in ascending version order. */
7556 bitmap release_names = BITMAP_ALLOC (NULL);
7557 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7558 bitmap_iterator bi;
7559 unsigned i;
7560 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7561 release_ssa_name (ssa_name (i));
7562 BITMAP_FREE (release_names);
7565 /* Rewire the entry and exit blocks. The successor to the entry
7566 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7567 the child function. Similarly, the predecessor of DEST_FN's
7568 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7569 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7570 various CFG manipulation functions get to the right CFG.
7572 FIXME, this is silly. The CFG ought to become a parameter to
7573 these helpers. */
7574 push_cfun (dest_cfun);
7575 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7576 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7577 if (exit_bb)
7579 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7580 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7582 else
7583 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7584 pop_cfun ();
7586 /* Back in the original function, the SESE region has disappeared,
7587 create a new basic block in its place. */
7588 bb = create_empty_bb (entry_pred[0]);
7589 if (current_loops)
7590 add_bb_to_loop (bb, loop);
7591 for (i = 0; i < num_entry_edges; i++)
7593 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7594 e->probability = entry_prob[i];
7597 for (i = 0; i < num_exit_edges; i++)
7599 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7600 e->probability = exit_prob[i];
7603 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7604 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7605 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7606 dom_bbs.release ();
7608 if (exit_bb)
7610 free (exit_prob);
7611 free (exit_flag);
7612 free (exit_succ);
7614 free (entry_prob);
7615 free (entry_flag);
7616 free (entry_pred);
7617 bbs.release ();
7619 return bb;
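/* A hedged sketch (loosely modelled on the OMP expander, the main
   caller of move_sese_region_to_fn): outline the region between
   ENTRY_BB and EXIT_BB into CHILD_FN.  All names are illustrative.  */

static basic_block
outline_region_sketch (tree child_fn, basic_block entry_bb,
		       basic_block exit_bb, tree block)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* The callee asserts that CHILD_CFUN has no CFG yet; it builds one,
     moves the blocks, and returns the replacement block left behind
     in the parent function.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
}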
7622 /* Dump default def DEF to file FILE using FLAGS and indentation
7623 SPC. */
7625 static void
7626 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7628 for (int i = 0; i < spc; ++i)
7629 fprintf (file, " ");
7630 dump_ssaname_info_to_file (file, def, spc);
7632 print_generic_expr (file, TREE_TYPE (def), flags);
7633 fprintf (file, " ");
7634 print_generic_expr (file, def, flags);
7635 fprintf (file, " = ");
7636 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7637 fprintf (file, ";\n");
7640 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7642 static void
7643 print_no_sanitize_attr_value (FILE *file, tree value)
7645 unsigned int flags = tree_to_uhwi (value);
7646 bool first = true;
7647 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7649 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7651 if (!first)
7652 fprintf (file, " | ");
7653 fprintf (file, "%s", sanitizer_opts[i].name);
7654 first = false;
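/* For example (illustrative): a function declared with
   __attribute__((no_sanitize ("address", "undefined"))) would have its
   attribute value rendered as "address | undefined".  */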
7659 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).  */
7662 void
7663 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7665 tree arg, var, old_current_fndecl = current_function_decl;
7666 struct function *dsf;
7667 bool ignore_topmost_bind = false, any_var = false;
7668 basic_block bb;
7669 tree chain;
7670 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7671 && decl_is_tm_clone (fndecl));
7672 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7674 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7676 fprintf (file, "__attribute__((");
7678 bool first = true;
7679 tree chain;
7680 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7681 first = false, chain = TREE_CHAIN (chain))
7683 if (!first)
7684 fprintf (file, ", ");
7686 tree name = get_attribute_name (chain);
7687 print_generic_expr (file, name, dump_flags);
7688 if (TREE_VALUE (chain) != NULL_TREE)
7690 fprintf (file, " (");
7692 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7693 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7694 else
7695 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7696 fprintf (file, ")");
7700 fprintf (file, "))\n");
7703 current_function_decl = fndecl;
7704 if (flags & TDF_GIMPLE)
7706 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7707 dump_flags | TDF_SLIM);
7708 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7710 else
7711 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7713 arg = DECL_ARGUMENTS (fndecl);
7714 while (arg)
7716 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7717 fprintf (file, " ");
7718 print_generic_expr (file, arg, dump_flags);
7719 if (DECL_CHAIN (arg))
7720 fprintf (file, ", ");
7721 arg = DECL_CHAIN (arg);
7723 fprintf (file, ")\n");
7725 dsf = DECL_STRUCT_FUNCTION (fndecl);
7726 if (dsf && (flags & TDF_EH))
7727 dump_eh_tree (file, dsf);
7729 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7731 dump_node (fndecl, TDF_SLIM | flags, file);
7732 current_function_decl = old_current_fndecl;
7733 return;
7736 /* When GIMPLE is lowered, the variables are no longer available in
7737 BIND_EXPRs, so display them separately. */
7738 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7740 unsigned ix;
7741 ignore_topmost_bind = true;
7743 fprintf (file, "{\n");
7744 if (gimple_in_ssa_p (fun)
7745 && (flags & TDF_ALIAS))
7747 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7748 arg = DECL_CHAIN (arg))
7750 tree def = ssa_default_def (fun, arg);
7751 if (def)
7752 dump_default_def (file, def, 2, flags);
7755 tree res = DECL_RESULT (fun->decl);
7756 if (res != NULL_TREE
7757 && DECL_BY_REFERENCE (res))
7759 tree def = ssa_default_def (fun, res);
7760 if (def)
7761 dump_default_def (file, def, 2, flags);
7764 tree static_chain = fun->static_chain_decl;
7765 if (static_chain != NULL_TREE)
7767 tree def = ssa_default_def (fun, static_chain);
7768 if (def)
7769 dump_default_def (file, def, 2, flags);
7773 if (!vec_safe_is_empty (fun->local_decls))
7774 FOR_EACH_LOCAL_DECL (fun, ix, var)
7776 print_generic_decl (file, var, flags);
7777 fprintf (file, "\n");
7779 any_var = true;
7782 tree name;
7784 if (gimple_in_ssa_p (cfun))
7785 FOR_EACH_SSA_NAME (ix, name, cfun)
7787 if (!SSA_NAME_VAR (name))
7789 fprintf (file, " ");
7790 print_generic_expr (file, TREE_TYPE (name), flags);
7791 fprintf (file, " ");
7792 print_generic_expr (file, name, flags);
7793 fprintf (file, ";\n");
7795 any_var = true;
7800 if (fun && fun->decl == fndecl
7801 && fun->cfg
7802 && basic_block_info_for_fn (fun))
7804 /* If the CFG has been built, emit a CFG-based dump. */
7805 if (!ignore_topmost_bind)
7806 fprintf (file, "{\n");
7808 if (any_var && n_basic_blocks_for_fn (fun))
7809 fprintf (file, "\n");
7811 FOR_EACH_BB_FN (bb, fun)
7812 dump_bb (file, bb, 2, flags);
7814 fprintf (file, "}\n");
7816 else if (fun->curr_properties & PROP_gimple_any)
7818 /* The function is now in GIMPLE form but the CFG has not been
7819 built yet. Emit the single sequence of GIMPLE statements
7820 that make up its body. */
7821 gimple_seq body = gimple_body (fndecl);
7823 if (gimple_seq_first_stmt (body)
7824 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7825 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7826 print_gimple_seq (file, body, 0, flags);
7827 else
7829 if (!ignore_topmost_bind)
7830 fprintf (file, "{\n");
7832 if (any_var)
7833 fprintf (file, "\n");
7835 print_gimple_seq (file, body, 2, flags);
7836 fprintf (file, "}\n");
7839 else
7841 int indent;
7843 /* Make a tree based dump. */
7844 chain = DECL_SAVED_TREE (fndecl);
7845 if (chain && TREE_CODE (chain) == BIND_EXPR)
7847 if (ignore_topmost_bind)
7849 chain = BIND_EXPR_BODY (chain);
7850 indent = 2;
7852 else
7853 indent = 0;
7855 else
7857 if (!ignore_topmost_bind)
7859 fprintf (file, "{\n");
7860 /* No topmost bind, pretend it's ignored for later. */
7861 ignore_topmost_bind = true;
7863 indent = 2;
7866 if (any_var)
7867 fprintf (file, "\n");
7869 print_generic_stmt_indented (file, chain, flags, indent);
7870 if (ignore_topmost_bind)
7871 fprintf (file, "}\n");
7874 if (flags & TDF_ENUMERATE_LOCALS)
7875 dump_enumerated_decls (file, flags);
7876 fprintf (file, "\n\n");
7878 current_function_decl = old_current_fndecl;
7881 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7883 DEBUG_FUNCTION void
7884 debug_function (tree fn, dump_flags_t flags)
7886 dump_function_to_file (fn, stderr, flags);
7890 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7892 static void
7893 print_pred_bbs (FILE *file, basic_block bb)
7895 edge e;
7896 edge_iterator ei;
7898 FOR_EACH_EDGE (e, ei, bb->preds)
7899 fprintf (file, "bb_%d ", e->src->index);
7903 /* Print on FILE the indexes for the successors of basic_block BB. */
7905 static void
7906 print_succ_bbs (FILE *file, basic_block bb)
7908 edge e;
7909 edge_iterator ei;
7911 FOR_EACH_EDGE (e, ei, bb->succs)
7912 fprintf (file, "bb_%d ", e->dest->index);
7915 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7917 void
7918 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7920 char *s_indent = (char *) alloca ((size_t) indent + 1);
7921 memset ((void *) s_indent, ' ', (size_t) indent);
7922 s_indent[indent] = '\0';
7924 /* Print basic_block's header. */
7925 if (verbosity >= 2)
7927 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7928 print_pred_bbs (file, bb);
7929 fprintf (file, "}, succs = {");
7930 print_succ_bbs (file, bb);
7931 fprintf (file, "})\n");
7934 /* Print basic_block's body. */
7935 if (verbosity >= 3)
7937 fprintf (file, "%s {\n", s_indent);
7938 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7939 fprintf (file, "%s }\n", s_indent);
7943 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7945 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
7946 VERBOSITY level, this outputs the contents of the loop, or just its
7947 structure. */
7949 static void
7950 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7952 char *s_indent;
7953 basic_block bb;
7955 if (loop == NULL)
7956 return;
7958 s_indent = (char *) alloca ((size_t) indent + 1);
7959 memset ((void *) s_indent, ' ', (size_t) indent);
7960 s_indent[indent] = '\0';
7962 /* Print loop's header. */
7963 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7964 if (loop->header)
7965 fprintf (file, "header = %d", loop->header->index);
7966 else
7968 fprintf (file, "deleted)\n");
7969 return;
7971 if (loop->latch)
7972 fprintf (file, ", latch = %d", loop->latch->index);
7973 else
7974 fprintf (file, ", multiple latches");
7975 fprintf (file, ", niter = ");
7976 print_generic_expr (file, loop->nb_iterations);
7978 if (loop->any_upper_bound)
7980 fprintf (file, ", upper_bound = ");
7981 print_decu (loop->nb_iterations_upper_bound, file);
7983 if (loop->any_likely_upper_bound)
7985 fprintf (file, ", likely_upper_bound = ");
7986 print_decu (loop->nb_iterations_likely_upper_bound, file);
7989 if (loop->any_estimate)
7991 fprintf (file, ", estimate = ");
7992 print_decu (loop->nb_iterations_estimate, file);
7994 fprintf (file, ")\n");
7996 /* Print loop's body. */
7997 if (verbosity >= 1)
7999 fprintf (file, "%s{\n", s_indent);
8000 FOR_EACH_BB_FN (bb, cfun)
8001 if (bb->loop_father == loop)
8002 print_loops_bb (file, bb, indent, verbosity);
8004 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8005 fprintf (file, "%s}\n", s_indent);
8009 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8010 spaces.  Depending on the VERBOSITY level, this outputs the contents of the
8011 loop, or just its structure. */
8013 static void
8014 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8015 int verbosity)
8017 if (loop == NULL)
8018 return;
8020 print_loop (file, loop, indent, verbosity);
8021 print_loop_and_siblings (file, loop->next, indent, verbosity);
8024 /* Follow a CFG edge from the entry point of the program, and on entry
8025 of a loop, pretty print the loop structure on FILE. */
8027 void
8028 print_loops (FILE *file, int verbosity)
8030 basic_block bb;
8032 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8033 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8034 if (bb && bb->loop_father)
8035 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8038 /* Dump a loop. */
8040 DEBUG_FUNCTION void
8041 debug (struct loop &ref)
8043 print_loop (stderr, &ref, 0, /*verbosity*/0);
8046 DEBUG_FUNCTION void
8047 debug (struct loop *ptr)
8049 if (ptr)
8050 debug (*ptr);
8051 else
8052 fprintf (stderr, "<nil>\n");
8055 /* Dump a loop verbosely. */
8057 DEBUG_FUNCTION void
8058 debug_verbose (struct loop &ref)
8060 print_loop (stderr, &ref, 0, /*verbosity*/3);
8063 DEBUG_FUNCTION void
8064 debug_verbose (struct loop *ptr)
8066 if (ptr)
8067 debug_verbose (*ptr);
8068 else
8069 fprintf (stderr, "<nil>\n");
8073 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8075 DEBUG_FUNCTION void
8076 debug_loops (int verbosity)
8078 print_loops (stderr, verbosity);
8081 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8083 DEBUG_FUNCTION void
8084 debug_loop (struct loop *loop, int verbosity)
8086 print_loop (stderr, loop, 0, verbosity);
8089 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8090 level. */
8092 DEBUG_FUNCTION void
8093 debug_loop_num (unsigned num, int verbosity)
8095 debug_loop (get_loop (cfun, num), verbosity);
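/* These DEBUG_FUNCTION entry points are intended for interactive use;
   an illustrative gdb session:

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)

   The output depends on the loop structure of the current cfun.  */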
8098 /* Return true if BB ends with a call, possibly followed by some
8099 instructions that must stay with the call.  Return false
8100 otherwise. */
8102 static bool
8103 gimple_block_ends_with_call_p (basic_block bb)
8105 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8106 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8110 /* Return true if BB ends with a conditional branch.  Return false
8111 otherwise. */
8113 static bool
8114 gimple_block_ends_with_condjump_p (const_basic_block bb)
8116 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8117 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8121 /* Return true if statement T may terminate execution of BB in ways not
8122 explicitly represented in the CFG. */
8124 bool
8125 stmt_can_terminate_bb_p (gimple *t)
8127 tree fndecl = NULL_TREE;
8128 int call_flags = 0;
8130 /* An EH exception not handled internally terminates execution of the whole
8131 function. */
8132 if (stmt_can_throw_external (t))
8133 return true;
8135 /* NORETURN and LONGJMP calls already have an edge to exit.
8136 CONST and PURE calls do not need one.
8137 We don't currently check for CONST and PURE here, although
8138 it would be a good idea, because those attributes are
8139 figured out from the RTL in mark_constant_function, and
8140 the counter incrementation code from -fprofile-arcs
8141 leads to different results from -fbranch-probabilities. */
8142 if (is_gimple_call (t))
8144 fndecl = gimple_call_fndecl (t);
8145 call_flags = gimple_call_flags (t);
8148 if (is_gimple_call (t)
8149 && fndecl
8150 && DECL_BUILT_IN (fndecl)
8151 && (call_flags & ECF_NOTHROW)
8152 && !(call_flags & ECF_RETURNS_TWICE)
8153 /* fork() doesn't really return twice, but the effect of
8154 wrapping it in __gcov_fork() which calls __gcov_flush()
8155 and clears the counters before forking has the same
8156 effect as returning twice. Force a fake edge. */
8157 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8158 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8159 return false;
8161 if (is_gimple_call (t))
8163 edge_iterator ei;
8164 edge e;
8165 basic_block bb;
8167 if (call_flags & (ECF_PURE | ECF_CONST)
8168 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8169 return false;
8171 /* A function call may do a longjmp, terminate the program, or do other
8172 things.  Special-case noreturn calls that have non-abnormal edges out,
8173 as in that case the fact is sufficiently represented by the lack of edges out of T. */
8174 if (!(call_flags & ECF_NORETURN))
8175 return true;
8177 bb = gimple_bb (t);
8178 FOR_EACH_EDGE (e, ei, bb->succs)
8179 if ((e->flags & EDGE_FAKE) == 0)
8180 return true;
8183 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8184 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8185 return true;
8187 return false;
8191 /* Add fake edges to the function exit for any non-constant and
8192 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8193 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8194 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8195 that were split.
8197 The goal is to expose cases in which entering a basic block does
8198 not imply that all subsequent instructions must be executed. */
8200 static int
8201 gimple_flow_call_edges_add (sbitmap blocks)
8203 int i;
8204 int blocks_split = 0;
8205 int last_bb = last_basic_block_for_fn (cfun);
8206 bool check_last_block = false;
8208 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8209 return 0;
8211 if (! blocks)
8212 check_last_block = true;
8213 else
8214 check_last_block = bitmap_bit_p (blocks,
8215 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8217 /* In the last basic block, before epilogue generation, there will be
8218 a fallthru edge to EXIT. Special care is required if the last insn
8219 of the last basic block is a call because make_edge folds duplicate
8220 edges, which would result in the fallthru edge also being marked
8221 fake, which would result in the fallthru edge being removed by
8222 remove_fake_edges, which would result in an invalid CFG.
8224 Moreover, we can't elide the outgoing fake edge, since the block
8225 profiler needs to take this into account in order to solve the minimal
8226 spanning tree in the case that the call doesn't return.
8228 Handle this by adding a dummy instruction in a new last basic block. */
8229 if (check_last_block)
8231 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8232 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8233 gimple *t = NULL;
8235 if (!gsi_end_p (gsi))
8236 t = gsi_stmt (gsi);
8238 if (t && stmt_can_terminate_bb_p (t))
8240 edge e;
8242 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8243 if (e)
8245 gsi_insert_on_edge (e, gimple_build_nop ());
8246 gsi_commit_edge_inserts ();
8251 /* Now add fake edges to the function exit for any non-constant
8252 calls since there is no way that we can determine if they will
8253 return or not... */
8254 for (i = 0; i < last_bb; i++)
8256 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8257 gimple_stmt_iterator gsi;
8258 gimple *stmt, *last_stmt;
8260 if (!bb)
8261 continue;
8263 if (blocks && !bitmap_bit_p (blocks, i))
8264 continue;
8266 gsi = gsi_last_nondebug_bb (bb);
8267 if (!gsi_end_p (gsi))
8269 last_stmt = gsi_stmt (gsi);
8272 stmt = gsi_stmt (gsi);
8273 if (stmt_can_terminate_bb_p (stmt))
8275 edge e;
8277 /* The handling above of the final block before the
8278 epilogue should be enough to verify that there is
8279 no edge to the exit block in CFG already.
8280 Calling make_edge in such case would cause us to
8281 mark that edge as fake and remove it later. */
8282 if (flag_checking && stmt == last_stmt)
8284 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8285 gcc_assert (e == NULL);
8288 /* Note that the following may create a new basic block
8289 and renumber the existing basic blocks. */
8290 if (stmt != last_stmt)
8292 e = split_block (bb, stmt);
8293 if (e)
8294 blocks_split++;
8296 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8297 e->probability = profile_probability::guessed_never ();
8299 gsi_prev (&gsi);
8301 while (!gsi_end_p (gsi));
8305 if (blocks_split)
8306 checking_verify_flow_info ();
8308 return blocks_split;
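/* A hedged usage note: this function is normally reached through the
   CFG hook flow_call_edges_add, e.g. (illustrative) when the profiler
   wants fake edges for every block:

     int blocks_split = flow_call_edges_add (NULL);

   Passing a NULL bitmap covers the whole CFG, per the BLOCKS comment
   above.  */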
8311 /* Removes edge E and all the blocks dominated by it, and updates dominance
8312 information. The IL in E->src needs to be updated separately.
8313 If dominance info is not available, only the edge E is removed. */
8315 void
8316 remove_edge_and_dominated_blocks (edge e)
8318 vec<basic_block> bbs_to_remove = vNULL;
8319 vec<basic_block> bbs_to_fix_dom = vNULL;
8320 edge f;
8321 edge_iterator ei;
8322 bool none_removed = false;
8323 unsigned i;
8324 basic_block bb, dbb;
8325 bitmap_iterator bi;
8327 /* If we are removing a path inside a non-root loop, this may change
8328 loop ownership of blocks or remove loops.  Mark loops for fixup. */
8329 if (current_loops
8330 && loop_outer (e->src->loop_father) != NULL
8331 && e->src->loop_father == e->dest->loop_father)
8332 loops_state_set (LOOPS_NEED_FIXUP);
8334 if (!dom_info_available_p (CDI_DOMINATORS))
8336 remove_edge (e);
8337 return;
8340 /* No updating is needed for edges to exit. */
8341 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8343 if (cfgcleanup_altered_bbs)
8344 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8345 remove_edge (e);
8346 return;
8349 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8350 that is not dominated by E->dest, then this set is empty. Otherwise,
8351 all the basic blocks dominated by E->dest are removed.
8353 Also, to DF_IDOM we store the immediate dominators of the blocks in
8354 the dominance frontier of E (i.e., of the successors of the
8355 removed blocks, if there are any, and of E->dest otherwise). */
8356 FOR_EACH_EDGE (f, ei, e->dest->preds)
8358 if (f == e)
8359 continue;
8361 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8363 none_removed = true;
8364 break;
8368 auto_bitmap df, df_idom;
8369 if (none_removed)
8370 bitmap_set_bit (df_idom,
8371 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8372 else
8374 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8375 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8377 FOR_EACH_EDGE (f, ei, bb->succs)
8379 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8380 bitmap_set_bit (df, f->dest->index);
8383 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8384 bitmap_clear_bit (df, bb->index);
8386 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8388 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8389 bitmap_set_bit (df_idom,
8390 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8394 if (cfgcleanup_altered_bbs)
8396 /* Record the set of the altered basic blocks. */
8397 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8398 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8401 /* Remove E and the cancelled blocks. */
8402 if (none_removed)
8403 remove_edge (e);
8404 else
8406 /* Walk backwards so as to get a chance to substitute all
8407 released DEFs into debug stmts. See
8408 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8409 details. */
8410 for (i = bbs_to_remove.length (); i-- > 0; )
8411 delete_basic_block (bbs_to_remove[i]);
8414 /* Update the dominance information. The immediate dominator may change only
8415 for blocks whose immediate dominator belongs to DF_IDOM:
8417 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8418 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8419 Z dominates X after the removal. Before removal, there exists a path P
8420 from Y to X that avoids Z. Let F be the last edge on P that is
8421 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8422 dominates W, and because of P, Z does not dominate W), and W belongs to
8423 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8424 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8426 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8427 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8428 dbb;
8429 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8430 bbs_to_fix_dom.safe_push (dbb);
8433 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8435 bbs_to_remove.release ();
8436 bbs_to_fix_dom.release ();
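
/* A hedged usage sketch (not part of this file): a typical caller has
   just determined that one successor edge of BB is always taken and
   wants the untaken paths, plus everything they exclusively dominate,
   to disappear.  The helper name and the BB/TAKEN locals are invented
   for illustration; the iteration pattern mirrors the purge loops
   below.  */

static void
remove_untaken_edges_sketch (basic_block bb, edge taken)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    if (e != taken)
      /* Also deletes the blocks reachable only through E and queues
	 the dominance fixups described above.  */
      remove_edge_and_dominated_blocks (e);
    else
      ei_next (&ei);
}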
8439 /* Purge dead EH edges from basic block BB. */
8441 bool
8442 gimple_purge_dead_eh_edges (basic_block bb)
8444 bool changed = false;
8445 edge e;
8446 edge_iterator ei;
8447 gimple *stmt = last_stmt (bb);
8449 if (stmt && stmt_can_throw_internal (stmt))
8450 return false;
8452 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8454 if (e->flags & EDGE_EH)
8456 remove_edge_and_dominated_blocks (e);
8457 changed = true;
8459 else
8460 ei_next (&ei);
8463 return changed;
8466 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8468 bool
8469 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8471 bool changed = false;
8472 unsigned i;
8473 bitmap_iterator bi;
8475 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8477 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8479 /* Earlier gimple_purge_dead_eh_edges could have removed
8480 this basic block already. */
8481 gcc_assert (bb || changed);
8482 if (bb != NULL)
8483 changed |= gimple_purge_dead_eh_edges (bb);
8486 return changed;
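
/* Sketch of the usual calling pattern (the helper name is invented):
   after STMT in BB has been simplified so that it can no longer throw,
   its EH successor edges may have become dead.  maybe_clean_eh_stmt
   reports whether STMT left its EH region; only then is the purge
   worth attempting.  The same pairing appears in execute_fixup_cfg
   below.  */

static bool
purge_eh_after_simplify_sketch (gimple *stmt, basic_block bb)
{
  if (maybe_clean_eh_stmt (stmt))
    return gimple_purge_dead_eh_edges (bb);
  return false;
}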
8489 /* Purge dead abnormal call edges from basic block BB. */
8491 bool
8492 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8494 bool changed = false;
8495 edge e;
8496 edge_iterator ei;
8497 gimple *stmt = last_stmt (bb);
8499 if (!cfun->has_nonlocal_label
8500 && !cfun->calls_setjmp)
8501 return false;
8503 if (stmt && stmt_can_make_abnormal_goto (stmt))
8504 return false;
8506 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8508 if (e->flags & EDGE_ABNORMAL)
8510 if (e->flags & EDGE_FALLTHRU)
8511 e->flags &= ~EDGE_ABNORMAL;
8512 else
8513 remove_edge_and_dominated_blocks (e);
8514 changed = true;
8516 else
8517 ei_next (&ei);
8520 return changed;
8523 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8525 bool
8526 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8528 bool changed = false;
8529 unsigned i;
8530 bitmap_iterator bi;
8532 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8534 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8536 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8537 this basic block already. */
8538 gcc_assert (bb || changed);
8539 if (bb != NULL)
8540 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8543 return changed;
8546 /* This function is called whenever a new edge is created or
8547 redirected. */
8549 static void
8550 gimple_execute_on_growing_pred (edge e)
8552 basic_block bb = e->dest;
8554 if (!gimple_seq_empty_p (phi_nodes (bb)))
8555 reserve_phi_args_for_new_edge (bb);
8558 /* This function is called immediately before edge E is removed from
8559 the edge vector E->dest->preds. */
8561 static void
8562 gimple_execute_on_shrinking_pred (edge e)
8564 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8565 remove_phi_args (e);
8568 /*---------------------------------------------------------------------------
8569 Helper functions for Loop versioning
8570 ---------------------------------------------------------------------------*/
8572 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block
8573 is a copy of 'first', and both are dominated by the 'new_head' basic
8574 block. When 'new_head' was created by splitting 'second's incoming edge,
8575 the edge from 'new_head' to 'second' received phi arguments from
8576 split_edge(). Later, an additional edge 'e' was created to connect
8577 'new_head' and 'first'. This routine now adds, on edge 'e', the same phi
8578 args that the 'new_head'-to-'second' edge received during the splitting. */
8580 static void
8581 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8582 basic_block new_head, edge e)
8584 gphi *phi1, *phi2;
8585 gphi_iterator psi1, psi2;
8586 tree def;
8587 edge e2 = find_edge (new_head, second);
8589 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8590 edge, we should always have an edge from NEW_HEAD to SECOND. */
8591 gcc_assert (e2 != NULL);
8593 /* Browse all 'second' basic block phi nodes and add phi args to
8594 edge 'e' for 'first' head. PHI args are always in correct order. */
8596 for (psi2 = gsi_start_phis (second),
8597 psi1 = gsi_start_phis (first);
8598 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8599 gsi_next (&psi2), gsi_next (&psi1))
8601 phi1 = psi1.phi ();
8602 phi2 = psi2.phi ();
8603 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8604 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8609 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8610 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8611 the destination of the ELSE part. */
8613 static void
8614 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8615 basic_block second_head ATTRIBUTE_UNUSED,
8616 basic_block cond_bb, void *cond_e)
8618 gimple_stmt_iterator gsi;
8619 gimple *new_cond_expr;
8620 tree cond_expr = (tree) cond_e;
8621 edge e0;
8623 /* Build new conditional expr */
8624 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8625 NULL_TREE, NULL_TREE);
8627 /* Add new cond in cond_bb. */
8628 gsi = gsi_last_bb (cond_bb);
8629 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8631 /* Adjust edges appropriately to connect new head with first head
8632 as well as second head. */
8633 e0 = single_succ_edge (cond_bb);
8634 e0->flags &= ~EDGE_FALLTHRU;
8635 e0->flags |= EDGE_FALSE_VALUE;
8639 /* Do book-keeping of basic block BB for the profile consistency checker.
8640 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8641 do post-pass accounting. Store the counts in RECORD. */
8642 static void
8643 gimple_account_profile_record (basic_block bb, int after_pass,
8644 struct profile_record *record)
8646 gimple_stmt_iterator i;
8647 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8649 record->size[after_pass]
8650 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8651 if (bb->count.initialized_p ())
8652 record->time[after_pass]
8653 += estimate_num_insns (gsi_stmt (i),
8654 &eni_time_weights) * bb->count.to_gcov_type ();
8655 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8656 record->time[after_pass]
8657 += estimate_num_insns (gsi_stmt (i),
8658 &eni_time_weights) * bb->count.to_frequency (cfun);
8662 struct cfg_hooks gimple_cfg_hooks = {
8663 "gimple",
8664 gimple_verify_flow_info,
8665 gimple_dump_bb, /* dump_bb */
8666 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8667 create_bb, /* create_basic_block */
8668 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8669 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8670 gimple_can_remove_branch_p, /* can_remove_branch_p */
8671 remove_bb, /* delete_basic_block */
8672 gimple_split_block, /* split_block */
8673 gimple_move_block_after, /* move_block_after */
8674 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8675 gimple_merge_blocks, /* merge_blocks */
8676 gimple_predict_edge, /* predict_edge */
8677 gimple_predicted_by_p, /* predicted_by_p */
8678 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8679 gimple_duplicate_bb, /* duplicate_block */
8680 gimple_split_edge, /* split_edge */
8681 gimple_make_forwarder_block, /* make_forwarder_block */
8682 NULL, /* tidy_fallthru_edge */
8683 NULL, /* force_nonfallthru */
8684 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8685 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8686 gimple_flow_call_edges_add, /* flow_call_edges_add */
8687 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8688 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8689 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8690 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8691 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8692 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8693 flush_pending_stmts, /* flush_pending_stmts */
8694 gimple_empty_block_p, /* block_empty_p */
8695 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8696 gimple_account_profile_record,
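
/* Reader-facing sketch (assumed, not part of this file): passes select
   the table above with gimple_register_cfg_hooks () and then call the
   IR-independent wrappers from cfghooks.h, which dispatch through it.
   With these hooks installed, the generic split_block below lands in
   gimple_split_block.  The helper name is invented.  */

static basic_block
split_after_stmt_sketch (basic_block bb, gimple *stmt)
{
  gimple_register_cfg_hooks ();
  /* Returns the fallthru edge to the new block, or NULL on failure.  */
  edge fallthru = split_block (bb, stmt);
  return fallthru ? fallthru->dest : NULL;
}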
8700 /* Split all critical edges. */
8702 unsigned int
8703 split_critical_edges (void)
8705 basic_block bb;
8706 edge e;
8707 edge_iterator ei;
8709 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8710 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8711 mappings around the calls to split_edge. */
8712 start_recording_case_labels ();
8713 FOR_ALL_BB_FN (bb, cfun)
8715 FOR_EACH_EDGE (e, ei, bb->succs)
8717 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8718 split_edge (e);
8719 /* PRE inserts statements on edges and expects that, because
8720 split_critical_edges was run beforehand, committing those edge
8721 insertions will not split any further edges. Besides critical
8722 edges we must therefore also split edges whose source block ends
8723 in a control flow statement, such as RESX, when the insertion
8724 cannot instead be placed at the start of the destination block.
8725 This matches the logic in gimple_find_edge_insert_loc. */
8726 else if ((!single_pred_p (e->dest)
8727 || !gimple_seq_empty_p (phi_nodes (e->dest))
8728 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8729 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8730 && !(e->flags & EDGE_ABNORMAL))
8732 gimple_stmt_iterator gsi;
8734 gsi = gsi_last_bb (e->src);
8735 if (!gsi_end_p (gsi)
8736 && stmt_ends_bb_p (gsi_stmt (gsi))
8737 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8738 && !gimple_call_builtin_p (gsi_stmt (gsi),
8739 BUILT_IN_RETURN)))
8740 split_edge (e);
8744 end_recording_case_labels ();
8745 return 0;
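
/* For reference, a sketch of the predicate this pass keys on: an edge
   is "critical" when neither endpoint can hold inserted code -- the
   source has multiple successors and the destination has multiple
   predecessors.  This mirrors the EDGE_CRITICAL_P macro; the helper
   name is invented for illustration.  */

static inline bool
edge_is_critical_sketch (edge e)
{
  return (EDGE_COUNT (e->src->succs) >= 2
	  && EDGE_COUNT (e->dest->preds) >= 2);
}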
8748 namespace {
8750 const pass_data pass_data_split_crit_edges =
8752 GIMPLE_PASS, /* type */
8753 "crited", /* name */
8754 OPTGROUP_NONE, /* optinfo_flags */
8755 TV_TREE_SPLIT_EDGES, /* tv_id */
8756 PROP_cfg, /* properties_required */
8757 PROP_no_crit_edges, /* properties_provided */
8758 0, /* properties_destroyed */
8759 0, /* todo_flags_start */
8760 0, /* todo_flags_finish */
8763 class pass_split_crit_edges : public gimple_opt_pass
8765 public:
8766 pass_split_crit_edges (gcc::context *ctxt)
8767 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8770 /* opt_pass methods: */
8771 virtual unsigned int execute (function *) { return split_critical_edges (); }
8773 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8774 }; // class pass_split_crit_edges
8776 } // anon namespace
8778 gimple_opt_pass *
8779 make_pass_split_crit_edges (gcc::context *ctxt)
8781 return new pass_split_crit_edges (ctxt);
8785 /* Insert COND expression which is GIMPLE_COND after STMT
8786 in basic block BB with appropriate basic block split
8787 and creation of a new conditionally executed basic block.
8788 Update profile so the new bb is visited with probability PROB.
8789 Return created basic block. */
8790 basic_block
8791 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8792 profile_probability prob)
8794 edge fall = split_block (bb, stmt);
8795 gimple_stmt_iterator iter = gsi_last_bb (bb);
8796 basic_block new_bb;
8798 /* Insert cond statement. */
8799 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8800 if (gsi_end_p (iter))
8801 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8802 else
8803 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8805 /* Create conditionally executed block. */
8806 new_bb = create_empty_bb (bb);
8807 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8808 e->probability = prob;
8809 new_bb->count = e->count ();
8810 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8812 /* Fix edge for split bb. */
8813 fall->flags = EDGE_FALSE_VALUE;
8814 fall->probability -= e->probability;
8816 /* Update dominance info. */
8817 if (dom_info_available_p (CDI_DOMINATORS))
8819 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8820 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8823 /* Update loop info. */
8824 if (current_loops)
8825 add_bb_to_loop (new_bb, bb->loop_father);
8827 return new_bb;
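
/* Usage sketch with invented names: guard a call to FNDECL on FLAG
   being nonzero, placing it after STMT on a path predicted cold.
   This is an assumed example of a caller, not code from this file.  */

static void
add_guarded_call_sketch (basic_block bb, gimple *stmt, tree flag,
			 tree fndecl)
{
  /* Branch to the new block when FLAG != 0.  */
  gcond *cond
    = gimple_build_cond (NE_EXPR, flag,
			 build_zero_cst (TREE_TYPE (flag)),
			 NULL_TREE, NULL_TREE);
  basic_block then_bb
    = insert_cond_bb (bb, stmt, cond,
		      profile_probability::very_unlikely ());
  /* THEN_BB is empty; inserting before its end iterator appends,
     matching the pattern used in insert_cond_bb above.  */
  gimple_stmt_iterator gsi = gsi_start_bb (then_bb);
  gsi_insert_before (&gsi, gimple_build_call (fndecl, 0), GSI_NEW_STMT);
}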
8830 /* Build a ternary operation and gimplify it. Emit code before GSI.
8831 Return the gimple_val holding the result. */
8833 tree
8834 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8835 tree type, tree a, tree b, tree c)
8837 tree ret;
8838 location_t loc = gimple_location (gsi_stmt (*gsi));
8840 ret = fold_build3_loc (loc, code, type, a, b, c);
8841 STRIP_NOPS (ret);
8843 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8844 GSI_SAME_STMT);
8847 /* Build a binary operation and gimplify it. Emit code before GSI.
8848 Return the gimple_val holding the result. */
8850 tree
8851 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8852 tree type, tree a, tree b)
8854 tree ret;
8856 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8857 STRIP_NOPS (ret);
8859 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8860 GSI_SAME_STMT);
8863 /* Build a unary operation and gimplify it. Emit code before GSI.
8864 Return the gimple_val holding the result. */
8866 tree
8867 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8868 tree a)
8870 tree ret;
8872 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8873 STRIP_NOPS (ret);
8875 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8876 GSI_SAME_STMT);
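
/* Usage sketch (hypothetical helper): the three builders above fold
   and gimplify in one step, so their results can feed each other
   directly.  E.g. materializing (A + B) / 2 before *GSI:  */

static tree
build_average_sketch (gimple_stmt_iterator *gsi, tree type, tree a,
		      tree b)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  /* Each call returns a gimple_val (constant or SSA name).  */
  return gimplify_build2 (gsi, TRUNC_DIV_EXPR, type, sum,
			  build_int_cst (type, 2));
}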
8881 /* Given a basic block B which ends with a conditional and has
8882 precisely two successors, determine which of the edges is taken if
8883 the conditional is true and which is taken if the conditional is
8884 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8886 void
8887 extract_true_false_edges_from_block (basic_block b,
8888 edge *true_edge,
8889 edge *false_edge)
8891 edge e = EDGE_SUCC (b, 0);
8893 if (e->flags & EDGE_TRUE_VALUE)
8895 *true_edge = e;
8896 *false_edge = EDGE_SUCC (b, 1);
8898 else
8900 *false_edge = e;
8901 *true_edge = EDGE_SUCC (b, 1);
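
/* Usage sketch (invented helper): once the value of the conditional
   terminating COND_BB is known, the surviving successor falls directly
   out of the function above.  */

static edge
edge_taken_for_value_sketch (basic_block cond_bb, bool value)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return value ? true_edge : false_edge;
}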
8906 /* From a controlling predicate in the immediate dominator DOM of
8907 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8908 predicate evaluates to true and false and store them to
8909 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8910 they are non-NULL. Returns true if the edges can be determined,
8911 and false otherwise. */
8913 bool
8914 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8915 edge *true_controlled_edge,
8916 edge *false_controlled_edge)
8918 basic_block bb = phiblock;
8919 edge true_edge, false_edge, tem;
8920 edge e0 = NULL, e1 = NULL;
8922 /* We have to verify that one edge into the PHI node is dominated
8923 by the true edge of the predicate block and the other edge
8924 dominated by the false edge. This ensures that the PHI argument
8925 we are going to take is completely determined by the path we
8926 take from the predicate block.
8927 We can only use BB dominance checks below if the destination of
8928 the true/false edges are dominated by their edge, thus only
8929 have a single predecessor. */
8930 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8931 tem = EDGE_PRED (bb, 0);
8932 if (tem == true_edge
8933 || (single_pred_p (true_edge->dest)
8934 && (tem->src == true_edge->dest
8935 || dominated_by_p (CDI_DOMINATORS,
8936 tem->src, true_edge->dest))))
8937 e0 = tem;
8938 else if (tem == false_edge
8939 || (single_pred_p (false_edge->dest)
8940 && (tem->src == false_edge->dest
8941 || dominated_by_p (CDI_DOMINATORS,
8942 tem->src, false_edge->dest))))
8943 e1 = tem;
8944 else
8945 return false;
8946 tem = EDGE_PRED (bb, 1);
8947 if (tem == true_edge
8948 || (single_pred_p (true_edge->dest)
8949 && (tem->src == true_edge->dest
8950 || dominated_by_p (CDI_DOMINATORS,
8951 tem->src, true_edge->dest))))
8952 e0 = tem;
8953 else if (tem == false_edge
8954 || (single_pred_p (false_edge->dest)
8955 && (tem->src == false_edge->dest
8956 || dominated_by_p (CDI_DOMINATORS,
8957 tem->src, false_edge->dest))))
8958 e1 = tem;
8959 else
8960 return false;
8961 if (!e0 || !e1)
8962 return false;
8964 if (true_controlled_edge)
8965 *true_controlled_edge = e0;
8966 if (false_controlled_edge)
8967 *false_controlled_edge = e1;
8969 return true;
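
/* Usage sketch (invented helper): fetch the PHI argument that is
   selected when the controlling predicate in DOM evaluates to true.  */

static tree
phi_arg_when_true_sketch (basic_block dom, gphi *phi)
{
  edge true_e, false_e;
  if (!extract_true_false_controlled_edges (dom, gimple_bb (phi),
					    &true_e, &false_e))
    return NULL_TREE;
  return PHI_ARG_DEF (phi, true_e->dest_idx);
}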
8972 /* Generate a range test LHS CODE RHS that determines whether INDEX is in
8973 the range [LOW, HIGH]. Emit the associated statements at the end of BB. */
8975 void
8976 generate_range_test (basic_block bb, tree index, tree low, tree high,
8977 tree *lhs, tree *rhs)
8979 tree type = TREE_TYPE (index);
8980 tree utype = unsigned_type_for (type);
8982 low = fold_convert (type, low);
8983 high = fold_convert (type, high);
8985 tree tmp = make_ssa_name (type);
8986 gassign *sub1
8987 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
8989 *lhs = make_ssa_name (utype);
8990 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
8992 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
8993 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8994 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
8995 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
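
/* For reference, the scalar identity generate_range_test relies on, as
   a plain C sketch (names invented, assumes LOW <= HIGH): INDEX lies in
   [LOW, HIGH] exactly when the wrapped unsigned difference INDEX - LOW
   does not exceed HIGH - LOW, replacing two comparisons with one.  */

static inline bool
in_range_sketch (long index, long low, long high)
{
  /* Do the subtractions in the unsigned type, as the code above does,
     so wraparound is well defined.  */
  return ((unsigned long) index - (unsigned long) low
	  <= (unsigned long) high - (unsigned long) low);
}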
8998 /* Emit return warnings. */
9000 namespace {
9002 const pass_data pass_data_warn_function_return =
9004 GIMPLE_PASS, /* type */
9005 "*warn_function_return", /* name */
9006 OPTGROUP_NONE, /* optinfo_flags */
9007 TV_NONE, /* tv_id */
9008 PROP_cfg, /* properties_required */
9009 0, /* properties_provided */
9010 0, /* properties_destroyed */
9011 0, /* todo_flags_start */
9012 0, /* todo_flags_finish */
9015 class pass_warn_function_return : public gimple_opt_pass
9017 public:
9018 pass_warn_function_return (gcc::context *ctxt)
9019 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9022 /* opt_pass methods: */
9023 virtual unsigned int execute (function *);
9025 }; // class pass_warn_function_return
9027 unsigned int
9028 pass_warn_function_return::execute (function *fun)
9030 source_location location;
9031 gimple *last;
9032 edge e;
9033 edge_iterator ei;
9035 if (!targetm.warn_func_return (fun->decl))
9036 return 0;
9038 /* If we have a path to EXIT, then we do return. */
9039 if (TREE_THIS_VOLATILE (fun->decl)
9040 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9042 location = UNKNOWN_LOCATION;
9043 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9044 (e = ei_safe_edge (ei)); )
9046 last = last_stmt (e->src);
9047 if ((gimple_code (last) == GIMPLE_RETURN
9048 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9049 && location == UNKNOWN_LOCATION
9050 && (location = gimple_location (last)) != UNKNOWN_LOCATION
9051 && !optimize)
9052 break;
9053 /* When optimizing, replace return stmts in noreturn functions
9054 with a __builtin_unreachable () call. */
9055 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9057 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9058 gimple *new_stmt = gimple_build_call (fndecl, 0);
9059 gimple_set_location (new_stmt, gimple_location (last));
9060 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9061 gsi_replace (&gsi, new_stmt, true);
9062 remove_edge (e);
9064 else
9065 ei_next (&ei);
9067 if (location == UNKNOWN_LOCATION)
9068 location = cfun->function_end_locus;
9069 warning_at (location, 0, "%<noreturn%> function does return");
9072 /* If we see "return;" in some basic block, then we do reach the end
9073 without returning a value. */
9074 else if (warn_return_type
9075 && !TREE_NO_WARNING (fun->decl)
9076 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
9077 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9079 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9081 gimple *last = last_stmt (e->src);
9082 greturn *return_stmt = dyn_cast <greturn *> (last);
9083 if (return_stmt
9084 && gimple_return_retval (return_stmt) == NULL
9085 && !gimple_no_warning_p (last))
9087 location = gimple_location (last);
9088 if (location == UNKNOWN_LOCATION)
9089 location = fun->function_end_locus;
9090 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
9091 TREE_NO_WARNING (fun->decl) = 1;
9092 break;
9096 return 0;
9099 } // anon namespace
9101 gimple_opt_pass *
9102 make_pass_warn_function_return (gcc::context *ctxt)
9104 return new pass_warn_function_return (ctxt);
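
/* Hedged user-level examples (not part of this file) of the two
   diagnostics the pass above can issue; the function names are
   invented.  */
#if 0
__attribute__ ((noreturn)) void
fatal_sketch (int code)
{
  if (code != 0)
    __builtin_abort ();
  /* Falls off the end when CODE == 0:
     warning: 'noreturn' function does return.  */
}

int
sign_sketch (int x)
{
  if (x > 0)
    return 1;
  if (x < 0)
    return -1;
  /* Falls off the end when X == 0: warning: control reaches end of
     non-void function (-Wreturn-type).  */
}
#endif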
9107 /* Walk a gimplified function and warn about calls whose return value is
9108 ignored although attribute((warn_unused_result)) is set on the callee.
9109 This is done before inlining, so we don't have to worry about that. */
9111 static void
9112 do_warn_unused_result (gimple_seq seq)
9114 tree fdecl, ftype;
9115 gimple_stmt_iterator i;
9117 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9119 gimple *g = gsi_stmt (i);
9121 switch (gimple_code (g))
9123 case GIMPLE_BIND:
9124 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9125 break;
9126 case GIMPLE_TRY:
9127 do_warn_unused_result (gimple_try_eval (g));
9128 do_warn_unused_result (gimple_try_cleanup (g));
9129 break;
9130 case GIMPLE_CATCH:
9131 do_warn_unused_result (gimple_catch_handler (
9132 as_a <gcatch *> (g)));
9133 break;
9134 case GIMPLE_EH_FILTER:
9135 do_warn_unused_result (gimple_eh_filter_failure (g));
9136 break;
9138 case GIMPLE_CALL:
9139 if (gimple_call_lhs (g))
9140 break;
9141 if (gimple_call_internal_p (g))
9142 break;
9144 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9145 LHS. All calls whose value is ignored should be
9146 represented like this. Look for the attribute. */
9147 fdecl = gimple_call_fndecl (g);
9148 ftype = gimple_call_fntype (g);
9150 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9152 location_t loc = gimple_location (g);
9154 if (fdecl)
9155 warning_at (loc, OPT_Wunused_result,
9156 "ignoring return value of %qD, "
9157 "declared with attribute warn_unused_result",
9158 fdecl);
9159 else
9160 warning_at (loc, OPT_Wunused_result,
9161 "ignoring return value of function "
9162 "declared with attribute warn_unused_result");
9164 break;
9166 default:
9167 /* Not a container, not a call, or a call whose value is used. */
9168 break;
9173 namespace {
9175 const pass_data pass_data_warn_unused_result =
9177 GIMPLE_PASS, /* type */
9178 "*warn_unused_result", /* name */
9179 OPTGROUP_NONE, /* optinfo_flags */
9180 TV_NONE, /* tv_id */
9181 PROP_gimple_any, /* properties_required */
9182 0, /* properties_provided */
9183 0, /* properties_destroyed */
9184 0, /* todo_flags_start */
9185 0, /* todo_flags_finish */
9188 class pass_warn_unused_result : public gimple_opt_pass
9190 public:
9191 pass_warn_unused_result (gcc::context *ctxt)
9192 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9195 /* opt_pass methods: */
9196 virtual bool gate (function *) { return flag_warn_unused_result; }
9197 virtual unsigned int execute (function *)
9199 do_warn_unused_result (gimple_body (current_function_decl));
9200 return 0;
9203 }; // class pass_warn_unused_result
9205 } // anon namespace
9207 gimple_opt_pass *
9208 make_pass_warn_unused_result (gcc::context *ctxt)
9210 return new pass_warn_unused_result (ctxt);
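
/* Hedged user-level example (not part of this file) of what
   do_warn_unused_result flags; names invented.  */
#if 0
__attribute__ ((warn_unused_result)) int must_check_sketch (void);

void
caller_sketch (void)
{
  must_check_sketch ();		/* warning: ignoring return value.  */
  int r = must_check_sketch ();	/* Not flagged: the call has an LHS at
				   GIMPLE level, so the walk above
				   skips it.  */
  (void) r;
}
#endif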
9213 /* IPA passes, compilation of earlier functions or inlining
9214 might have changed some properties, such as marking functions nothrow,
9215 pure, const or noreturn.
9216 Remove redundant edges and basic blocks, and create new ones if necessary.
9218 This pass can't be executed as a standalone pass from the pass manager,
9219 because between inlining and this fixup verify_flow_info would fail. */
9221 unsigned int
9222 execute_fixup_cfg (void)
9224 basic_block bb;
9225 gimple_stmt_iterator gsi;
9226 int todo = 0;
9227 cgraph_node *node = cgraph_node::get (current_function_decl);
9228 profile_count num = node->count;
9229 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9230 bool scale = num.initialized_p () && den.ipa_p ()
9231 && (den.nonzero_p () || num == profile_count::zero ())
9232 && !(num == den.ipa ());
9234 if (scale)
9236 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9237 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9238 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9241 FOR_EACH_BB_FN (bb, cfun)
9243 if (scale)
9244 bb->count = bb->count.apply_scale (num, den);
9245 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9247 gimple *stmt = gsi_stmt (gsi);
9248 tree decl = is_gimple_call (stmt)
9249 ? gimple_call_fndecl (stmt)
9250 : NULL;
9251 if (decl)
9253 int flags = gimple_call_flags (stmt);
9254 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9256 if (gimple_purge_dead_abnormal_call_edges (bb))
9257 todo |= TODO_cleanup_cfg;
9259 if (gimple_in_ssa_p (cfun))
9261 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9262 update_stmt (stmt);
9266 if (flags & ECF_NORETURN
9267 && fixup_noreturn_call (stmt))
9268 todo |= TODO_cleanup_cfg;
9271 /* Remove stores to variables we marked write-only.
9272 Keep the access when the store has side effects, i.e. when the
9273 source is volatile. */
9274 if (gimple_store_p (stmt)
9275 && !gimple_has_side_effects (stmt))
9277 tree lhs = get_base_address (gimple_get_lhs (stmt));
9279 if (VAR_P (lhs)
9280 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9281 && varpool_node::get (lhs)->writeonly)
9283 unlink_stmt_vdef (stmt);
9284 gsi_remove (&gsi, true);
9285 release_defs (stmt);
9286 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9287 continue;
9290 /* For calls we can simply remove the LHS when it is known
9291 to be write-only. */
9292 if (is_gimple_call (stmt)
9293 && gimple_get_lhs (stmt))
9295 tree lhs = get_base_address (gimple_get_lhs (stmt));
9297 if (VAR_P (lhs)
9298 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9299 && varpool_node::get (lhs)->writeonly)
9301 gimple_call_set_lhs (stmt, NULL);
9302 update_stmt (stmt);
9303 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9307 if (maybe_clean_eh_stmt (stmt)
9308 && gimple_purge_dead_eh_edges (bb))
9309 todo |= TODO_cleanup_cfg;
9310 gsi_next (&gsi);
9313 /* If we have a basic block with no successors that does not
9314 end with a control statement or a noreturn call end it with
9315 a call to __builtin_unreachable. This situation can occur
9316 when inlining a noreturn call that does in fact return. */
9317 if (EDGE_COUNT (bb->succs) == 0)
9319 gimple *stmt = last_stmt (bb);
9320 if (!stmt
9321 || (!is_ctrl_stmt (stmt)
9322 && (!is_gimple_call (stmt)
9323 || !gimple_call_noreturn_p (stmt))))
9325 if (stmt && is_gimple_call (stmt))
9326 gimple_call_set_ctrl_altering (stmt, false);
9327 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9328 stmt = gimple_build_call (fndecl, 0);
9329 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9330 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9331 if (!cfun->after_inlining)
9333 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9334 int freq
9335 = compute_call_stmt_bb_frequency (current_function_decl,
9336 bb);
9337 node->create_edge (cgraph_node::get_create (fndecl),
9338 call_stmt, bb->count, freq);
9343 if (scale)
9344 compute_function_frequency ();
9346 if (current_loops
9347 && (todo & TODO_cleanup_cfg))
9348 loops_state_set (LOOPS_NEED_FIXUP);
9350 return todo;
9353 namespace {
9355 const pass_data pass_data_fixup_cfg =
9357 GIMPLE_PASS, /* type */
9358 "fixup_cfg", /* name */
9359 OPTGROUP_NONE, /* optinfo_flags */
9360 TV_NONE, /* tv_id */
9361 PROP_cfg, /* properties_required */
9362 0, /* properties_provided */
9363 0, /* properties_destroyed */
9364 0, /* todo_flags_start */
9365 0, /* todo_flags_finish */
9368 class pass_fixup_cfg : public gimple_opt_pass
9370 public:
9371 pass_fixup_cfg (gcc::context *ctxt)
9372 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9375 /* opt_pass methods: */
9376 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9377 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9379 }; // class pass_fixup_cfg
9381 } // anon namespace
9383 gimple_opt_pass *
9384 make_pass_fixup_cfg (gcc::context *ctxt)
9386 return new pass_fixup_cfg (ctxt);
9389 /* Garbage collection support for edge_def. */
9391 extern void gt_ggc_mx (tree&);
9392 extern void gt_ggc_mx (gimple *&);
9393 extern void gt_ggc_mx (rtx&);
9394 extern void gt_ggc_mx (basic_block&);
9396 static void
9397 gt_ggc_mx (rtx_insn *& x)
9399 if (x)
9400 gt_ggc_mx_rtx_def ((void *) x);
9403 void
9404 gt_ggc_mx (edge_def *e)
9406 tree block = LOCATION_BLOCK (e->goto_locus);
9407 gt_ggc_mx (e->src);
9408 gt_ggc_mx (e->dest);
9409 if (current_ir_type () == IR_GIMPLE)
9410 gt_ggc_mx (e->insns.g);
9411 else
9412 gt_ggc_mx (e->insns.r);
9413 gt_ggc_mx (block);
9416 /* PCH support for edge_def. */
9418 extern void gt_pch_nx (tree&);
9419 extern void gt_pch_nx (gimple *&);
9420 extern void gt_pch_nx (rtx&);
9421 extern void gt_pch_nx (basic_block&);
9423 static void
9424 gt_pch_nx (rtx_insn *& x)
9426 if (x)
9427 gt_pch_nx_rtx_def ((void *) x);
9430 void
9431 gt_pch_nx (edge_def *e)
9433 tree block = LOCATION_BLOCK (e->goto_locus);
9434 gt_pch_nx (e->src);
9435 gt_pch_nx (e->dest);
9436 if (current_ir_type () == IR_GIMPLE)
9437 gt_pch_nx (e->insns.g);
9438 else
9439 gt_pch_nx (e->insns.r);
9440 gt_pch_nx (block);
9443 void
9444 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9446 tree block = LOCATION_BLOCK (e->goto_locus);
9447 op (&(e->src), cookie);
9448 op (&(e->dest), cookie);
9449 if (current_ir_type () == IR_GIMPLE)
9450 op (&(e->insns.g), cookie);
9451 else
9452 op (&(e->insns.r), cookie);
9453 op (&(block), cookie);
9456 #if CHECKING_P
9458 namespace selftest {
9460 /* Helper function for CFG selftests: create a dummy function decl
9461 and push it as cfun. */
9463 static tree
9464 push_fndecl (const char *name)
9466 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9467 /* FIXME: this uses input_location: */
9468 tree fndecl = build_fn_decl (name, fn_type);
9469 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9470 NULL_TREE, integer_type_node);
9471 DECL_RESULT (fndecl) = retval;
9472 push_struct_function (fndecl);
9473 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9474 ASSERT_TRUE (fun != NULL);
9475 init_empty_tree_cfg_for_function (fun);
9476 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9477 ASSERT_EQ (0, n_edges_for_fn (fun));
9478 return fndecl;
9481 /* These tests directly create CFGs.
9482 Compare with the static fns within tree-cfg.c:
9483 - build_gimple_cfg
9484 - make_blocks: calls create_basic_block (seq, bb);
9485 - make_edges. */
9487 /* Verify a simple cfg of the form:
9488 ENTRY -> A -> B -> C -> EXIT. */
9490 static void
9491 test_linear_chain ()
9493 gimple_register_cfg_hooks ();
9495 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9496 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9498 /* Create some empty blocks. */
9499 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9500 basic_block bb_b = create_empty_bb (bb_a);
9501 basic_block bb_c = create_empty_bb (bb_b);
9503 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9504 ASSERT_EQ (0, n_edges_for_fn (fun));
9506 /* Create some edges: a simple linear chain of BBs. */
9507 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9508 make_edge (bb_a, bb_b, 0);
9509 make_edge (bb_b, bb_c, 0);
9510 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9512 /* Verify the edges. */
9513 ASSERT_EQ (4, n_edges_for_fn (fun));
9514 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9515 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9516 ASSERT_EQ (1, bb_a->preds->length ());
9517 ASSERT_EQ (1, bb_a->succs->length ());
9518 ASSERT_EQ (1, bb_b->preds->length ());
9519 ASSERT_EQ (1, bb_b->succs->length ());
9520 ASSERT_EQ (1, bb_c->preds->length ());
9521 ASSERT_EQ (1, bb_c->succs->length ());
9522 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9523 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9525 /* Verify the dominance information.
9526 Each BB in our simple chain should be dominated by the one before
9527 it. */
9528 calculate_dominance_info (CDI_DOMINATORS);
9529 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9530 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9531 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9532 ASSERT_EQ (1, dom_by_b.length ());
9533 ASSERT_EQ (bb_c, dom_by_b[0]);
9534 free_dominance_info (CDI_DOMINATORS);
9535 dom_by_b.release ();
9537 /* Similarly for post-dominance: each BB in our chain is post-dominated
9538 by the one after it. */
9539 calculate_dominance_info (CDI_POST_DOMINATORS);
9540 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9541 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9542 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9543 ASSERT_EQ (1, postdom_by_b.length ());
9544 ASSERT_EQ (bb_a, postdom_by_b[0]);
9545 free_dominance_info (CDI_POST_DOMINATORS);
9546 postdom_by_b.release ();
9548 pop_cfun ();
9551 /* Verify a simple CFG of the form:
9552 ENTRY
9553 |
9554 A
9555 / \
9556 /t \f
9557 B C
9558 \ /
9559 \ /
9560 D
9561 |
9562 EXIT. */
9564 static void
9565 test_diamond ()
9567 gimple_register_cfg_hooks ();
9569 tree fndecl = push_fndecl ("cfg_test_diamond");
9570 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9572 /* Create some empty blocks. */
9573 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9574 basic_block bb_b = create_empty_bb (bb_a);
9575 basic_block bb_c = create_empty_bb (bb_a);
9576 basic_block bb_d = create_empty_bb (bb_b);
9578 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9579 ASSERT_EQ (0, n_edges_for_fn (fun));
9581 /* Create the edges. */
9582 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9583 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9584 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9585 make_edge (bb_b, bb_d, 0);
9586 make_edge (bb_c, bb_d, 0);
9587 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9589 /* Verify the edges. */
9590 ASSERT_EQ (6, n_edges_for_fn (fun));
9591 ASSERT_EQ (1, bb_a->preds->length ());
9592 ASSERT_EQ (2, bb_a->succs->length ());
9593 ASSERT_EQ (1, bb_b->preds->length ());
9594 ASSERT_EQ (1, bb_b->succs->length ());
9595 ASSERT_EQ (1, bb_c->preds->length ());
9596 ASSERT_EQ (1, bb_c->succs->length ());
9597 ASSERT_EQ (2, bb_d->preds->length ());
9598 ASSERT_EQ (1, bb_d->succs->length ());
9600 /* Verify the dominance information. */
9601 calculate_dominance_info (CDI_DOMINATORS);
9602 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9603 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9604 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9605 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9606 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9607 dom_by_a.release ();
9608 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9609 ASSERT_EQ (0, dom_by_b.length ());
9610 dom_by_b.release ();
9611 free_dominance_info (CDI_DOMINATORS);
9613 /* Similarly for post-dominance. */
9614 calculate_dominance_info (CDI_POST_DOMINATORS);
9615 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9616 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9617 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9618 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9619 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9620 postdom_by_d.release ();
9621 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9622 ASSERT_EQ (0, postdom_by_b.length ());
9623 postdom_by_b.release ();
9624 free_dominance_info (CDI_POST_DOMINATORS);
9626 pop_cfun ();
9629 /* Verify that we can handle a CFG containing a "complete" aka
9630 fully-connected subgraph (where A B C D below all have edges
9631 pointing to every other node, and also to themselves).
9632 e.g.:
9633 ENTRY EXIT
9639 A<--->B
9640 ^^ ^^
9641 | \ / |
9642 | X |
9643 | / \ |
9644 VV VV
9645 C<--->D
9648 static void
9649 test_fully_connected ()
9651 gimple_register_cfg_hooks ();
9653 tree fndecl = push_fndecl ("cfg_fully_connected");
9654 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9656 const int n = 4;
9658 /* Create some empty blocks. */
9659 auto_vec <basic_block> subgraph_nodes;
9660 for (int i = 0; i < n; i++)
9661 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9663 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9664 ASSERT_EQ (0, n_edges_for_fn (fun));
9666 /* Create the edges. */
9667 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9668 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9669 for (int i = 0; i < n; i++)
9670 for (int j = 0; j < n; j++)
9671 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9673 /* Verify the edges. */
9674 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9675 /* The first one is linked to ENTRY/EXIT as well as itself and
9676 everything else. */
9677 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9678 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9679 /* The other ones in the subgraph are linked to everything in
9680 the subgraph (including themselves). */
9681 for (int i = 1; i < n; i++)
9683 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9684 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9687 /* Verify the dominance information. */
9688 calculate_dominance_info (CDI_DOMINATORS);
9689 /* The initial block in the subgraph should be dominated by ENTRY. */
9690 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9691 get_immediate_dominator (CDI_DOMINATORS,
9692 subgraph_nodes[0]));
9693 /* Every other block in the subgraph should be dominated by the
9694 initial block. */
9695 for (int i = 1; i < n; i++)
9696 ASSERT_EQ (subgraph_nodes[0],
9697 get_immediate_dominator (CDI_DOMINATORS,
9698 subgraph_nodes[i]));
9699 free_dominance_info (CDI_DOMINATORS);
9701 /* Similarly for post-dominance. */
9702 calculate_dominance_info (CDI_POST_DOMINATORS);
9703 /* The initial block in the subgraph should be postdominated by EXIT. */
9704 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9705 get_immediate_dominator (CDI_POST_DOMINATORS,
9706 subgraph_nodes[0]));
9707 /* Every other block in the subgraph should be postdominated by the
9708 initial block, since that leads to EXIT. */
9709 for (int i = 1; i < n; i++)
9710 ASSERT_EQ (subgraph_nodes[0],
9711 get_immediate_dominator (CDI_POST_DOMINATORS,
9712 subgraph_nodes[i]));
9713 free_dominance_info (CDI_POST_DOMINATORS);
9715 pop_cfun ();
9718 /* Run all of the selftests within this file. */
9720 void
9721 tree_cfg_c_tests ()
9723 test_linear_chain ();
9724 test_diamond ();
9725 test_fully_connected ();
9728 } // namespace selftest
9730 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9731 - loop
9732 - nested loops
9733 - switch statement (a block with many out-edges)
9734 - something that jumps to itself
9735 - etc */
9737 #endif /* CHECKING_P */