debug/dwarf: support 64-bit DWARF in byte order check
[official-gcc.git] / gcc / tree-cfg.c
blobae1cdb33f53af277b12c6c5c3becda8b10103110
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their block's first label; counted
     for dump/statistics purposes only.  */
  long num_merged_labels;
};

/* File-scope accumulator, zeroed at the start of each CFG build.  */
static struct cfg_stats_d cfg_stats;
/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  /* Map from original decls to their duplicates.  */
  hash_map<tree, tree> *vars_map;
  /* Function context the duplicates belong to.  */
  tree to_context;
};
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* Source location this entry describes (keyed by its line number).  */
  location_t locus;
  /* Last discriminator value handed out for this line.  */
  int discriminator;
};
/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.
   Only the line number participates, so all locations on the same
   line hash (and compare, see equal below) identically.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Entries are
   considered equal when their line numbers match, regardless of
   file or column.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

/* Per-function table of discriminators, live only during CFG
   construction (allocated and freed in build_gimple_cfg).  */
static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
/* Initialize an empty CFG for function FN: just the ENTRY and EXIT
   blocks linked to each other, with freshly allocated basic-block and
   label-to-block arrays of INITIAL_CFG_CAPACITY entries.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* Chain ENTRY directly to EXIT; real blocks get linked in between
     as they are created.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Initialize an empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge creation.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  /* Annotations are only expected right before a GIMPLE_COND; anything
     else means there is nothing to do in this block.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards from the condition, consuming consecutive
     IFN_ANNOTATE calls.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* The second argument carries the annotation kind.  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Replace the annotation call by a plain copy of its first
	 argument, so that the LHS keeps its value.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation still left at this point was not attached to a loop
     and is dropped with a warning.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  /* Only known loop annotation kinds are expected here.  */
	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  Each __PHI call at the
   start of a block is turned into a real PHI node; its arguments come
   in (label, value) pairs, where the label identifies the predecessor
   block the value flows in from.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* __PHI calls can only appear right after the labels; stop at the
	 first statement that is not one.  */
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  A LABEL_DECL argument
	     selects the predecessor; the following argument is the
	     value arriving over the edge from it.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  /* gsi_remove advances the iterator to the next statement.  */
	  gsi_remove (&gsi, true);
	}
    }
}
/* Main entry point of the CFG construction pass: build the CFG for the
   current function from its gimple body, drop the now-redundant body,
   clean up, and initialize loop structures.  Always returns 0 (no
   additional TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; the flat body is stale.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass descriptor for the CFG construction pass: requires lowered EH
   (PROP_gimple_leh) and provides both the CFG and loop structures.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
/* Factory for the CFG construction pass; caller owns the returned pass.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
447 /* Return true if T is a computed goto. */
449 bool
450 computed_goto_p (gimple *t)
452 return (gimple_code (t) == GIMPLE_GOTO
453 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  Labels, debug statements and
   clobbers before the call are tolerated.  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  /* The last statement must be the __builtin_unreachable () call.  */
  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  /* Everything before it must be irrelevant to execution.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Pick the successor that is not E's destination.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* Only a dead-end block can be "just unreachable".  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
511 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
512 could alter control flow except via eh. We initialize the flag at
513 CFG build time and only ever clear it later. */
515 static void
516 gimple_call_initialize_ctrl_altering (gimple *stmt)
518 int flags = gimple_call_flags (stmt);
520 /* A call alters control flow if it can make an abnormal goto. */
521 if (call_can_make_abnormal_goto (stmt)
522 /* A call also alters control flow if it does not return. */
523 || flags & ECF_NORETURN
524 /* TM ending statements have backedges out of the transaction.
525 Return true so we split the basic block containing them.
526 Note that the TM_BUILTIN test is merely an optimization. */
527 || ((flags & ECF_TM_BUILTIN)
528 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
529 /* BUILT_IN_RETURN call is same as return statement. */
530 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
531 /* IFN_UNIQUE should be the last insn, to make checking for it
532 as cheap as possible. */
533 || (gimple_call_internal_p (stmt)
534 && gimple_call_internal_unique_p (stmt)))
535 gimple_call_set_ctrl_altering (stmt, true);
536 else
537 gimple_call_set_ctrl_altering (stmt, false);
/* Insert SEQ after BB and build a flowgraph.  Returns the last basic
   block created for SEQ.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex/vector temporaries must be marked as gimple
		 registers to stay in SSA form.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ, starting after the
   function's entry block.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  H is the
   statement sequence for the new block (may be NULL); E must be NULL
   (edge argument required by the cfg-hooks interface but unused here).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  Grows by about 25% to
     amortize reallocations.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    /* Abnormal but not EH: EH edges also carry EDGE_ABNORMAL.  */
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   with ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.
   DISPATCHER_BBS caches dispatcher blocks (index 0 for calls,
   index 1 for computed gotos; with BB_TO_OMP_IDX, a pair per OMP
   region index).  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      /* Each OMP region gets its own dispatcher pair.  */
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  /* BB is NULL when the loop above ran off the end without a
	     match in this OMP region.  */
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  /* The dispatcher call's argument tells whether we are inside
	     an OMP region (used by expansion later).  */
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with an
   computed goto, returns 2 when it ends with a statement that
   might return to this function via an nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      /* Anything else must not end a basic block.  */
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  Blocks ending in a
     computed goto or a call that may return abnormally are collected
     for the factoring pass below.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      /* If we had an OMP region, record the region index per block;
	 the mapping is only allocated lazily, once the first region
	 is seen.  */
      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      /* With OMP regions, one dispatcher pair per region index.  */
      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      /* Only heap-allocated when bb_to_omp_idx was in use.  */
      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  omp_free_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  /* For each freshly created block: wire up its edges, put it in the
     loop tree, and estimate its profile from incoming edges.  */
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      int freq = 0;
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      /* The inserted sequence must not contain abnormal control flow
	 or OMP constructs.  */
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	  freq += EDGE_FREQUENCY (e);
	}
      tree_guess_outgoing_edge_probabilities (bb);
      /* Only trust the summed count when every incoming edge had an
	 initialized count, or the profile was read.  */
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;
      bb->frequency = freq;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  The first value returned for a line is 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  /* First time we see this line: allocate a fresh map entry.  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1108 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1110 static bool
1111 same_line_p (location_t locus1, location_t locus2)
1113 expanded_location from, to;
/* Identical location values trivially denote the same line; this also
   avoids the cost of expanding them.  */
1115 if (locus1 == locus2)
1116 return true;
1118 from = expand_location (locus1);
1119 to = expand_location (locus2);
1121 if (from.line != to.line)
1122 return false;
/* Same line number: equal file pointers (including both NULL) match;
   otherwise compare file names, treating a NULL on either side as a
   mismatch.  */
1123 if (from.file == to.file)
1124 return true;
1125 return (from.file != NULL
1126 && to.file != NULL
1127 && filename_cmp (from.file, to.file) == 0);
1130 /* Assign discriminators to each basic block. */
1132 static void
1133 assign_discriminators (void)
1135 basic_block bb;
1137 FOR_EACH_BB_FN (bb, cfun)
1139 edge e;
1140 edge_iterator ei;
1141 gimple *last = last_stmt (bb);
1142 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1144 if (locus == UNKNOWN_LOCATION)
1145 continue;
/* For each successor sharing this block's source line, give one of the
   two blocks a discriminator so sample profiles can tell them apart.  */
1147 FOR_EACH_EDGE (e, ei, bb->succs)
1149 gimple *first = first_non_label_stmt (e->dest);
1150 gimple *last = last_stmt (e->dest);
1151 if ((first && same_line_p (locus, gimple_location (first)))
1152 || (last && same_line_p (locus, gimple_location (last))))
/* If the successor already carries a discriminator, assign one to BB
   instead (but never overwrite an existing one on BB).  */
1154 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1155 bb->discriminator = next_discriminator_for_locus (locus);
1156 else
1157 e->dest->discriminator = next_discriminator_for_locus (locus);
1163 /* Create the edges for a GIMPLE_COND starting at block BB. */
1165 static void
1166 make_cond_expr_edges (basic_block bb)
1168 gcond *entry = as_a <gcond *> (last_stmt (bb));
1169 gimple *then_stmt, *else_stmt;
1170 basic_block then_bb, else_bb;
1171 tree then_label, else_label;
1172 edge e;
1174 gcc_assert (entry);
1175 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1177 /* Entry basic blocks for each component. */
1178 then_label = gimple_cond_true_label (entry);
1179 else_label = gimple_cond_false_label (entry);
1180 then_bb = label_to_block (then_label);
1181 else_bb = label_to_block (else_label);
1182 then_stmt = first_stmt (then_bb);
1183 else_stmt = first_stmt (else_bb);
1185 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
/* NOTE(review): only the second make_edge result is NULL-checked below;
   presumably the TRUE edge cannot be a duplicate here while the FALSE
   edge can (then_bb == else_bb) — confirm against make_edge semantics.  */
1186 e->goto_locus = gimple_location (then_stmt);
1187 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1188 if (e)
1189 e->goto_locus = gimple_location (else_stmt);
1191 /* We do not need the labels anymore. */
1192 gimple_cond_set_true_label (entry, NULL_TREE);
1193 gimple_cond_set_false_label (entry, NULL_TREE);
1197 /* Called for each element in the hash table (P) as we delete the
1198 edge to cases hash table.
1200 Clear all the CASE_CHAINs to prevent problems with copying of
1201 SWITCH_EXPRs and structure sharing rules, then free the hash table
1202 element. */
1204 bool
1205 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1207 tree t, next;
/* Walk the CASE_CHAIN list rooted at VALUE, saving the successor before
   clearing each link.  */
1209 for (t = value; t; t = next)
1211 next = CASE_CHAIN (t);
1212 CASE_CHAIN (t) = NULL;
/* Returning true keeps the hash-map traversal going.  */
1215 return true;
1218 /* Start recording information mapping edges to case labels. */
1220 void
1221 start_recording_case_labels (void)
/* Recording must not already be active.  */
1223 gcc_assert (edge_to_cases == NULL);
1224 edge_to_cases = new hash_map<edge, tree>;
/* Track which blocks with GIMPLE_SWITCH get touched, so their case labels
   can be re-grouped in end_recording_case_labels.  */
1225 touched_switch_bbs = BITMAP_ALLOC (NULL);
1228 /* Return nonzero if we are recording information for case labels. */
1230 static bool
1231 recording_case_labels_p (void)
1233 return (edge_to_cases != NULL);
1236 /* Stop recording information mapping edges to case labels and
1237 remove any information we have recorded. */
1238 void
1239 end_recording_case_labels (void)
1241 bitmap_iterator bi;
1242 unsigned i;
/* Clear CASE_CHAINs before deleting the map (see edge_to_cases_cleanup).  */
1243 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1244 delete edge_to_cases;
1245 edge_to_cases = NULL;
/* Re-group case labels of every switch whose block was touched while
   recording; blocks may have been removed, hence the NULL check.  */
1246 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1248 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1249 if (bb)
1251 gimple *stmt = last_stmt (bb);
1252 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1253 group_case_labels_stmt (as_a <gswitch *> (stmt));
1256 BITMAP_FREE (touched_switch_bbs);
1259 /* If we are inside a {start,end}_recording_cases block, then return
1260 a chain of CASE_LABEL_EXPRs from T which reference E.
1262 Otherwise return NULL. */
1264 static tree
1265 get_cases_for_edge (edge e, gswitch *t)
1267 tree *slot;
1268 size_t i, n;
1270 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1271 chains available. Return NULL so the caller can detect this case. */
1272 if (!recording_case_labels_p ())
1273 return NULL;
1275 slot = edge_to_cases->get (e);
1276 if (slot)
1277 return *slot;
1279 /* If we did not find E in the hash table, then this must be the first
1280 time we have been queried for information about E & T. Add all the
1281 elements from T to the hash table then perform the query again. */
1283 n = gimple_switch_num_labels (t);
1284 for (i = 0; i < n; i++)
1286 tree elt = gimple_switch_label (t, i);
1287 tree lab = CASE_LABEL (elt);
1288 basic_block label_bb = label_to_block (lab);
1289 edge this_edge = find_edge (e->src, label_bb);
1291 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1292 a new chain. */
/* Push ELT onto the front of the per-edge chain via CASE_CHAIN.  */
1293 tree &s = edge_to_cases->get_or_insert (this_edge);
1294 CASE_CHAIN (elt) = s;
1295 s = elt;
/* All of T's edges (including E) are now populated; the lookup must hit.  */
1298 return *edge_to_cases->get (e);
1301 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1303 static void
1304 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1306 size_t i, n;
1308 n = gimple_switch_num_labels (entry);
/* One edge per case label, from BB to the block holding the label.
   make_edge ignores duplicates, so cases sharing a destination produce
   a single edge.  */
1310 for (i = 0; i < n; ++i)
1312 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1313 basic_block label_bb = label_to_block (lab);
1314 make_edge (bb, label_bb, 0);
1319 /* Return the basic block holding label DEST. */
1321 basic_block
1322 label_to_block_fn (struct function *ifun, tree dest)
1324 int uid = LABEL_DECL_UID (dest);
1326 /* We would die hard when faced by an undefined label. Emit a label to
1327 the very first basic block. This will hopefully make even the dataflow
1328 and undefined variable warnings quite right. */
1329 if (seen_error () && uid < 0)
1331 gimple_stmt_iterator gsi =
1332 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1333 gimple *stmt;
1335 stmt = gimple_build_label (dest);
1336 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Inserting the label assigned it a UID; re-read it.  */
1337 uid = LABEL_DECL_UID (dest);
/* UID out of range of the map means the label has no block (yet).  */
1339 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1340 return NULL;
1341 return (*ifun->cfg->x_label_to_block_map)[uid];
1344 /* Create edges for a goto statement at block BB. Returns true
1345 if abnormal edges should be created. */
1347 static bool
1348 make_goto_expr_edges (basic_block bb)
1350 gimple_stmt_iterator last = gsi_last_bb (bb);
1351 gimple *goto_t = gsi_stmt (last);
1353 /* A simple GOTO creates normal edges. */
1354 if (simple_goto_p (goto_t))
1356 tree dest = gimple_goto_dest (goto_t);
1357 basic_block label_bb = label_to_block (dest);
1358 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
/* Preserve the goto's source location on the edge, then delete the
   statement itself — the edge now fully represents it.  */
1359 e->goto_locus = gimple_location (goto_t);
1360 gsi_remove (&last, true);
1361 return false;
1364 /* A computed GOTO creates abnormal edges. */
1365 return true;
1368 /* Create edges for an asm statement with labels at block BB. */
1370 static void
1371 make_gimple_asm_edges (basic_block bb)
1373 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1374 int i, n = gimple_asm_nlabels (stmt);
/* One plain edge from BB to each label operand of the asm goto.  */
1376 for (i = 0; i < n; ++i)
1378 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1379 basic_block label_bb = label_to_block (label);
1380 make_edge (bb, label_bb, 0);
1384 /*---------------------------------------------------------------------------
1385 Flowgraph analysis
1386 ---------------------------------------------------------------------------*/
1388 /* Cleanup useless labels in basic blocks. This is something we wish
1389 to do early because it allows us to group case labels before creating
1390 the edges for the CFG, and it speeds up block statement iterators in
1391 all passes later on.
1392 We rerun this pass after CFG is created, to get rid of the labels that
1393 are no longer referenced. After then we do not run it any more, since
1394 (almost) no new labels should be created. */
1396 /* A map from basic block index to the leading label of that block. */
1397 static struct label_record
1399 /* The label. */
1400 tree label;
1402 /* True if the label is referenced from somewhere. */
1403 bool used;
1404 } *label_for_bb;
1406 /* Given LABEL return the first label in the same basic block. */
1408 static tree
1409 main_block_label (tree label)
1411 basic_block bb = label_to_block (label);
1412 tree main_label = label_for_bb[bb->index].label;
1414 /* label_to_block possibly inserted undefined label into the chain. */
1415 if (!main_label)
1417 label_for_bb[bb->index].label = label;
1418 main_label = label;
/* Mark the block's leading label as referenced so cleanup_dead_labels
   keeps it.  */
1421 label_for_bb[bb->index].used = true;
1422 return main_label;
1425 /* Clean up redundant labels within the exception tree. */
1427 static void
1428 cleanup_dead_labels_eh (void)
1430 eh_landing_pad lp;
1431 eh_region r;
1432 tree lab;
1433 int i;
1435 if (cfun->eh == NULL)
1436 return;
/* Redirect each landing pad's post-landing-pad label to the block's
   leading label, moving the EH_LANDING_PAD_NR marking with it.  */
1438 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1439 if (lp && lp->post_landing_pad)
1441 lab = main_block_label (lp->post_landing_pad);
1442 if (lab != lp->post_landing_pad)
1444 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1445 EH_LANDING_PAD_NR (lab) = lp->index;
/* Likewise redirect the labels recorded in each EH region.  */
1449 FOR_ALL_EH_REGION (r)
1450 switch (r->type)
1452 case ERT_CLEANUP:
1453 case ERT_MUST_NOT_THROW:
1454 break;
1456 case ERT_TRY:
1458 eh_catch c;
1459 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1461 lab = c->label;
1462 if (lab)
1463 c->label = main_block_label (lab);
1466 break;
1468 case ERT_ALLOWED_EXCEPTIONS:
1469 lab = r->u.allowed.label;
1470 if (lab)
1471 r->u.allowed.label = main_block_label (lab);
1472 break;
1477 /* Cleanup redundant labels. This is a three-step process:
1478 1) Find the leading label for each block.
1479 2) Redirect all references to labels to the leading labels.
1480 3) Cleanup all useless labels. */
1482 void
1483 cleanup_dead_labels (void)
1485 basic_block bb;
1486 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1488 /* Find a suitable label for each block. We use the first user-defined
1489 label if there is one, or otherwise just the first label we see. */
1490 FOR_EACH_BB_FN (bb, cfun)
1492 gimple_stmt_iterator i;
1494 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1496 tree label;
1497 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels only appear at the start of a block; stop at the first
   non-label statement.  */
1499 if (!label_stmt)
1500 break;
1502 label = gimple_label_label (label_stmt);
1504 /* If we have not yet seen a label for the current block,
1505 remember this one and see if there are more labels. */
1506 if (!label_for_bb[bb->index].label)
1508 label_for_bb[bb->index].label = label;
1509 continue;
1512 /* If we did see a label for the current block already, but it
1513 is an artificially created label, replace it if the current
1514 label is a user defined label. */
1515 if (!DECL_ARTIFICIAL (label)
1516 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1518 label_for_bb[bb->index].label = label;
1519 break;
1524 /* Now redirect all jumps/branches to the selected label.
1525 First do so for each block ending in a control statement. */
1526 FOR_EACH_BB_FN (bb, cfun)
1528 gimple *stmt = last_stmt (bb);
1529 tree label, new_label;
1531 if (!stmt)
1532 continue;
1534 switch (gimple_code (stmt))
1536 case GIMPLE_COND:
1538 gcond *cond_stmt = as_a <gcond *> (stmt);
1539 label = gimple_cond_true_label (cond_stmt);
1540 if (label)
1542 new_label = main_block_label (label);
1543 if (new_label != label)
1544 gimple_cond_set_true_label (cond_stmt, new_label);
1547 label = gimple_cond_false_label (cond_stmt);
1548 if (label)
1550 new_label = main_block_label (label);
1551 if (new_label != label)
1552 gimple_cond_set_false_label (cond_stmt, new_label);
1555 break;
1557 case GIMPLE_SWITCH:
1559 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1560 size_t i, n = gimple_switch_num_labels (switch_stmt);
1562 /* Replace all destination labels. */
1563 for (i = 0; i < n; ++i)
1565 tree case_label = gimple_switch_label (switch_stmt, i);
1566 label = CASE_LABEL (case_label);
1567 new_label = main_block_label (label);
1568 if (new_label != label)
1569 CASE_LABEL (case_label) = new_label;
1571 break;
1574 case GIMPLE_ASM:
1576 gasm *asm_stmt = as_a <gasm *> (stmt);
1577 int i, n = gimple_asm_nlabels (asm_stmt);
1579 for (i = 0; i < n; ++i)
1581 tree cons = gimple_asm_label_op (asm_stmt, i);
1582 tree label = main_block_label (TREE_VALUE (cons));
1583 TREE_VALUE (cons) = label;
1585 break;
1588 /* We have to handle gotos until they're removed, and we don't
1589 remove them until after we've created the CFG edges. */
1590 case GIMPLE_GOTO:
1591 if (!computed_goto_p (stmt))
1593 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1594 label = gimple_goto_dest (goto_stmt);
1595 new_label = main_block_label (label);
1596 if (new_label != label)
1597 gimple_goto_set_dest (goto_stmt, new_label);
1599 break;
1601 case GIMPLE_TRANSACTION:
1603 gtransaction *txn = as_a <gtransaction *> (stmt);
/* Transactions carry up to three labels (normal, uninstrumented,
   over); redirect each one that is present.  */
1605 label = gimple_transaction_label_norm (txn);
1606 if (label)
1608 new_label = main_block_label (label);
1609 if (new_label != label)
1610 gimple_transaction_set_label_norm (txn, new_label);
1613 label = gimple_transaction_label_uninst (txn);
1614 if (label)
1616 new_label = main_block_label (label);
1617 if (new_label != label)
1618 gimple_transaction_set_label_uninst (txn, new_label);
1621 label = gimple_transaction_label_over (txn);
1622 if (label)
1624 new_label = main_block_label (label);
1625 if (new_label != label)
1626 gimple_transaction_set_label_over (txn, new_label);
1629 break;
1631 default:
1632 break;
1636 /* Do the same for the exception region tree labels. */
1637 cleanup_dead_labels_eh ();
1639 /* Finally, purge dead labels. All user-defined labels and labels that
1640 can be the target of non-local gotos and labels which have their
1641 address taken are preserved. */
1642 FOR_EACH_BB_FN (bb, cfun)
1644 gimple_stmt_iterator i;
1645 tree label_for_this_bb = label_for_bb[bb->index].label;
1647 if (!label_for_this_bb)
1648 continue;
1650 /* If the main label of the block is unused, we may still remove it. */
1651 if (!label_for_bb[bb->index].used)
1652 label_for_this_bb = NULL;
1654 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1656 tree label;
1657 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1659 if (!label_stmt)
1660 break;
1662 label = gimple_label_label (label_stmt);
/* Keep the chosen leading label, user labels, non-local targets and
   address-taken labels; delete everything else.  */
1664 if (label == label_for_this_bb
1665 || !DECL_ARTIFICIAL (label)
1666 || DECL_NONLOCAL (label)
1667 || FORCED_LABEL (label))
1668 gsi_next (&i);
1669 else
1670 gsi_remove (&i, true);
1674 free (label_for_bb);
1677 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1678 the ones jumping to the same label.
1679 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1681 bool
1682 group_case_labels_stmt (gswitch *stmt)
1684 int old_size = gimple_switch_num_labels (stmt);
1685 int i, next_index, new_size;
1686 basic_block default_bb = NULL;
1688 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1690 /* Look for possible opportunities to merge cases. */
/* Index 0 is the default label; start compacting at index 1.  */
1691 new_size = i = 1;
1692 while (i < old_size)
1694 tree base_case, base_high;
1695 basic_block base_bb;
1697 base_case = gimple_switch_label (stmt, i);
1699 gcc_assert (base_case);
1700 base_bb = label_to_block (CASE_LABEL (base_case));
1702 /* Discard cases that have the same destination as the default case or
1703 whose destination blocks have already been removed as unreachable. */
1704 if (base_bb == NULL || base_bb == default_bb)
1706 i++;
1707 continue;
1710 base_high = CASE_HIGH (base_case)
1711 ? CASE_HIGH (base_case)
1712 : CASE_LOW (base_case);
1713 next_index = i + 1;
1715 /* Try to merge case labels. Break out when we reach the end
1716 of the label vector or when we cannot merge the next case
1717 label with the current one. */
1718 while (next_index < old_size)
1720 tree merge_case = gimple_switch_label (stmt, next_index);
1721 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1722 wide_int bhp1 = wi::to_wide (base_high) + 1;
1724 /* Merge the cases if they jump to the same place,
1725 and their ranges are consecutive. */
1726 if (merge_bb == base_bb
1727 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1729 base_high = CASE_HIGH (merge_case) ?
1730 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1731 CASE_HIGH (base_case) = base_high;
1732 next_index++;
1734 else
1735 break;
1738 /* Discard cases that have an unreachable destination block. */
1739 if (EDGE_COUNT (base_bb->succs) == 0
1740 && gimple_seq_unreachable_p (bb_seq (base_bb)))
1742 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1743 if (base_edge != NULL)
1744 remove_edge_and_dominated_blocks (base_edge);
1745 i = next_index;
1746 continue;
/* Compact the kept (possibly merged) case down into slot NEW_SIZE.  */
1749 if (new_size < i)
1750 gimple_switch_set_label (stmt, new_size,
1751 gimple_switch_label (stmt, i));
1752 i = next_index;
1753 new_size++;
1756 gcc_assert (new_size <= old_size);
1758 if (new_size < old_size)
1759 gimple_switch_set_num_labels (stmt, new_size);
/* Return whether any label was dropped.  */
1761 return new_size < old_size;
1764 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1765 and scan the sorted vector of cases. Combine the ones jumping to the
1766 same label. */
1768 bool
1769 group_case_labels (void)
1771 basic_block bb;
1772 bool changed = false;
1774 FOR_EACH_BB_FN (bb, cfun)
1776 gimple *stmt = last_stmt (bb);
1777 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1778 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
/* True if any switch in the function had labels merged.  */
1781 return changed;
1784 /* Checks whether we can merge block B into block A. */
1786 static bool
1787 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1789 gimple *stmt;
/* A must have exactly one, non-complex successor, and that successor
   must be B; B must have A as its single predecessor.  */
1791 if (!single_succ_p (a))
1792 return false;
1794 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1795 return false;
1797 if (single_succ (a) != b)
1798 return false;
1800 if (!single_pred_p (b))
1801 return false;
1803 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1804 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1805 return false;
1807 /* If A ends by a statement causing exceptions or something similar, we
1808 cannot merge the blocks. */
1809 stmt = last_stmt (a);
1810 if (stmt && stmt_ends_bb_p (stmt))
1811 return false;
1813 /* Do not allow a block with only a non-local label to be merged. */
1814 if (stmt)
1815 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1816 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1817 return false;
1819 /* Examine the labels at the beginning of B. */
1820 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1821 gsi_next (&gsi))
1823 tree lab;
1824 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1825 if (!label_stmt)
1826 break;
1827 lab = gimple_label_label (label_stmt);
1829 /* Do not remove user forced labels or for -O0 any user labels. */
1830 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1831 return false;
1834 /* Protect simple loop latches. We only want to avoid merging
1835 the latch with the loop header or with a block in another
1836 loop in this case. */
1837 if (current_loops
1838 && b->loop_father->latch == b
1839 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1840 && (b->loop_father->header == a
1841 || b->loop_father != a->loop_father))
1842 return false;
1844 /* It must be possible to eliminate all phi nodes in B. If ssa form
1845 is not up-to-date and a name-mapping is registered, we cannot eliminate
1846 any phis. Symbols marked for renaming are never a problem though. */
1847 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1848 gsi_next (&gsi))
1850 gphi *phi = gsi.phi ();
1851 /* Technically only new names matter. */
1852 if (name_registered_for_update_p (PHI_RESULT (phi)))
1853 return false;
1856 /* When not optimizing, don't merge if we'd lose goto_locus. */
1857 if (!optimize
1858 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1860 location_t goto_locus = single_succ_edge (a)->goto_locus;
1861 gimple_stmt_iterator prev, next;
1862 prev = gsi_last_nondebug_bb (a);
1863 next = gsi_after_labels (b);
1864 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1865 gsi_next_nondebug (&next);
/* The locus is preserved if either the last real stmt of A or the
   first real stmt of B already carries it.  */
1866 if ((gsi_end_p (prev)
1867 || gimple_location (gsi_stmt (prev)) != goto_locus)
1868 && (gsi_end_p (next)
1869 || gimple_location (gsi_stmt (next)) != goto_locus))
1870 return false;
1873 return true;
1876 /* Replaces all uses of NAME by VAL. */
1878 void
1879 replace_uses_by (tree name, tree val)
1881 imm_use_iterator imm_iter;
1882 use_operand_p use;
1883 gimple *stmt;
1884 edge e;
1886 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1888 /* Mark the block if we change the last stmt in it. */
1889 if (cfgcleanup_altered_bbs
1890 && stmt_ends_bb_p (stmt))
1891 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1893 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1895 replace_exp (use, val);
1897 if (gimple_code (stmt) == GIMPLE_PHI)
1899 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1900 PHI_ARG_INDEX_FROM_USE (use));
1901 if (e->flags & EDGE_ABNORMAL
1902 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1904 /* This can only occur for virtual operands, since
1905 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1906 would prevent replacement. */
1907 gcc_checking_assert (virtual_operand_p (name));
1908 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements, re-fold and keep EH info consistent after
   the operand replacement.  */
1913 if (gimple_code (stmt) != GIMPLE_PHI)
1915 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1916 gimple *orig_stmt = stmt;
1917 size_t i;
1919 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1920 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1921 only change sth from non-invariant to invariant, and only
1922 when propagating constants. */
1923 if (is_gimple_min_invariant (val))
1924 for (i = 0; i < gimple_num_ops (stmt); i++)
1926 tree op = gimple_op (stmt, i);
1927 /* Operands may be empty here. For example, the labels
1928 of a GIMPLE_COND are nulled out following the creation
1929 of the corresponding CFG edges. */
1930 if (op && TREE_CODE (op) == ADDR_EXPR)
1931 recompute_tree_invariant_for_addr_expr (op);
1934 if (fold_stmt (&gsi))
1935 stmt = gsi_stmt (gsi);
1937 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1938 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1940 update_stmt (stmt);
/* Every use must have been rewritten.  */
1944 gcc_checking_assert (has_zero_uses (name));
1946 /* Also update the trees stored in loop structures. */
1947 if (current_loops)
1949 struct loop *loop;
1951 FOR_EACH_LOOP (loop, 0)
1953 substitute_in_loop_info (loop, name, val);
1958 /* Merge block B into block A. */
1960 static void
1961 gimple_merge_blocks (basic_block a, basic_block b)
1963 gimple_stmt_iterator last, gsi;
1964 gphi_iterator psi;
1966 if (dump_file)
1967 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1969 /* Remove all single-valued PHI nodes from block B of the form
1970 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1971 gsi = gsi_last_bb (a);
1972 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1974 gimple *phi = gsi_stmt (psi);
1975 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1976 gimple *copy;
1977 bool may_replace_uses = (virtual_operand_p (def)
1978 || may_propagate_copy (def, use));
1980 /* In case we maintain loop closed ssa form, do not propagate arguments
1981 of loop exit phi nodes. */
1982 if (current_loops
1983 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1984 && !virtual_operand_p (def)
1985 && TREE_CODE (use) == SSA_NAME
1986 && a->loop_father != b->loop_father)
1987 may_replace_uses = false;
1989 if (!may_replace_uses)
1991 gcc_assert (!virtual_operand_p (def));
1993 /* Note that just emitting the copies is fine -- there is no problem
1994 with ordering of phi nodes. This is because A is the single
1995 predecessor of B, therefore results of the phi nodes cannot
1996 appear as arguments of the phi nodes. */
1997 copy = gimple_build_assign (def, use);
1998 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1999 remove_phi_node (&psi, false);
2001 else
2003 /* If we deal with a PHI for virtual operands, we can simply
2004 propagate these without fussing with folding or updating
2005 the stmt. */
2006 if (virtual_operand_p (def))
2008 imm_use_iterator iter;
2009 use_operand_p use_p;
2010 gimple *stmt;
2012 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2013 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2014 SET_USE (use_p, use);
2016 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2017 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2019 else
2020 replace_uses_by (def, use);
2022 remove_phi_node (&psi, true);
2026 /* Ensure that B follows A. */
2027 move_block_after (b, a);
2029 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2030 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2032 /* Remove labels from B and set gimple_bb to A for other statements. */
2033 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2035 gimple *stmt = gsi_stmt (gsi);
2036 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2038 tree label = gimple_label_label (label_stmt);
2039 int lp_nr;
2041 gsi_remove (&gsi, false);
2043 /* Now that we can thread computed gotos, we might have
2044 a situation where we have a forced label in block B
2045 However, the label at the start of block B might still be
2046 used in other ways (think about the runtime checking for
2047 Fortran assigned gotos). So we can not just delete the
2048 label. Instead we move the label to the start of block A. */
2049 if (FORCED_LABEL (label))
2051 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2052 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2054 /* Other user labels keep around in a form of a debug stmt. */
2055 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2057 gimple *dbg = gimple_build_debug_bind (label,
2058 integer_zero_node,
2059 stmt);
2060 gimple_debug_bind_reset_value (dbg);
2061 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* If the removed label was an EH landing pad, the pad no longer has
   a post-landing-pad label.  */
2064 lp_nr = EH_LANDING_PAD_NR (label);
2065 if (lp_nr)
2067 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2068 lp->post_landing_pad = NULL;
2071 else
2073 gimple_set_bb (stmt, a);
2074 gsi_next (&gsi);
2078 /* When merging two BBs, if their counts are different, the larger count
2079 is selected as the new bb count. This is to handle inconsistent
2080 profiles. */
2081 if (a->loop_father == b->loop_father)
2083 a->count = a->count.merge (b->count);
2084 a->frequency = MAX (a->frequency, b->frequency);
2087 /* Merge the sequences. */
2088 last = gsi_last_bb (a);
2089 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2090 set_bb_seq (b, NULL);
2092 if (cfgcleanup_altered_bbs)
2093 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2097 /* Return the one of two successors of BB that is not reachable by a
2098 complex edge, if there is one. Else, return BB. We use
2099 this in optimizations that use post-dominators for their heuristics,
2100 to catch the cases in C++ where function calls are involved. */
2102 basic_block
2103 single_noncomplex_succ (basic_block bb)
2105 edge e0, e1;
/* Only meaningful for blocks with exactly two successors.  */
2106 if (EDGE_COUNT (bb->succs) != 2)
2107 return bb;
2109 e0 = EDGE_SUCC (bb, 0);
2110 e1 = EDGE_SUCC (bb, 1);
2111 if (e0->flags & EDGE_COMPLEX)
2112 return e1->dest;
2113 if (e1->flags & EDGE_COMPLEX)
2114 return e0->dest;
/* Neither edge is complex: no unique answer, return BB itself.  */
2116 return bb;
2119 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2121 void
2122 notice_special_calls (gcall *call)
2124 int flags = gimple_call_flags (call);
/* Record per-function facts used by later passes: whether the function
   may call alloca, and whether it calls a returns-twice function
   (setjmp and friends).  */
2126 if (flags & ECF_MAY_BE_ALLOCA)
2127 cfun->calls_alloca = true;
2128 if (flags & ECF_RETURNS_TWICE)
2129 cfun->calls_setjmp = true;
2133 /* Clear flags set by notice_special_calls. Used by dead code removal
2134 to update the flags. */
2136 void
2137 clear_special_calls (void)
2139 cfun->calls_alloca = false;
2140 cfun->calls_setjmp = false;
2143 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2145 static void
2146 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2148 /* Since this block is no longer reachable, we can just delete all
2149 of its PHI nodes. */
2150 remove_phi_nodes (bb);
2152 /* Remove edges to BB's successors. */
/* remove_edge shifts the successor vector, so always take index 0.  */
2153 while (EDGE_COUNT (bb->succs) > 0)
2154 remove_edge (EDGE_SUCC (bb, 0));
2158 /* Remove statements of basic block BB. */
2160 static void
2161 remove_bb (basic_block bb)
2163 gimple_stmt_iterator i;
2165 if (dump_file)
2167 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2168 if (dump_flags & TDF_DETAILS)
2170 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2171 fprintf (dump_file, "\n");
2175 if (current_loops)
2177 struct loop *loop = bb->loop_father;
2179 /* If a loop gets removed, clean up the information associated
2180 with it. */
2181 if (loop->latch == bb
2182 || loop->header == bb)
2183 free_numbers_of_iterations_estimates (loop);
2186 /* Remove all the instructions in the block. */
2187 if (bb_seq (bb) != NULL)
2189 /* Walk backwards so as to get a chance to substitute all
2190 released DEFs into debug stmts. See
2191 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2192 details. */
2193 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2195 gimple *stmt = gsi_stmt (i);
2196 glabel *label_stmt = dyn_cast <glabel *> (stmt);
/* Forced and non-local labels may still be referenced from elsewhere,
   so move them to the previous block instead of deleting them.  */
2197 if (label_stmt
2198 && (FORCED_LABEL (gimple_label_label (label_stmt))
2199 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2201 basic_block new_bb;
2202 gimple_stmt_iterator new_gsi;
2204 /* A non-reachable non-local label may still be referenced.
2205 But it no longer needs to carry the extra semantics of
2206 non-locality. */
2207 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2209 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2210 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2213 new_bb = bb->prev_bb;
2214 new_gsi = gsi_start_bb (new_bb);
2215 gsi_remove (&i, false);
2216 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2218 else
2220 /* Release SSA definitions. */
2221 release_defs (stmt);
2222 gsi_remove (&i, true);
/* Restart from the (new) last stmt if we removed the old one;
   otherwise simply step backwards.  */
2225 if (gsi_end_p (i))
2226 i = gsi_last_bb (bb);
2227 else
2228 gsi_prev (&i);
2232 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2233 bb->il.gimple.seq = NULL;
2234 bb->il.gimple.phi_nodes = NULL;
2238 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2239 predicate VAL, return the edge that will be taken out of the block.
2240 If VAL does not match a unique edge, NULL is returned. */
2242 edge
2243 find_taken_edge (basic_block bb, tree val)
2245 gimple *stmt;
2247 stmt = last_stmt (bb);
2249 gcc_assert (is_ctrl_stmt (stmt));
/* Dispatch on the kind of control statement ending BB.  */
2251 if (gimple_code (stmt) == GIMPLE_COND)
2252 return find_taken_edge_cond_expr (bb, val);
2254 if (gimple_code (stmt) == GIMPLE_SWITCH)
2255 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2257 if (computed_goto_p (stmt))
2259 /* Only optimize if the argument is a label, if the argument is
2260 not a label then we can not construct a proper CFG.
2262 It may be the case that we only need to allow the LABEL_REF to
2263 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2264 appear inside a LABEL_EXPR just to be safe. */
2265 if (val
2266 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2267 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2268 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2269 return NULL;
/* is_ctrl_stmt guaranteed one of the cases above.  */
2272 gcc_unreachable ();
2275 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2276 statement, determine which of the outgoing edges will be taken out of the
2277 block. Return NULL if either edge may be taken. */
2279 static edge
2280 find_taken_edge_computed_goto (basic_block bb, tree val)
2282 basic_block dest;
2283 edge e = NULL;
2285 dest = label_to_block (val);
2286 if (dest)
2288 e = find_edge (bb, dest);
2289 gcc_assert (e != NULL);
2292 return e;
2295 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2296 statement, determine which of the two edges will be taken out of the
2297 block. Return NULL if either edge may be taken. */
2299 static edge
2300 find_taken_edge_cond_expr (basic_block bb, tree val)
2302 edge true_edge, false_edge;
2304 if (val == NULL
2305 || TREE_CODE (val) != INTEGER_CST)
2306 return NULL;
2308 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2310 return (integer_zerop (val) ? false_edge : true_edge);
2313 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2314 statement, determine which edge will be taken out of the block. Return
2315 NULL if any edge may be taken. */
2317 static edge
2318 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2319 tree val)
2321 basic_block dest_bb;
2322 edge e;
2323 tree taken_case;
2325 if (gimple_switch_num_labels (switch_stmt) == 1)
2326 taken_case = gimple_switch_default_label (switch_stmt);
2327 else if (! val || TREE_CODE (val) != INTEGER_CST)
2328 return NULL;
2329 else
2330 taken_case = find_case_label_for_value (switch_stmt, val);
2331 dest_bb = label_to_block (CASE_LABEL (taken_case));
2333 e = find_edge (bb, dest_bb);
2334 gcc_assert (e);
2335 return e;
2339 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2340 We can make optimal use here of the fact that the case labels are
2341 sorted: We can do a binary search for a case matching VAL. */
2343 static tree
2344 find_case_label_for_value (gswitch *switch_stmt, tree val)
2346 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2347 tree default_case = gimple_switch_default_label (switch_stmt);
2349 for (low = 0, high = n; high - low > 1; )
2351 size_t i = (high + low) / 2;
2352 tree t = gimple_switch_label (switch_stmt, i);
2353 int cmp;
2355 /* Cache the result of comparing CASE_LOW and val. */
2356 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2358 if (cmp > 0)
2359 high = i;
2360 else
2361 low = i;
2363 if (CASE_HIGH (t) == NULL)
2365 /* A singe-valued case label. */
2366 if (cmp == 0)
2367 return t;
2369 else
2371 /* A case range. We can only handle integer ranges. */
2372 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2373 return t;
2377 return default_case;
2381 /* Dump a basic block on stderr. */
2383 void
2384 gimple_debug_bb (basic_block bb)
2386 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2390 /* Dump basic block with index N on stderr. */
2392 basic_block
2393 gimple_debug_bb_n (int n)
2395 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2396 return BASIC_BLOCK_FOR_FN (cfun, n);
2400 /* Dump the CFG on stderr.
2402 FLAGS are the same used by the tree dumping functions
2403 (see TDF_* in dumpfile.h). */
2405 void
2406 gimple_debug_cfg (dump_flags_t flags)
2408 gimple_dump_cfg (stderr, flags);
2412 /* Dump the program showing basic block boundaries on the given FILE.
2414 FLAGS are the same used by the tree dumping functions (see TDF_* in
2415 tree.h). */
2417 void
2418 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2420 if (flags & TDF_DETAILS)
2422 dump_function_header (file, current_function_decl, flags);
2423 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2424 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2425 last_basic_block_for_fn (cfun));
2427 brief_dump_cfg (file, flags);
2428 fprintf (file, "\n");
2431 if (flags & TDF_STATS)
2432 dump_cfg_stats (file);
2434 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, persistent across calls within
     one compiler invocation.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  /* Table layouts: header row, int-count row, long-count row, total row.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by the basic block structures themselves.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count every successor edge once to get the edge total.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* num_merged_labels is accumulated by stmt_starts_bb_p when adjacent
     labels are folded into one block.  */
  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2488 /* Dump CFG statistics on stderr. Keep extern so that it's always
2489 linked in the final executable. */
2491 DEBUG_FUNCTION void
2492 debug_cfg_stats (void)
2494 dump_cfg_stats (stderr);
2497 /*---------------------------------------------------------------------------
2498 Miscellaneous helpers
2499 ---------------------------------------------------------------------------*/
2501 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2502 flow. Transfers of control flow associated with EH are excluded. */
2504 static bool
2505 call_can_make_abnormal_goto (gimple *t)
2507 /* If the function has no non-local labels, then a call cannot make an
2508 abnormal transfer of control. */
2509 if (!cfun->has_nonlocal_label
2510 && !cfun->calls_setjmp)
2511 return false;
2513 /* Likewise if the call has no side effects. */
2514 if (!gimple_has_side_effects (t))
2515 return false;
2517 /* Likewise if the called function is leaf. */
2518 if (gimple_call_flags (t) & ECF_LEAF)
2519 return false;
2521 return true;
2525 /* Return true if T can make an abnormal transfer of control flow.
2526 Transfers of control flow associated with EH are excluded. */
2528 bool
2529 stmt_can_make_abnormal_goto (gimple *t)
2531 if (computed_goto_p (t))
2532 return true;
2533 if (is_gimple_call (t))
2534 return call_can_make_abnormal_goto (t);
2535 return false;
2539 /* Return true if T represents a stmt that always transfers control. */
2541 bool
2542 is_ctrl_stmt (gimple *t)
2544 switch (gimple_code (t))
2546 case GIMPLE_COND:
2547 case GIMPLE_SWITCH:
2548 case GIMPLE_GOTO:
2549 case GIMPLE_RETURN:
2550 case GIMPLE_RESX:
2551 return true;
2552 default:
2553 return false;
2558 /* Return true if T is a statement that may alter the flow of control
2559 (e.g., a call to a non-returning function). */
2561 bool
2562 is_ctrl_altering_stmt (gimple *t)
2564 gcc_assert (t);
2566 switch (gimple_code (t))
2568 case GIMPLE_CALL:
2569 /* Per stmt call flag indicates whether the call could alter
2570 controlflow. */
2571 if (gimple_call_ctrl_altering_p (t))
2572 return true;
2573 break;
2575 case GIMPLE_EH_DISPATCH:
2576 /* EH_DISPATCH branches to the individual catch handlers at
2577 this level of a try or allowed-exceptions region. It can
2578 fallthru to the next statement as well. */
2579 return true;
2581 case GIMPLE_ASM:
2582 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2583 return true;
2584 break;
2586 CASE_GIMPLE_OMP:
2587 /* OpenMP directives alter control flow. */
2588 return true;
2590 case GIMPLE_TRANSACTION:
2591 /* A transaction start alters control flow. */
2592 return true;
2594 default:
2595 break;
2598 /* If a statement can throw, it alters control flow. */
2599 return stmt_can_throw_internal (t);
2603 /* Return true if T is a simple local goto. */
2605 bool
2606 simple_goto_p (gimple *t)
2608 return (gimple_code (t) == GIMPLE_GOTO
2609 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label after a non-local label must still start a block,
	     since the non-local label may be an abnormal-edge target.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  /* Fold this label into the previous label's block; the count
	     is reported by dump_cfg_stats.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similar to a nonlocal GOTO target and thus should
	   start a new block.  */
	return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
	  && prev_stmt
	  && gimple_code (prev_stmt) != GIMPLE_LABEL
	  && (gimple_code (prev_stmt) != GIMPLE_CALL
	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
	return true;
    }

  return false;
}
2668 /* Return true if T should end a basic block. */
2670 bool
2671 stmt_ends_bb_p (gimple *t)
2673 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2676 /* Remove block annotations and other data structures. */
2678 void
2679 delete_tree_cfg_annotations (struct function *fn)
2681 vec_free (label_to_block_map_for_fn (fn));
2684 /* Return the virtual phi in BB. */
2686 gphi *
2687 get_virtual_phi (basic_block bb)
2689 for (gphi_iterator gsi = gsi_start_phis (bb);
2690 !gsi_end_p (gsi);
2691 gsi_next (&gsi))
2693 gphi *phi = gsi.phi ();
2695 if (virtual_operand_p (PHI_RESULT (phi)))
2696 return phi;
2699 return NULL;
2702 /* Return the first statement in basic block BB. */
2704 gimple *
2705 first_stmt (basic_block bb)
2707 gimple_stmt_iterator i = gsi_start_bb (bb);
2708 gimple *stmt = NULL;
2710 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2712 gsi_next (&i);
2713 stmt = NULL;
2715 return stmt;
2718 /* Return the first non-label statement in basic block BB. */
2720 static gimple *
2721 first_non_label_stmt (basic_block bb)
2723 gimple_stmt_iterator i = gsi_start_bb (bb);
2724 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2725 gsi_next (&i);
2726 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2729 /* Return the last statement in basic block BB. */
2731 gimple *
2732 last_stmt (basic_block bb)
2734 gimple_stmt_iterator i = gsi_last_bb (bb);
2735 gimple *stmt = NULL;
2737 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2739 gsi_prev (&i);
2740 stmt = NULL;
2742 return stmt;
2745 /* Return the last statement of an otherwise empty block. Return NULL
2746 if the block is totally empty, or if it contains more than one
2747 statement. */
2749 gimple *
2750 last_and_only_stmt (basic_block bb)
2752 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2753 gimple *last, *prev;
2755 if (gsi_end_p (i))
2756 return NULL;
2758 last = gsi_stmt (i);
2759 gsi_prev_nondebug (&i);
2760 if (gsi_end_p (i))
2761 return last;
2763 /* Empty statements should no longer appear in the instruction stream.
2764 Everything that might have appeared before should be deleted by
2765 remove_useless_stmts, and the optimizers should just gsi_remove
2766 instead of smashing with build_empty_stmt.
2768 Thus the only thing that should appear here in a block containing
2769 one executable statement is a label. */
2770 prev = gsi_stmt (i);
2771 if (gimple_code (prev) == GIMPLE_LABEL)
2772 return last;
2773 else
2774 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  /* The redirect map holds the (result, def) pairs that were removed
     when OLD_EDGE was redirected; nothing to do if it is empty.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the map entries and the destination's PHI nodes in lockstep;
     the assert below checks that they stay paired up.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued arguments have been consumed.  */
  redirect_edge_var_map_clear (old_edge);
}
2806 /* Returns the basic block after which the new basic block created
2807 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2808 near its "logical" location. This is of most help to humans looking
2809 at debugging dumps. */
2811 basic_block
2812 split_edge_bb_loc (edge edge_in)
2814 basic_block dest = edge_in->dest;
2815 basic_block dest_prev = dest->prev_bb;
2817 if (dest_prev)
2819 edge e = find_edge (dest_prev, dest);
2820 if (e && !(e->flags & EDGE_COMPLEX))
2821 return edge_in->src;
2823 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Place the new block near its logical position for readable dumps.  */
  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits the edge's execution frequency and count.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count ();

  /* Point the split edge at the new block; redirection queues any PHI
     arguments of DEST for the edge in the redirect var map.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);

  /* Fall through from the new block to the original destination, and
     re-attach the queued PHI arguments to the fallthru edge.  */
  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2856 /* Verify properties of the address expression T with base object BASE. */
2858 static tree
2859 verify_address (tree t, tree base)
2861 bool old_constant;
2862 bool old_side_effects;
2863 bool new_constant;
2864 bool new_side_effects;
2866 old_constant = TREE_CONSTANT (t);
2867 old_side_effects = TREE_SIDE_EFFECTS (t);
2869 recompute_tree_invariant_for_addr_expr (t);
2870 new_side_effects = TREE_SIDE_EFFECTS (t);
2871 new_constant = TREE_CONSTANT (t);
2873 if (old_constant != new_constant)
2875 error ("constant not recomputed when ADDR_EXPR changed");
2876 return t;
2878 if (old_side_effects != new_side_effects)
2880 error ("side effects not recomputed when ADDR_EXPR changed");
2881 return t;
2884 if (!(VAR_P (base)
2885 || TREE_CODE (base) == PARM_DECL
2886 || TREE_CODE (base) == RESULT_DECL))
2887 return NULL_TREE;
2889 if (DECL_GIMPLE_REG_P (base))
2891 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2892 return base;
2895 return NULL_TREE;
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns a non-NULL offending tree (after emitting a
   diagnostic with error ()) to stop the walk, or NULL to continue.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case PARM_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      {
	/* Automatic declarations must belong to the current function.  */
	tree context = decl_function_context (t);
	if (context != cfun->decl
	    && !SCOPE_FILE_SCOPE_P (context)
	    && !TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  {
	    error ("Local declaration from a different function");
	    return t;
	  }
      }
      break;

    case INDIRECT_REF:
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR)
	{
	  tree va = verify_address (x, TREE_OPERAND (x, 0));
	  if (va)
	    return va;
	  x = TREE_OPERAND (x, 0);
	}
      /* Recurse into the address by hand and suppress the generic walk.  */
      walk_tree (&x, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(VAR_P (x)
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must never survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* t1 is the bit position, t2 the bit size; both must be
	     host-representable bitsizetype constants.  */
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      /* The base was walked by hand above; stop the generic walk here.  */
      walk_tree (&t, verify_expr, data, NULL);
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;
}
#undef CHECK_OP
3221 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3222 Returns true if there is an error, otherwise false. */
3224 static bool
3225 verify_types_in_gimple_min_lval (tree expr)
3227 tree op;
3229 if (is_gimple_id (expr))
3230 return false;
3232 if (TREE_CODE (expr) != TARGET_MEM_REF
3233 && TREE_CODE (expr) != MEM_REF)
3235 error ("invalid expression for min lvalue");
3236 return true;
3239 /* TARGET_MEM_REFs are strange beasts. */
3240 if (TREE_CODE (expr) == TARGET_MEM_REF)
3241 return false;
3243 op = TREE_OPERAND (expr, 0);
3244 if (!is_gimple_val (op))
3246 error ("invalid operand in indirect reference");
3247 debug_generic_stmt (op);
3248 return true;
3250 /* Memory references now generally can involve a value conversion. */
3252 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the handled-component chain from the outside in, checking each
     layer against its immediate operand's type.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (operands 1-3) must all be
	     gimple values when present.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr))
		      != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference chain.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3389 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3390 list of pointer-to types that is trivially convertible to DEST. */
3392 static bool
3393 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3395 tree src;
3397 if (!TYPE_POINTER_TO (src_obj))
3398 return true;
3400 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3401 if (useless_type_conversion_p (dest, src))
3402 return true;
3404 return false;
3407 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3408 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3410 static bool
3411 valid_fixed_convert_types_p (tree type1, tree type2)
3413 return (FIXED_POINT_TYPE_P (type1)
3414 && (INTEGRAL_TYPE_P (type2)
3415 || SCALAR_FLOAT_TYPE_P (type2)
3416 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry the function in the internal-fn field, so an
     explicit FN is a contradiction; external calls require one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
      /* FIXME : for passing label as arg in internal fn PHI from GIMPLE FE*/
      else if (gimple_call_internal_fn (stmt) == IFN_PHI)
	{
	  return false;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee expression must be a pointer to a function or method.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* Looping-const-or-pure implies const or pure.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A noreturn call never produces a value, so a removable LHS on one
     is an IL inconsistency.  */
  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_TRAP:
	  if (gimple_call_num_args (stmt) > 0)
	    {
	      /* Built-in unreachable with parameters might not be caught by
		 undefined behavior sanitizer.  Front-ends do check users do not
		 call them that way but we also produce calls to
		 __builtin_unreachable internally, for example when IPA figures
		 out a call cannot happen in a legal program.  In such cases,
		 we must make sure arguments are stripped off.  */
	      error ("__builtin_unreachable or __builtin_trap call with "
		     "arguments");
	      return true;
	    }
	  break;
	default:
	  break;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3576 /* Verifies the gimple comparison with the result type TYPE and
3577 the operands OP0 and OP1, comparison code is CODE. */
3579 static bool
3580 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3582 tree op0_type = TREE_TYPE (op0);
3583 tree op1_type = TREE_TYPE (op1);
3585 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3587 error ("invalid operands in gimple comparison");
3588 return true;
3591 /* For comparisons we do not have the operations type as the
3592 effective type the comparison is carried out in. Instead
3593 we require that either the first operand is trivially
3594 convertible into the second, or the other way around.
3595 Because we special-case pointers to void we allow
3596 comparisons of pointers with the same mode as well. */
3597 if (!useless_type_conversion_p (op0_type, op1_type)
3598 && !useless_type_conversion_p (op1_type, op0_type)
3599 && (!POINTER_TYPE_P (op0_type)
3600 || !POINTER_TYPE_P (op1_type)
3601 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3603 error ("mismatching comparison operand types");
3604 debug_generic_expr (op0_type);
3605 debug_generic_expr (op1_type);
3606 return true;
3609 /* The resulting type of a comparison may be an effective boolean type. */
3610 if (INTEGRAL_TYPE_P (type)
3611 && (TREE_CODE (type) == BOOLEAN_TYPE
3612 || TYPE_PRECISION (type) == 1))
3614 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3615 || TREE_CODE (op1_type) == VECTOR_TYPE)
3616 && code != EQ_EXPR && code != NE_EXPR
3617 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3618 && !VECTOR_INTEGER_TYPE_P (op0_type))
3620 error ("unsupported operation or type for vector comparison"
3621 " returning a boolean");
3622 debug_generic_expr (op0_type);
3623 debug_generic_expr (op1_type);
3624 return true;
3627 /* Or a boolean vector type with the same element count
3628 as the comparison operand types. */
3629 else if (TREE_CODE (type) == VECTOR_TYPE
3630 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3632 if (TREE_CODE (op0_type) != VECTOR_TYPE
3633 || TREE_CODE (op1_type) != VECTOR_TYPE)
3635 error ("non-vector operands in vector comparison");
3636 debug_generic_expr (op0_type);
3637 debug_generic_expr (op1_type);
3638 return true;
3641 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3643 error ("invalid vector comparison resulting type");
3644 debug_generic_expr (type);
3645 return true;
3648 else
3650 error ("bogus comparison result type");
3651 debug_generic_expr (type);
3652 return true;
3655 return false;
3658 /* Verify a gimple assignment statement STMT with an unary rhs.
3659 Returns true if anything is wrong. */
3661 static bool
3662 verify_gimple_assign_unary (gassign *stmt)
3664 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3665 tree lhs = gimple_assign_lhs (stmt);
3666 tree lhs_type = TREE_TYPE (lhs);
3667 tree rhs1 = gimple_assign_rhs1 (stmt);
3668 tree rhs1_type = TREE_TYPE (rhs1);
3670 if (!is_gimple_reg (lhs))
3672 error ("non-register as LHS of unary operation");
3673 return true;
3676 if (!is_gimple_val (rhs1))
3678 error ("invalid operand in unary operation");
3679 return true;
3682 /* First handle conversions. */
3683 switch (rhs_code)
3685 CASE_CONVERT:
3687 /* Allow conversions from pointer type to integral type only if
3688 there is no sign or zero extension involved.
3689 For targets were the precision of ptrofftype doesn't match that
3690 of pointers we need to allow arbitrary conversions to ptrofftype. */
3691 if ((POINTER_TYPE_P (lhs_type)
3692 && INTEGRAL_TYPE_P (rhs1_type))
3693 || (POINTER_TYPE_P (rhs1_type)
3694 && INTEGRAL_TYPE_P (lhs_type)
3695 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3696 || ptrofftype_p (sizetype))))
3697 return false;
3699 /* Allow conversion from integral to offset type and vice versa. */
3700 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3701 && INTEGRAL_TYPE_P (rhs1_type))
3702 || (INTEGRAL_TYPE_P (lhs_type)
3703 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3704 return false;
3706 /* Otherwise assert we are converting between types of the
3707 same kind. */
3708 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3710 error ("invalid types in nop conversion");
3711 debug_generic_expr (lhs_type);
3712 debug_generic_expr (rhs1_type);
3713 return true;
3716 return false;
3719 case ADDR_SPACE_CONVERT_EXPR:
3721 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3722 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3723 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3725 error ("invalid types in address space conversion");
3726 debug_generic_expr (lhs_type);
3727 debug_generic_expr (rhs1_type);
3728 return true;
3731 return false;
3734 case FIXED_CONVERT_EXPR:
3736 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3737 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3739 error ("invalid types in fixed-point conversion");
3740 debug_generic_expr (lhs_type);
3741 debug_generic_expr (rhs1_type);
3742 return true;
3745 return false;
3748 case FLOAT_EXPR:
3750 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3751 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3752 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3754 error ("invalid types in conversion to floating point");
3755 debug_generic_expr (lhs_type);
3756 debug_generic_expr (rhs1_type);
3757 return true;
3760 return false;
3763 case FIX_TRUNC_EXPR:
3765 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3766 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3767 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3769 error ("invalid types in conversion to integer");
3770 debug_generic_expr (lhs_type);
3771 debug_generic_expr (rhs1_type);
3772 return true;
3775 return false;
3777 case REDUC_MAX_EXPR:
3778 case REDUC_MIN_EXPR:
3779 case REDUC_PLUS_EXPR:
3780 if (!VECTOR_TYPE_P (rhs1_type)
3781 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3783 error ("reduction should convert from vector to element type");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 return true;
3788 return false;
3790 case VEC_UNPACK_HI_EXPR:
3791 case VEC_UNPACK_LO_EXPR:
3792 case VEC_UNPACK_FLOAT_HI_EXPR:
3793 case VEC_UNPACK_FLOAT_LO_EXPR:
3794 /* FIXME. */
3795 return false;
3797 case NEGATE_EXPR:
3798 case ABS_EXPR:
3799 case BIT_NOT_EXPR:
3800 case PAREN_EXPR:
3801 case CONJ_EXPR:
3802 break;
3804 default:
3805 gcc_unreachable ();
3808 /* For the remaining codes assert there is no conversion involved. */
3809 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3811 error ("non-trivial conversion in unary operation");
3812 debug_generic_expr (lhs_type);
3813 debug_generic_expr (rhs1_type);
3814 return true;
3817 return false;
3820 /* Verify a gimple assignment statement STMT with a binary rhs.
3821 Returns true if anything is wrong. */
3823 static bool
3824 verify_gimple_assign_binary (gassign *stmt)
3826 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3827 tree lhs = gimple_assign_lhs (stmt);
3828 tree lhs_type = TREE_TYPE (lhs);
3829 tree rhs1 = gimple_assign_rhs1 (stmt);
3830 tree rhs1_type = TREE_TYPE (rhs1);
3831 tree rhs2 = gimple_assign_rhs2 (stmt);
3832 tree rhs2_type = TREE_TYPE (rhs2);
3834 if (!is_gimple_reg (lhs))
3836 error ("non-register as LHS of binary operation");
3837 return true;
3840 if (!is_gimple_val (rhs1)
3841 || !is_gimple_val (rhs2))
3843 error ("invalid operands in binary operation");
3844 return true;
3847 /* First handle operations that involve different types. */
3848 switch (rhs_code)
3850 case COMPLEX_EXPR:
3852 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3853 || !(INTEGRAL_TYPE_P (rhs1_type)
3854 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3855 || !(INTEGRAL_TYPE_P (rhs2_type)
3856 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3858 error ("type mismatch in complex expression");
3859 debug_generic_expr (lhs_type);
3860 debug_generic_expr (rhs1_type);
3861 debug_generic_expr (rhs2_type);
3862 return true;
3865 return false;
3868 case LSHIFT_EXPR:
3869 case RSHIFT_EXPR:
3870 case LROTATE_EXPR:
3871 case RROTATE_EXPR:
3873 /* Shifts and rotates are ok on integral types, fixed point
3874 types and integer vector types. */
3875 if ((!INTEGRAL_TYPE_P (rhs1_type)
3876 && !FIXED_POINT_TYPE_P (rhs1_type)
3877 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3878 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3879 || (!INTEGRAL_TYPE_P (rhs2_type)
3880 /* Vector shifts of vectors are also ok. */
3881 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3882 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3883 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3884 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3885 || !useless_type_conversion_p (lhs_type, rhs1_type))
3887 error ("type mismatch in shift expression");
3888 debug_generic_expr (lhs_type);
3889 debug_generic_expr (rhs1_type);
3890 debug_generic_expr (rhs2_type);
3891 return true;
3894 return false;
3897 case WIDEN_LSHIFT_EXPR:
3899 if (!INTEGRAL_TYPE_P (lhs_type)
3900 || !INTEGRAL_TYPE_P (rhs1_type)
3901 || TREE_CODE (rhs2) != INTEGER_CST
3902 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3904 error ("type mismatch in widening vector shift expression");
3905 debug_generic_expr (lhs_type);
3906 debug_generic_expr (rhs1_type);
3907 debug_generic_expr (rhs2_type);
3908 return true;
3911 return false;
3914 case VEC_WIDEN_LSHIFT_HI_EXPR:
3915 case VEC_WIDEN_LSHIFT_LO_EXPR:
3917 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3918 || TREE_CODE (lhs_type) != VECTOR_TYPE
3919 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3920 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3921 || TREE_CODE (rhs2) != INTEGER_CST
3922 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3923 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3925 error ("type mismatch in widening vector shift expression");
3926 debug_generic_expr (lhs_type);
3927 debug_generic_expr (rhs1_type);
3928 debug_generic_expr (rhs2_type);
3929 return true;
3932 return false;
3935 case PLUS_EXPR:
3936 case MINUS_EXPR:
3938 tree lhs_etype = lhs_type;
3939 tree rhs1_etype = rhs1_type;
3940 tree rhs2_etype = rhs2_type;
3941 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3943 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3944 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3946 error ("invalid non-vector operands to vector valued plus");
3947 return true;
3949 lhs_etype = TREE_TYPE (lhs_type);
3950 rhs1_etype = TREE_TYPE (rhs1_type);
3951 rhs2_etype = TREE_TYPE (rhs2_type);
3953 if (POINTER_TYPE_P (lhs_etype)
3954 || POINTER_TYPE_P (rhs1_etype)
3955 || POINTER_TYPE_P (rhs2_etype))
3957 error ("invalid (pointer) operands to plus/minus");
3958 return true;
3961 /* Continue with generic binary expression handling. */
3962 break;
3965 case POINTER_PLUS_EXPR:
3967 if (!POINTER_TYPE_P (rhs1_type)
3968 || !useless_type_conversion_p (lhs_type, rhs1_type)
3969 || !ptrofftype_p (rhs2_type))
3971 error ("type mismatch in pointer plus expression");
3972 debug_generic_stmt (lhs_type);
3973 debug_generic_stmt (rhs1_type);
3974 debug_generic_stmt (rhs2_type);
3975 return true;
3978 return false;
3981 case TRUTH_ANDIF_EXPR:
3982 case TRUTH_ORIF_EXPR:
3983 case TRUTH_AND_EXPR:
3984 case TRUTH_OR_EXPR:
3985 case TRUTH_XOR_EXPR:
3987 gcc_unreachable ();
3989 case LT_EXPR:
3990 case LE_EXPR:
3991 case GT_EXPR:
3992 case GE_EXPR:
3993 case EQ_EXPR:
3994 case NE_EXPR:
3995 case UNORDERED_EXPR:
3996 case ORDERED_EXPR:
3997 case UNLT_EXPR:
3998 case UNLE_EXPR:
3999 case UNGT_EXPR:
4000 case UNGE_EXPR:
4001 case UNEQ_EXPR:
4002 case LTGT_EXPR:
4003 /* Comparisons are also binary, but the result type is not
4004 connected to the operand types. */
4005 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4007 case WIDEN_MULT_EXPR:
4008 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4009 return true;
4010 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4011 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4013 case WIDEN_SUM_EXPR:
4015 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4016 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4017 && ((!INTEGRAL_TYPE_P (rhs1_type)
4018 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4019 || (!INTEGRAL_TYPE_P (lhs_type)
4020 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4021 || !useless_type_conversion_p (lhs_type, rhs2_type)
4022 || (GET_MODE_SIZE (element_mode (rhs2_type))
4023 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4025 error ("type mismatch in widening sum reduction");
4026 debug_generic_expr (lhs_type);
4027 debug_generic_expr (rhs1_type);
4028 debug_generic_expr (rhs2_type);
4029 return true;
4031 return false;
4034 case VEC_WIDEN_MULT_HI_EXPR:
4035 case VEC_WIDEN_MULT_LO_EXPR:
4036 case VEC_WIDEN_MULT_EVEN_EXPR:
4037 case VEC_WIDEN_MULT_ODD_EXPR:
4039 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040 || TREE_CODE (lhs_type) != VECTOR_TYPE
4041 || !types_compatible_p (rhs1_type, rhs2_type)
4042 || (GET_MODE_SIZE (element_mode (lhs_type))
4043 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4045 error ("type mismatch in vector widening multiplication");
4046 debug_generic_expr (lhs_type);
4047 debug_generic_expr (rhs1_type);
4048 debug_generic_expr (rhs2_type);
4049 return true;
4051 return false;
4054 case VEC_PACK_TRUNC_EXPR:
4055 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4056 vector boolean types. */
4057 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4058 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4059 && types_compatible_p (rhs1_type, rhs2_type)
4060 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4061 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4062 return false;
4064 /* Fallthru. */
4065 case VEC_PACK_SAT_EXPR:
4066 case VEC_PACK_FIX_TRUNC_EXPR:
4068 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4069 || TREE_CODE (lhs_type) != VECTOR_TYPE
4070 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4071 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4072 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4073 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4074 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4075 || !types_compatible_p (rhs1_type, rhs2_type)
4076 || (GET_MODE_SIZE (element_mode (rhs1_type))
4077 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4079 error ("type mismatch in vector pack expression");
4080 debug_generic_expr (lhs_type);
4081 debug_generic_expr (rhs1_type);
4082 debug_generic_expr (rhs2_type);
4083 return true;
4086 return false;
4089 case MULT_EXPR:
4090 case MULT_HIGHPART_EXPR:
4091 case TRUNC_DIV_EXPR:
4092 case CEIL_DIV_EXPR:
4093 case FLOOR_DIV_EXPR:
4094 case ROUND_DIV_EXPR:
4095 case TRUNC_MOD_EXPR:
4096 case CEIL_MOD_EXPR:
4097 case FLOOR_MOD_EXPR:
4098 case ROUND_MOD_EXPR:
4099 case RDIV_EXPR:
4100 case EXACT_DIV_EXPR:
4101 case MIN_EXPR:
4102 case MAX_EXPR:
4103 case BIT_IOR_EXPR:
4104 case BIT_XOR_EXPR:
4105 case BIT_AND_EXPR:
4106 /* Continue with generic binary expression handling. */
4107 break;
4109 default:
4110 gcc_unreachable ();
4113 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4114 || !useless_type_conversion_p (lhs_type, rhs2_type))
4116 error ("type mismatch in binary expression");
4117 debug_generic_stmt (lhs_type);
4118 debug_generic_stmt (rhs1_type);
4119 debug_generic_stmt (rhs2_type);
4120 return true;
4123 return false;
4126 /* Verify a gimple assignment statement STMT with a ternary rhs.
4127 Returns true if anything is wrong. */
4129 static bool
4130 verify_gimple_assign_ternary (gassign *stmt)
4132 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4133 tree lhs = gimple_assign_lhs (stmt);
4134 tree lhs_type = TREE_TYPE (lhs);
4135 tree rhs1 = gimple_assign_rhs1 (stmt);
4136 tree rhs1_type = TREE_TYPE (rhs1);
4137 tree rhs2 = gimple_assign_rhs2 (stmt);
4138 tree rhs2_type = TREE_TYPE (rhs2);
4139 tree rhs3 = gimple_assign_rhs3 (stmt);
4140 tree rhs3_type = TREE_TYPE (rhs3);
4142 if (!is_gimple_reg (lhs))
4144 error ("non-register as LHS of ternary operation");
4145 return true;
4148 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4149 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4150 || !is_gimple_val (rhs2)
4151 || !is_gimple_val (rhs3))
4153 error ("invalid operands in ternary operation");
4154 return true;
4157 /* First handle operations that involve different types. */
4158 switch (rhs_code)
4160 case WIDEN_MULT_PLUS_EXPR:
4161 case WIDEN_MULT_MINUS_EXPR:
4162 if ((!INTEGRAL_TYPE_P (rhs1_type)
4163 && !FIXED_POINT_TYPE_P (rhs1_type))
4164 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4165 || !useless_type_conversion_p (lhs_type, rhs3_type)
4166 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4167 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4169 error ("type mismatch in widening multiply-accumulate expression");
4170 debug_generic_expr (lhs_type);
4171 debug_generic_expr (rhs1_type);
4172 debug_generic_expr (rhs2_type);
4173 debug_generic_expr (rhs3_type);
4174 return true;
4176 break;
4178 case FMA_EXPR:
4179 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4180 || !useless_type_conversion_p (lhs_type, rhs2_type)
4181 || !useless_type_conversion_p (lhs_type, rhs3_type))
4183 error ("type mismatch in fused multiply-add expression");
4184 debug_generic_expr (lhs_type);
4185 debug_generic_expr (rhs1_type);
4186 debug_generic_expr (rhs2_type);
4187 debug_generic_expr (rhs3_type);
4188 return true;
4190 break;
4192 case VEC_COND_EXPR:
4193 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4194 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4195 != TYPE_VECTOR_SUBPARTS (lhs_type))
4197 error ("the first argument of a VEC_COND_EXPR must be of a "
4198 "boolean vector type of the same number of elements "
4199 "as the result");
4200 debug_generic_expr (lhs_type);
4201 debug_generic_expr (rhs1_type);
4202 return true;
4204 /* Fallthrough. */
4205 case COND_EXPR:
4206 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4207 || !useless_type_conversion_p (lhs_type, rhs3_type))
4209 error ("type mismatch in conditional expression");
4210 debug_generic_expr (lhs_type);
4211 debug_generic_expr (rhs2_type);
4212 debug_generic_expr (rhs3_type);
4213 return true;
4215 break;
4217 case VEC_PERM_EXPR:
4218 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4219 || !useless_type_conversion_p (lhs_type, rhs2_type))
4221 error ("type mismatch in vector permute expression");
4222 debug_generic_expr (lhs_type);
4223 debug_generic_expr (rhs1_type);
4224 debug_generic_expr (rhs2_type);
4225 debug_generic_expr (rhs3_type);
4226 return true;
4229 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4230 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4231 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4233 error ("vector types expected in vector permute expression");
4234 debug_generic_expr (lhs_type);
4235 debug_generic_expr (rhs1_type);
4236 debug_generic_expr (rhs2_type);
4237 debug_generic_expr (rhs3_type);
4238 return true;
4241 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4242 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4243 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4244 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4245 != TYPE_VECTOR_SUBPARTS (lhs_type))
4247 error ("vectors with different element number found "
4248 "in vector permute expression");
4249 debug_generic_expr (lhs_type);
4250 debug_generic_expr (rhs1_type);
4251 debug_generic_expr (rhs2_type);
4252 debug_generic_expr (rhs3_type);
4253 return true;
4256 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4257 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4258 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4260 error ("invalid mask type in vector permute expression");
4261 debug_generic_expr (lhs_type);
4262 debug_generic_expr (rhs1_type);
4263 debug_generic_expr (rhs2_type);
4264 debug_generic_expr (rhs3_type);
4265 return true;
4268 return false;
4270 case SAD_EXPR:
4271 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4272 || !useless_type_conversion_p (lhs_type, rhs3_type)
4273 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4274 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4276 error ("type mismatch in sad expression");
4277 debug_generic_expr (lhs_type);
4278 debug_generic_expr (rhs1_type);
4279 debug_generic_expr (rhs2_type);
4280 debug_generic_expr (rhs3_type);
4281 return true;
4284 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4285 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4286 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4288 error ("vector types expected in sad expression");
4289 debug_generic_expr (lhs_type);
4290 debug_generic_expr (rhs1_type);
4291 debug_generic_expr (rhs2_type);
4292 debug_generic_expr (rhs3_type);
4293 return true;
4296 return false;
4298 case BIT_INSERT_EXPR:
4299 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4301 error ("type mismatch in BIT_INSERT_EXPR");
4302 debug_generic_expr (lhs_type);
4303 debug_generic_expr (rhs1_type);
4304 return true;
4306 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4307 && INTEGRAL_TYPE_P (rhs2_type))
4308 || (VECTOR_TYPE_P (rhs1_type)
4309 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4311 error ("not allowed type combination in BIT_INSERT_EXPR");
4312 debug_generic_expr (rhs1_type);
4313 debug_generic_expr (rhs2_type);
4314 return true;
4316 if (! tree_fits_uhwi_p (rhs3)
4317 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4318 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4320 error ("invalid position or size in BIT_INSERT_EXPR");
4321 return true;
4323 if (INTEGRAL_TYPE_P (rhs1_type))
4325 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4326 if (bitpos >= TYPE_PRECISION (rhs1_type)
4327 || (bitpos + TYPE_PRECISION (rhs2_type)
4328 > TYPE_PRECISION (rhs1_type)))
4330 error ("insertion out of range in BIT_INSERT_EXPR");
4331 return true;
4334 else if (VECTOR_TYPE_P (rhs1_type))
4336 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4337 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4338 if (bitpos % bitsize != 0)
4340 error ("vector insertion not at element boundary");
4341 return true;
4344 return false;
4346 case DOT_PROD_EXPR:
4348 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4349 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4350 && ((!INTEGRAL_TYPE_P (rhs1_type)
4351 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4352 || (!INTEGRAL_TYPE_P (lhs_type)
4353 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4354 || !types_compatible_p (rhs1_type, rhs2_type)
4355 || !useless_type_conversion_p (lhs_type, rhs3_type)
4356 || (GET_MODE_SIZE (element_mode (rhs3_type))
4357 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4359 error ("type mismatch in dot product reduction");
4360 debug_generic_expr (lhs_type);
4361 debug_generic_expr (rhs1_type);
4362 debug_generic_expr (rhs2_type);
4363 return true;
4365 return false;
4368 case REALIGN_LOAD_EXPR:
4369 /* FIXME. */
4370 return false;
4372 default:
4373 gcc_unreachable ();
4375 return false;
4378 /* Verify a gimple assignment statement STMT with a single rhs.
4379 Returns true if anything is wrong. */
4381 static bool
4382 verify_gimple_assign_single (gassign *stmt)
4384 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4385 tree lhs = gimple_assign_lhs (stmt);
4386 tree lhs_type = TREE_TYPE (lhs);
4387 tree rhs1 = gimple_assign_rhs1 (stmt);
4388 tree rhs1_type = TREE_TYPE (rhs1);
4389 bool res = false;
4391 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4393 error ("non-trivial conversion at assignment");
4394 debug_generic_expr (lhs_type);
4395 debug_generic_expr (rhs1_type);
4396 return true;
4399 if (gimple_clobber_p (stmt)
4400 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4402 error ("non-decl/MEM_REF LHS in clobber statement");
4403 debug_generic_expr (lhs);
4404 return true;
4407 if (handled_component_p (lhs)
4408 || TREE_CODE (lhs) == MEM_REF
4409 || TREE_CODE (lhs) == TARGET_MEM_REF)
4410 res |= verify_types_in_gimple_reference (lhs, true);
4412 /* Special codes we cannot handle via their class. */
4413 switch (rhs_code)
4415 case ADDR_EXPR:
4417 tree op = TREE_OPERAND (rhs1, 0);
4418 if (!is_gimple_addressable (op))
4420 error ("invalid operand in unary expression");
4421 return true;
4424 /* Technically there is no longer a need for matching types, but
4425 gimple hygiene asks for this check. In LTO we can end up
4426 combining incompatible units and thus end up with addresses
4427 of globals that change their type to a common one. */
4428 if (!in_lto_p
4429 && !types_compatible_p (TREE_TYPE (op),
4430 TREE_TYPE (TREE_TYPE (rhs1)))
4431 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4432 TREE_TYPE (op)))
4434 error ("type mismatch in address expression");
4435 debug_generic_stmt (TREE_TYPE (rhs1));
4436 debug_generic_stmt (TREE_TYPE (op));
4437 return true;
4440 return verify_types_in_gimple_reference (op, true);
4443 /* tcc_reference */
4444 case INDIRECT_REF:
4445 error ("INDIRECT_REF in gimple IL");
4446 return true;
4448 case COMPONENT_REF:
4449 case BIT_FIELD_REF:
4450 case ARRAY_REF:
4451 case ARRAY_RANGE_REF:
4452 case VIEW_CONVERT_EXPR:
4453 case REALPART_EXPR:
4454 case IMAGPART_EXPR:
4455 case TARGET_MEM_REF:
4456 case MEM_REF:
4457 if (!is_gimple_reg (lhs)
4458 && is_gimple_reg_type (TREE_TYPE (lhs)))
4460 error ("invalid rhs for gimple memory store");
4461 debug_generic_stmt (lhs);
4462 debug_generic_stmt (rhs1);
4463 return true;
4465 return res || verify_types_in_gimple_reference (rhs1, false);
4467 /* tcc_constant */
4468 case SSA_NAME:
4469 case INTEGER_CST:
4470 case REAL_CST:
4471 case FIXED_CST:
4472 case COMPLEX_CST:
4473 case VECTOR_CST:
4474 case STRING_CST:
4475 return res;
4477 /* tcc_declaration */
4478 case CONST_DECL:
4479 return res;
4480 case VAR_DECL:
4481 case PARM_DECL:
4482 if (!is_gimple_reg (lhs)
4483 && !is_gimple_reg (rhs1)
4484 && is_gimple_reg_type (TREE_TYPE (lhs)))
4486 error ("invalid rhs for gimple memory store");
4487 debug_generic_stmt (lhs);
4488 debug_generic_stmt (rhs1);
4489 return true;
4491 return res;
4493 case CONSTRUCTOR:
4494 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4496 unsigned int i;
4497 tree elt_i, elt_v, elt_t = NULL_TREE;
4499 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4500 return res;
4501 /* For vector CONSTRUCTORs we require that either it is empty
4502 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4503 (then the element count must be correct to cover the whole
4504 outer vector and index must be NULL on all elements, or it is
4505 a CONSTRUCTOR of scalar elements, where we as an exception allow
4506 smaller number of elements (assuming zero filling) and
4507 consecutive indexes as compared to NULL indexes (such
4508 CONSTRUCTORs can appear in the IL from FEs). */
4509 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4511 if (elt_t == NULL_TREE)
4513 elt_t = TREE_TYPE (elt_v);
4514 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4516 tree elt_t = TREE_TYPE (elt_v);
4517 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4518 TREE_TYPE (elt_t)))
4520 error ("incorrect type of vector CONSTRUCTOR"
4521 " elements");
4522 debug_generic_stmt (rhs1);
4523 return true;
4525 else if (CONSTRUCTOR_NELTS (rhs1)
4526 * TYPE_VECTOR_SUBPARTS (elt_t)
4527 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4529 error ("incorrect number of vector CONSTRUCTOR"
4530 " elements");
4531 debug_generic_stmt (rhs1);
4532 return true;
4535 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4536 elt_t))
4538 error ("incorrect type of vector CONSTRUCTOR elements");
4539 debug_generic_stmt (rhs1);
4540 return true;
4542 else if (CONSTRUCTOR_NELTS (rhs1)
4543 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4545 error ("incorrect number of vector CONSTRUCTOR elements");
4546 debug_generic_stmt (rhs1);
4547 return true;
4550 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4552 error ("incorrect type of vector CONSTRUCTOR elements");
4553 debug_generic_stmt (rhs1);
4554 return true;
4556 if (elt_i != NULL_TREE
4557 && (TREE_CODE (elt_t) == VECTOR_TYPE
4558 || TREE_CODE (elt_i) != INTEGER_CST
4559 || compare_tree_int (elt_i, i) != 0))
4561 error ("vector CONSTRUCTOR with non-NULL element index");
4562 debug_generic_stmt (rhs1);
4563 return true;
4565 if (!is_gimple_val (elt_v))
4567 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4568 debug_generic_stmt (rhs1);
4569 return true;
4573 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4575 error ("non-vector CONSTRUCTOR with elements");
4576 debug_generic_stmt (rhs1);
4577 return true;
4579 return res;
4580 case OBJ_TYPE_REF:
4581 case ASSERT_EXPR:
4582 case WITH_SIZE_EXPR:
4583 /* FIXME. */
4584 return res;
4586 default:;
4589 return res;
4592 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4593 is a problem, otherwise false. */
4595 static bool
4596 verify_gimple_assign (gassign *stmt)
4598 switch (gimple_assign_rhs_class (stmt))
4600 case GIMPLE_SINGLE_RHS:
4601 return verify_gimple_assign_single (stmt);
4603 case GIMPLE_UNARY_RHS:
4604 return verify_gimple_assign_unary (stmt);
4606 case GIMPLE_BINARY_RHS:
4607 return verify_gimple_assign_binary (stmt);
4609 case GIMPLE_TERNARY_RHS:
4610 return verify_gimple_assign_ternary (stmt);
4612 default:
4613 gcc_unreachable ();
4617 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4618 is a problem, otherwise false. */
4620 static bool
4621 verify_gimple_return (greturn *stmt)
4623 tree op = gimple_return_retval (stmt);
4624 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4626 /* We cannot test for present return values as we do not fix up missing
4627 return values from the original source. */
4628 if (op == NULL)
4629 return false;
4631 if (!is_gimple_val (op)
4632 && TREE_CODE (op) != RESULT_DECL)
4634 error ("invalid operand in return statement");
4635 debug_generic_stmt (op);
4636 return true;
4639 if ((TREE_CODE (op) == RESULT_DECL
4640 && DECL_BY_REFERENCE (op))
4641 || (TREE_CODE (op) == SSA_NAME
4642 && SSA_NAME_VAR (op)
4643 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4644 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4645 op = TREE_TYPE (op);
4647 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4649 error ("invalid conversion in return statement");
4650 debug_generic_stmt (restype);
4651 debug_generic_stmt (TREE_TYPE (op));
4652 return true;
4655 return false;
4659 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4660 is a problem, otherwise false. */
4662 static bool
4663 verify_gimple_goto (ggoto *stmt)
4665 tree dest = gimple_goto_dest (stmt);
4667 /* ??? We have two canonical forms of direct goto destinations, a
4668 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* A computed goto destination must be a gimple value of pointer type. */
4669 if (TREE_CODE (dest) != LABEL_DECL
4670 && (!is_gimple_val (dest)
4671 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4673 error ("goto destination is neither a label nor a pointer");
4674 return true;
4677 return false;
4680 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4681 is a problem, otherwise false. */
4683 static bool
4684 verify_gimple_switch (gswitch *stmt)
4686 unsigned int i, n;
4687 tree elt, prev_upper_bound = NULL_TREE;
4688 tree index_type, elt_type = NULL_TREE;
4690 if (!is_gimple_val (gimple_switch_index (stmt)))
4692 error ("invalid operand to switch statement");
4693 debug_generic_stmt (gimple_switch_index (stmt));
4694 return true;
4697 index_type = TREE_TYPE (gimple_switch_index (stmt));
4698 if (! INTEGRAL_TYPE_P (index_type))
4700 error ("non-integral type switch statement");
4701 debug_generic_expr (index_type);
4702 return true;
/* Label 0 is the default case and must have neither CASE_LOW nor
   CASE_HIGH. */
4705 elt = gimple_switch_label (stmt, 0);
4706 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4708 error ("invalid default case label in switch statement");
4709 debug_generic_expr (elt);
4710 return true;
/* Walk the remaining labels, checking well-formedness, type agreement
   with the first label, and ascending sort order. */
4713 n = gimple_switch_num_labels (stmt);
4714 for (i = 1; i < n; i++)
4716 elt = gimple_switch_label (stmt, i);
4718 if (! CASE_LOW (elt))
4720 error ("invalid case label in switch statement");
4721 debug_generic_expr (elt);
4722 return true;
4724 if (CASE_HIGH (elt)
4725 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4727 error ("invalid case range in switch statement");
4728 debug_generic_expr (elt);
4729 return true;
4732 if (elt_type)
4734 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4735 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4737 error ("type mismatch for case label in switch statement");
4738 debug_generic_expr (elt);
4739 return true;
4742 else
/* First non-default label establishes the common label type. */
4744 elt_type = TREE_TYPE (CASE_LOW (elt));
4745 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4747 error ("type precision mismatch in switch statement");
4748 return true;
4752 if (prev_upper_bound)
4754 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4756 error ("case labels not sorted in switch statement");
4757 return true;
/* Upper bound of a range case is CASE_HIGH; a single-value case
   uses CASE_LOW. */
4761 prev_upper_bound = CASE_HIGH (elt);
4762 if (! prev_upper_bound)
4763 prev_upper_bound = CASE_LOW (elt);
4766 return false;
4769 /* Verify a gimple debug statement STMT.
4770 Returns true if anything is wrong. */
4772 static bool
4773 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4775 /* There isn't much that could be wrong in a gimple debug stmt. A
4776 gimple debug bind stmt, for example, maps a tree, that's usually
4777 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4778 component or member of an aggregate type, to another tree, that
4779 can be an arbitrary expression. These stmts expand into debug
4780 insns, and are converted to debug notes by var-tracking.c. */
/* Deliberately always reports success. */
4781 return false;
4784 /* Verify a gimple label statement STMT.
4785 Returns true if anything is wrong. */
4787 static bool
4788 verify_gimple_label (glabel *stmt)
4790 tree decl = gimple_label_label (stmt);
4791 int uid;
4792 bool err = false;
4794 if (TREE_CODE (decl) != LABEL_DECL)
4795 return true;
/* Nonlocal and forced labels may belong to another function's context. */
4796 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4797 && DECL_CONTEXT (decl) != current_function_decl)
4799 error ("label's context is not the current function decl");
4800 err |= true;
/* The label's UID must map back to the block containing this stmt. */
4803 uid = LABEL_DECL_UID (decl);
4804 if (cfun->cfg
4805 && (uid == -1
4806 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4808 error ("incorrect entry in label_to_block_map");
4809 err |= true;
/* If the label is an EH landing pad, it must be that pad's
   post-landing-pad label. */
4812 uid = EH_LANDING_PAD_NR (decl);
4813 if (uid)
4815 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4816 if (decl != lp->post_landing_pad)
4818 error ("incorrect setting of landing pad number");
4819 err |= true;
4823 return err;
4826 /* Verify a gimple cond statement STMT.
4827 Returns true if anything is wrong. */
4829 static bool
4830 verify_gimple_cond (gcond *stmt)
4832 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4834 error ("invalid comparison code in gimple cond");
4835 return true;
/* Each label, when present, must be a LABEL_DECL. */
4837 if (!(!gimple_cond_true_label (stmt)
4838 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4839 || !(!gimple_cond_false_label (stmt)
4840 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4842 error ("invalid labels in gimple cond");
4843 return true;
/* Finally check that lhs CODE rhs is a valid boolean comparison. */
4846 return verify_gimple_comparison (boolean_type_node,
4847 gimple_cond_lhs (stmt),
4848 gimple_cond_rhs (stmt),
4849 gimple_cond_code (stmt));
4852 /* Verify the GIMPLE statement STMT. Returns true if there is an
4853 error, otherwise false. */
4855 static bool
4856 verify_gimple_stmt (gimple *stmt)
/* Dispatch to the per-code verifier; codes without operands to check
   return false directly. */
4858 switch (gimple_code (stmt))
4860 case GIMPLE_ASSIGN:
4861 return verify_gimple_assign (as_a <gassign *> (stmt));
4863 case GIMPLE_LABEL:
4864 return verify_gimple_label (as_a <glabel *> (stmt));
4866 case GIMPLE_CALL:
4867 return verify_gimple_call (as_a <gcall *> (stmt));
4869 case GIMPLE_COND:
4870 return verify_gimple_cond (as_a <gcond *> (stmt));
4872 case GIMPLE_GOTO:
4873 return verify_gimple_goto (as_a <ggoto *> (stmt));
4875 case GIMPLE_SWITCH:
4876 return verify_gimple_switch (as_a <gswitch *> (stmt));
4878 case GIMPLE_RETURN:
4879 return verify_gimple_return (as_a <greturn *> (stmt));
4881 case GIMPLE_ASM:
4882 return false;
4884 case GIMPLE_TRANSACTION:
4885 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4887 /* Tuples that do not have tree operands. */
4888 case GIMPLE_NOP:
4889 case GIMPLE_PREDICT:
4890 case GIMPLE_RESX:
4891 case GIMPLE_EH_DISPATCH:
4892 case GIMPLE_EH_MUST_NOT_THROW:
4893 return false;
4895 CASE_GIMPLE_OMP:
4896 /* OpenMP directives are validated by the FE and never operated
4897 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4898 non-gimple expressions when the main index variable has had
4899 its address taken. This does not affect the loop itself
4900 because the header of an GIMPLE_OMP_FOR is merely used to determine
4901 how to setup the parallel iteration. */
4902 return false;
4904 case GIMPLE_DEBUG:
4905 return verify_gimple_debug (stmt);
4907 default:
4908 gcc_unreachable ();
4912 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4913 and false otherwise. */
4915 static bool
4916 verify_gimple_phi (gimple *phi)
4918 bool err = false;
4919 unsigned i;
4920 tree phi_result = gimple_phi_result (phi);
4921 bool virtual_p;
4923 if (!phi_result)
4925 error ("invalid PHI result");
4926 return true;
/* The result must be an SSA name; a virtual result must additionally
   be based on the function's single virtual operand. */
4929 virtual_p = virtual_operand_p (phi_result);
4930 if (TREE_CODE (phi_result) != SSA_NAME
4931 || (virtual_p
4932 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4934 error ("invalid PHI result");
4935 err = true;
/* Every argument must exist and match the result's virtual-ness. */
4938 for (i = 0; i < gimple_phi_num_args (phi); i++)
4940 tree t = gimple_phi_arg_def (phi, i);
4942 if (!t)
4944 error ("missing PHI def");
4945 err |= true;
4946 continue;
4948 /* Addressable variables do have SSA_NAMEs but they
4949 are not considered gimple values. */
4950 else if ((TREE_CODE (t) == SSA_NAME
4951 && virtual_p != virtual_operand_p (t))
4952 || (virtual_p
4953 && (TREE_CODE (t) != SSA_NAME
4954 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4955 || (!virtual_p
4956 && !is_gimple_val (t)))
4958 error ("invalid PHI argument");
4959 debug_generic_expr (t);
4960 err |= true;
4962 #ifdef ENABLE_TYPES_CHECKING
4963 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4965 error ("incompatible types in PHI argument %u", i);
4966 debug_generic_stmt (TREE_TYPE (phi_result));
4967 debug_generic_stmt (TREE_TYPE (t));
4968 err |= true;
4970 #endif
4973 return err;
4976 /* Verify the GIMPLE statements inside the sequence STMTS. */
4978 static bool
4979 verify_gimple_in_seq_2 (gimple_seq stmts)
4981 gimple_stmt_iterator ittr;
4982 bool err = false;
/* Walk the sequence, recursing into every nested sequence a statement
   may carry; leaves are checked by verify_gimple_stmt. */
4984 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4986 gimple *stmt = gsi_stmt (ittr);
4988 switch (gimple_code (stmt))
4990 case GIMPLE_BIND:
4991 err |= verify_gimple_in_seq_2 (
4992 gimple_bind_body (as_a <gbind *> (stmt)));
4993 break;
4995 case GIMPLE_TRY:
4996 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4997 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4998 break;
5000 case GIMPLE_EH_FILTER:
5001 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5002 break;
5004 case GIMPLE_EH_ELSE:
5006 geh_else *eh_else = as_a <geh_else *> (stmt);
5007 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5008 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5010 break;
5012 case GIMPLE_CATCH:
5013 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5014 as_a <gcatch *> (stmt)));
5015 break;
5017 case GIMPLE_TRANSACTION:
5018 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5019 break;
5021 default:
/* Plain statement: verify it and dump it on failure. */
5023 bool err2 = verify_gimple_stmt (stmt);
5024 if (err2)
5025 debug_gimple_stmt (stmt);
5026 err |= err2;
5031 return err;
5034 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5035 is a problem, otherwise false. */
5037 static bool
5038 verify_gimple_transaction (gtransaction *stmt)
5040 tree lab;
/* Each of the three transaction labels (normal, uninstrumented, over)
   is optional but, when present, must be a LABEL_DECL. */
5042 lab = gimple_transaction_label_norm (stmt);
5043 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5044 return true;
5045 lab = gimple_transaction_label_uninst (stmt);
5046 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5047 return true;
5048 lab = gimple_transaction_label_over (stmt);
5049 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5050 return true;
/* Then verify the transaction body recursively. */
5052 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5056 /* Verify the GIMPLE statements inside the statement list STMTS. */
/* Public entry point: aborts compilation via internal_error on failure. */
5058 DEBUG_FUNCTION void
5059 verify_gimple_in_seq (gimple_seq stmts)
5061 timevar_push (TV_TREE_STMT_VERIFY);
5062 if (verify_gimple_in_seq_2 (stmts))
5063 internal_error ("verify_gimple failed");
5064 timevar_pop (TV_TREE_STMT_VERIFY);
5067 /* Return true when the T can be shared. */
5069 static bool
5070 tree_node_can_be_shared (tree t)
/* Types, decls, invariants, SSA names, error_mark_node and identifiers
   are inherently shareable tree nodes. */
5072 if (IS_TYPE_OR_DECL_P (t)
5073 || is_gimple_min_invariant (t)
5074 || TREE_CODE (t) == SSA_NAME
5075 || t == error_mark_node
5076 || TREE_CODE (t) == IDENTIFIER_NODE)
5077 return true;
5079 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5080 return true;
5082 if (DECL_P (t))
5083 return true;
5085 return false;
5088 /* Called via walk_tree. Verify tree sharing. */
5090 static tree
5091 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5093 hash_set<void *> *visited = (hash_set<void *> *) data;
5095 if (tree_node_can_be_shared (*tp))
5097 *walk_subtrees = false;
5098 return NULL;
/* Returning the node (non-NULL) stops the walk and reports the
   improperly shared node to the caller. */
5101 if (visited->add (*tp))
5102 return *tp;
5104 return NULL;
5107 /* Called via walk_gimple_stmt. Verify tree sharing. */
/* Thin adapter: unwraps the walk_stmt_info to reach the visited set. */
5109 static tree
5110 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5112 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5113 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
/* Traversal callback for the EH throw-stmt table: flag table entries
   whose statement no longer appears in the IL (a "dead" entry).
   Communicates failure through the eh_error_found global. */
5116 static bool eh_error_found;
5117 bool
5118 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5119 hash_set<gimple *> *visited)
5121 if (!visited->contains (stmt))
5123 error ("dead STMT in EH table");
5124 debug_gimple_stmt (stmt);
5125 eh_error_found = true;
/* Always continue the traversal. */
5127 return true;
5130 /* Verify if the location LOCs block is in BLOCKS. */
5132 static bool
5133 verify_location (hash_set<tree> *blocks, location_t loc)
5135 tree block = LOCATION_BLOCK (loc);
5136 if (block != NULL_TREE
5137 && !blocks->contains (block))
5139 error ("location references block not in block tree");
5140 return true;
/* Recurse on the block's own source location to validate the chain. */
5142 if (block != NULL_TREE)
5143 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5144 return false;
5147 /* Called via walk_tree. Verify that expressions have no blocks. */
/* Returns the offending expression (stopping the walk) when one carries
   a location with an associated block; NULL otherwise. */
5149 static tree
5150 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5152 if (!EXPR_P (*tp))
5154 *walk_subtrees = false;
5155 return NULL;
5158 location_t loc = EXPR_LOCATION (*tp);
5159 if (LOCATION_BLOCK (loc) != NULL)
5160 return *tp;
5162 return NULL;
5165 /* Called via walk_tree. Verify locations of expressions. */
5167 static tree
5168 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5170 hash_set<tree> *blocks = (hash_set<tree> *) data;
/* DEBUG_EXPRs attached to variables must not reference blocks. */
5172 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5174 tree t = DECL_DEBUG_EXPR (*tp);
5175 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5176 if (addr)
5177 return addr;
/* Likewise for DECL_VALUE_EXPRs of variables, parameters and results. */
5179 if ((VAR_P (*tp)
5180 || TREE_CODE (*tp) == PARM_DECL
5181 || TREE_CODE (*tp) == RESULT_DECL)
5182 && DECL_HAS_VALUE_EXPR_P (*tp))
5184 tree t = DECL_VALUE_EXPR (*tp);
5185 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5186 if (addr)
5187 return addr;
5190 if (!EXPR_P (*tp))
5192 *walk_subtrees = false;
5193 return NULL;
/* An expression's own location must reference a block in BLOCKS. */
5196 location_t loc = EXPR_LOCATION (*tp);
5197 if (verify_location (blocks, loc))
5198 return *tp;
5200 return NULL;
5203 /* Called via walk_gimple_op. Verify locations of expressions. */
/* Thin adapter: unwraps the walk_stmt_info to reach the blocks set. */
5205 static tree
5206 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5208 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5209 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5212 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5214 static void
5215 collect_subblocks (hash_set<tree> *blocks, tree block)
5217 tree t;
/* Depth-first over the BLOCK_SUBBLOCKS/BLOCK_CHAIN tree. */
5218 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5220 blocks->add (t);
5221 collect_subblocks (blocks, t);
5225 /* Verify the GIMPLE statements in the CFG of FN. */
/* Checks every PHI and statement of every basic block for well-formedness,
   tree sharing, location/block consistency and EH table coherence.
   Aborts via internal_error on any failure. */
5227 DEBUG_FUNCTION void
5228 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5230 basic_block bb;
5231 bool err = false;
5233 timevar_push (TV_TREE_STMT_VERIFY);
5234 hash_set<void *> visited;
5235 hash_set<gimple *> visited_stmts;
5237 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5238 hash_set<tree> blocks;
5239 if (DECL_INITIAL (fn->decl))
5241 blocks.add (DECL_INITIAL (fn->decl));
5242 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5245 FOR_EACH_BB_FN (bb, fn)
5247 gimple_stmt_iterator gsi;
/* First pass over the block: its PHI nodes. */
5249 for (gphi_iterator gpi = gsi_start_phis (bb);
5250 !gsi_end_p (gpi);
5251 gsi_next (&gpi))
5253 gphi *phi = gpi.phi ();
5254 bool err2 = false;
5255 unsigned i;
5257 visited_stmts.add (phi);
5259 if (gimple_bb (phi) != bb)
5261 error ("gimple_bb (phi) is set to a wrong basic block");
5262 err2 = true;
5265 err2 |= verify_gimple_phi (phi);
5267 /* Only PHI arguments have locations. */
5268 if (gimple_location (phi) != UNKNOWN_LOCATION)
5270 error ("PHI node with location");
5271 err2 = true;
5274 for (i = 0; i < gimple_phi_num_args (phi); i++)
5276 tree arg = gimple_phi_arg_def (phi, i);
5277 tree addr = walk_tree (&arg, verify_node_sharing_1,
5278 &visited, NULL);
5279 if (addr)
5281 error ("incorrect sharing of tree nodes");
5282 debug_generic_expr (addr);
5283 err2 |= true;
5285 location_t loc = gimple_phi_arg_location (phi, i);
5286 if (virtual_operand_p (gimple_phi_result (phi))
5287 && loc != UNKNOWN_LOCATION)
5289 error ("virtual PHI with argument locations");
5290 err2 = true;
5292 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5293 if (addr)
5295 debug_generic_expr (addr);
5296 err2 = true;
5298 err2 |= verify_location (&blocks, loc);
5301 if (err2)
5302 debug_gimple_stmt (phi);
5303 err |= err2;
/* Second pass: the block's ordinary statements. */
5306 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5308 gimple *stmt = gsi_stmt (gsi);
5309 bool err2 = false;
5310 struct walk_stmt_info wi;
5311 tree addr;
5312 int lp_nr;
5314 visited_stmts.add (stmt);
5316 if (gimple_bb (stmt) != bb)
5318 error ("gimple_bb (stmt) is set to a wrong basic block");
5319 err2 = true;
5322 err2 |= verify_gimple_stmt (stmt);
5323 err2 |= verify_location (&blocks, gimple_location (stmt));
5325 memset (&wi, 0, sizeof (wi));
5326 wi.info = (void *) &visited;
5327 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5328 if (addr)
5330 error ("incorrect sharing of tree nodes");
5331 debug_generic_expr (addr);
5332 err2 |= true;
5335 memset (&wi, 0, sizeof (wi));
5336 wi.info = (void *) &blocks;
5337 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5338 if (addr)
5340 debug_generic_expr (addr);
5341 err2 |= true;
5344 /* ??? Instead of not checking these stmts at all the walker
5345 should know its context via wi. */
5346 if (!is_gimple_debug (stmt)
5347 && !is_gimple_omp (stmt))
5349 memset (&wi, 0, sizeof (wi));
5350 addr = walk_gimple_op (stmt, verify_expr, &wi);
5351 if (addr)
5353 debug_generic_expr (addr);
5354 inform (gimple_location (stmt), "in statement");
5355 err2 |= true;
5359 /* If the statement is marked as part of an EH region, then it is
5360 expected that the statement could throw. Verify that when we
5361 have optimizations that simplify statements such that we prove
5362 that they cannot throw, that we update other data structures
5363 to match. */
5364 lp_nr = lookup_stmt_eh_lp (stmt);
5365 if (lp_nr > 0)
5367 if (!stmt_could_throw_p (stmt))
5369 if (verify_nothrow)
5371 error ("statement marked for throw, but doesn%'t");
5372 err2 |= true;
5375 else if (!gsi_one_before_end_p (gsi))
5377 error ("statement marked for throw in middle of block");
5378 err2 |= true;
5382 if (err2)
5383 debug_gimple_stmt (stmt);
5384 err |= err2;
/* Finally cross-check the EH throw-stmt table against the statements
   actually seen in the IL. */
5388 eh_error_found = false;
5389 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5390 if (eh_table)
5391 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5392 (&visited_stmts);
5394 if (err || eh_error_found)
5395 internal_error ("verify_gimple failed");
5397 verify_histograms ();
5398 timevar_pop (TV_TREE_STMT_VERIFY);
5402 /* Verifies that the flow information is OK. */
/* CFG-hook verifier: checks ENTRY/EXIT block invariants, label placement,
   absence of control flow mid-block, and per-terminator outgoing edge
   flags. Returns nonzero on error. */
5404 static int
5405 gimple_verify_flow_info (void)
5407 int err = 0;
5408 basic_block bb;
5409 gimple_stmt_iterator gsi;
5410 gimple *stmt;
5411 edge e;
5412 edge_iterator ei;
5414 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5415 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5417 error ("ENTRY_BLOCK has IL associated with it");
5418 err = 1;
5421 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5422 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5424 error ("EXIT_BLOCK has IL associated with it");
5425 err = 1;
5428 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5429 if (e->flags & EDGE_FALLTHRU)
5431 error ("fallthru to exit from bb %d", e->src->index);
5432 err = 1;
5435 FOR_EACH_BB_FN (bb, cfun)
5437 bool found_ctrl_stmt = false;
5439 stmt = NULL;
5441 /* Skip labels on the start of basic block. */
5442 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5444 tree label;
5445 gimple *prev_stmt = stmt;
5447 stmt = gsi_stmt (gsi);
5449 if (gimple_code (stmt) != GIMPLE_LABEL)
5450 break;
5452 label = gimple_label_label (as_a <glabel *> (stmt));
5453 if (prev_stmt && DECL_NONLOCAL (label))
5455 error ("nonlocal label ");
5456 print_generic_expr (stderr, label);
5457 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5458 bb->index);
5459 err = 1;
5462 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5464 error ("EH landing pad label ");
5465 print_generic_expr (stderr, label);
5466 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5467 bb->index);
5468 err = 1;
5471 if (label_to_block (label) != bb)
5473 error ("label ");
5474 print_generic_expr (stderr, label);
5475 fprintf (stderr, " to block does not match in bb %d",
5476 bb->index);
5477 err = 1;
5480 if (decl_function_context (label) != current_function_decl)
5482 error ("label ");
5483 print_generic_expr (stderr, label);
5484 fprintf (stderr, " has incorrect context in bb %d",
5485 bb->index);
5486 err = 1;
5490 /* Verify that body of basic block BB is free of control flow. */
5491 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5493 gimple *stmt = gsi_stmt (gsi);
5495 if (found_ctrl_stmt)
5497 error ("control flow in the middle of basic block %d",
5498 bb->index);
5499 err = 1;
5502 if (stmt_ends_bb_p (stmt))
5503 found_ctrl_stmt = true;
5505 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5507 error ("label ");
5508 print_generic_expr (stderr, gimple_label_label (label_stmt));
5509 fprintf (stderr, " in the middle of basic block %d", bb->index);
5510 err = 1;
/* Now check the block's last statement against its outgoing edges. */
5514 gsi = gsi_last_bb (bb);
5515 if (gsi_end_p (gsi))
5516 continue;
5518 stmt = gsi_stmt (gsi);
5520 if (gimple_code (stmt) == GIMPLE_LABEL)
5521 continue;
5523 err |= verify_eh_edges (stmt);
5525 if (is_ctrl_stmt (stmt))
5527 FOR_EACH_EDGE (e, ei, bb->succs)
5528 if (e->flags & EDGE_FALLTHRU)
5530 error ("fallthru edge after a control statement in bb %d",
5531 bb->index);
5532 err = 1;
5536 if (gimple_code (stmt) != GIMPLE_COND)
5538 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5539 after anything else but if statement. */
5540 FOR_EACH_EDGE (e, ei, bb->succs)
5541 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5543 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5544 bb->index);
5545 err = 1;
/* Terminator-specific edge-flag checks. */
5549 switch (gimple_code (stmt))
5551 case GIMPLE_COND:
5553 edge true_edge;
5554 edge false_edge;
5556 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5558 if (!true_edge
5559 || !false_edge
5560 || !(true_edge->flags & EDGE_TRUE_VALUE)
5561 || !(false_edge->flags & EDGE_FALSE_VALUE)
5562 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5563 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5564 || EDGE_COUNT (bb->succs) >= 3)
5566 error ("wrong outgoing edge flags at end of bb %d",
5567 bb->index);
5568 err = 1;
5571 break;
5573 case GIMPLE_GOTO:
5574 if (simple_goto_p (stmt))
5576 error ("explicit goto at end of bb %d", bb->index);
5577 err = 1;
5579 else
5581 /* FIXME. We should double check that the labels in the
5582 destination blocks have their address taken. */
5583 FOR_EACH_EDGE (e, ei, bb->succs)
5584 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5585 | EDGE_FALSE_VALUE))
5586 || !(e->flags & EDGE_ABNORMAL))
5588 error ("wrong outgoing edge flags at end of bb %d",
5589 bb->index);
5590 err = 1;
5593 break;
5595 case GIMPLE_CALL:
5596 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5597 break;
5598 /* fallthru */
5599 case GIMPLE_RETURN:
5600 if (!single_succ_p (bb)
5601 || (single_succ_edge (bb)->flags
5602 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5603 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5605 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5606 err = 1;
5608 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5610 error ("return edge does not point to exit in bb %d",
5611 bb->index);
5612 err = 1;
5614 break;
5616 case GIMPLE_SWITCH:
5618 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5619 tree prev;
5620 edge e;
5621 size_t i, n;
5623 n = gimple_switch_num_labels (switch_stmt);
5625 /* Mark all the destination basic blocks. */
5626 for (i = 0; i < n; ++i)
5628 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5629 basic_block label_bb = label_to_block (lab);
5630 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5631 label_bb->aux = (void *)1;
5634 /* Verify that the case labels are sorted. */
5635 prev = gimple_switch_label (switch_stmt, 0);
5636 for (i = 1; i < n; ++i)
5638 tree c = gimple_switch_label (switch_stmt, i);
5639 if (!CASE_LOW (c))
5641 error ("found default case not at the start of "
5642 "case vector");
5643 err = 1;
5644 continue;
5646 if (CASE_LOW (prev)
5647 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5649 error ("case labels not sorted: ");
5650 print_generic_expr (stderr, prev);
5651 fprintf (stderr," is greater than ");
5652 print_generic_expr (stderr, c);
5653 fprintf (stderr," but comes before it.\n");
5654 err = 1;
5656 prev = c;
5658 /* VRP will remove the default case if it can prove it will
5659 never be executed. So do not verify there always exists
5660 a default case here. */
5662 FOR_EACH_EDGE (e, ei, bb->succs)
5664 if (!e->dest->aux)
5666 error ("extra outgoing edge %d->%d",
5667 bb->index, e->dest->index);
5668 err = 1;
/* aux==2 marks a destination confirmed reachable via an edge. */
5671 e->dest->aux = (void *)2;
5672 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5673 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5675 error ("wrong outgoing edge flags at end of bb %d",
5676 bb->index);
5677 err = 1;
5681 /* Check that we have all of them. */
5682 for (i = 0; i < n; ++i)
5684 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5685 basic_block label_bb = label_to_block (lab);
5687 if (label_bb->aux != (void *)2)
5689 error ("missing edge %i->%i", bb->index, label_bb->index);
5690 err = 1;
/* Reset aux markers before leaving the switch case. */
5694 FOR_EACH_EDGE (e, ei, bb->succs)
5695 e->dest->aux = (void *)0;
5697 break;
5699 case GIMPLE_EH_DISPATCH:
5700 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5701 break;
5703 default:
5704 break;
5708 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5709 verify_dominators (CDI_DOMINATORS);
5711 return err;
5715 /* Updates phi nodes after creating a forwarder block joined
5716 by edge FALLTHRU. */
5718 static void
5719 gimple_make_forwarder_block (edge fallthru)
5721 edge e;
5722 edge_iterator ei;
5723 basic_block dummy, bb;
5724 tree var;
5725 gphi_iterator gsi;
5727 dummy = fallthru->src;
5728 bb = fallthru->dest;
/* Nothing to do if BB has only the forwarder as predecessor. */
5730 if (single_pred_p (bb))
5731 return;
5733 /* If we redirected a branch we must create new PHI nodes at the
5734 start of BB. */
5735 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5737 gphi *phi, *new_phi;
5739 phi = gsi.phi ();
5740 var = gimple_phi_result (phi);
/* The new PHI in BB takes over VAR; the old PHI in the forwarder gets
   a fresh SSA name feeding the new PHI along FALLTHRU. */
5741 new_phi = create_phi_node (var, bb);
5742 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5743 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5744 UNKNOWN_LOCATION);
5747 /* Add the arguments we have stored on edges. */
5748 FOR_EACH_EDGE (e, ei, bb->preds)
5750 if (e == fallthru)
5751 continue;
5753 flush_pending_stmts (e);
5758 /* Return a non-special label in the head of basic block BLOCK.
5759 Create one if it doesn't exist. */
5761 tree
5762 gimple_block_label (basic_block bb)
5764 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5765 bool first = true;
5766 tree label;
5767 glabel *stmt;
/* Scan the leading label statements for a usable (non-nonlocal) label. */
5769 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5771 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5772 if (!stmt)
5773 break;
5774 label = gimple_label_label (stmt);
5775 if (!DECL_NONLOCAL (label))
/* Hoist the chosen label to the front of the block before
   returning it. */
5777 if (!first)
5778 gsi_move_before (&i, &s);
5779 return label;
/* No suitable label found: create and insert a fresh artificial one. */
5783 label = create_artificial_label (UNKNOWN_LOCATION);
5784 stmt = gimple_build_label (label);
5785 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5786 return label;
5790 /* Attempt to perform edge redirection by replacing a possibly complex
5791 jump instruction by a goto or by removing the jump completely.
5792 This can apply only if all edges now point to the same block. The
5793 parameters and return values are equivalent to
5794 redirect_edge_and_branch. */
5796 static edge
5797 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5799 basic_block src = e->src;
5800 gimple_stmt_iterator i;
5801 gimple *stmt;
5803 /* We can replace or remove a complex jump only when we have exactly
5804 two edges. */
5805 if (EDGE_COUNT (src->succs) != 2
5806 /* Verify that all targets will be TARGET. Specifically, the
5807 edge that is not E must also go to TARGET. */
5808 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5809 return NULL;
5811 i = gsi_last_bb (src);
5812 if (gsi_end_p (i))
5813 return NULL;
5815 stmt = gsi_stmt (i);
/* Drop the now-redundant COND/SWITCH terminator and fall through. */
5817 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5819 gsi_remove (&i, true);
5820 e = ssa_redirect_edge (e, target);
5821 e->flags = EDGE_FALLTHRU;
5822 return e;
5825 return NULL;
5829 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5830 edge representing the redirected branch. */
5832 static edge
5833 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5835 basic_block bb = e->src;
5836 gimple_stmt_iterator gsi;
5837 edge ret;
5838 gimple *stmt;
/* Abnormal edges cannot be redirected at all. */
5840 if (e->flags & EDGE_ABNORMAL)
5841 return NULL;
5843 if (e->dest == dest)
5844 return NULL;
5846 if (e->flags & EDGE_EH)
5847 return redirect_eh_edge (e, dest);
5849 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5851 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5852 if (ret)
5853 return ret;
/* Otherwise adjust the source block's terminator to target DEST. */
5856 gsi = gsi_last_bb (bb);
5857 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5859 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5861 case GIMPLE_COND:
5862 /* For COND_EXPR, we only need to redirect the edge. */
5863 break;
5865 case GIMPLE_GOTO:
5866 /* No non-abnormal edges should lead from a non-simple goto, and
5867 simple ones should be represented implicitly. */
5868 gcc_unreachable ();
5870 case GIMPLE_SWITCH:
5872 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5873 tree label = gimple_block_label (dest);
5874 tree cases = get_cases_for_edge (e, switch_stmt);
5876 /* If we have a list of cases associated with E, then use it
5877 as it's a lot faster than walking the entire case vector. */
5878 if (cases)
5880 edge e2 = find_edge (e->src, dest);
5881 tree last, first;
5883 first = cases;
5884 while (cases)
5886 last = cases;
5887 CASE_LABEL (cases) = label;
5888 cases = CASE_CHAIN (cases);
5891 /* If there was already an edge in the CFG, then we need
5892 to move all the cases associated with E to E2. */
5893 if (e2)
5895 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5897 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5898 CASE_CHAIN (cases2) = first;
5900 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5902 else
/* Slow path: rewrite every case label whose destination was E's. */
5904 size_t i, n = gimple_switch_num_labels (switch_stmt);
5906 for (i = 0; i < n; i++)
5908 tree elt = gimple_switch_label (switch_stmt, i);
5909 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5910 CASE_LABEL (elt) = label;
5914 break;
5916 case GIMPLE_ASM:
5918 gasm *asm_stmt = as_a <gasm *> (stmt);
5919 int i, n = gimple_asm_nlabels (asm_stmt);
5920 tree label = NULL;
5922 for (i = 0; i < n; ++i)
5924 tree cons = gimple_asm_label_op (asm_stmt, i);
5925 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5927 if (!label)
5928 label = gimple_block_label (dest);
5929 TREE_VALUE (cons) = label;
5933 /* If we didn't find any label matching the former edge in the
5934 asm labels, we must be redirecting the fallthrough
5935 edge. */
5936 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5938 break;
5940 case GIMPLE_RETURN:
5941 gsi_remove (&gsi, true);
5942 e->flags |= EDGE_FALLTHRU;
5943 break;
5945 case GIMPLE_OMP_RETURN:
5946 case GIMPLE_OMP_CONTINUE:
5947 case GIMPLE_OMP_SECTIONS_SWITCH:
5948 case GIMPLE_OMP_FOR:
5949 /* The edges from OMP constructs can be simply redirected. */
5950 break;
5952 case GIMPLE_EH_DISPATCH:
5953 if (!(e->flags & EDGE_FALLTHRU))
5954 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5955 break;
5957 case GIMPLE_TRANSACTION:
/* Pick the transaction label matching the edge's TM flags. */
5958 if (e->flags & EDGE_TM_ABORT)
5959 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5960 gimple_block_label (dest));
5961 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5962 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5963 gimple_block_label (dest));
5964 else
5965 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5966 gimple_block_label (dest));
5967 break;
5969 default:
5970 /* Otherwise it must be a fallthru edge, and we don't need to
5971 do anything besides redirecting it. */
5972 gcc_assert (e->flags & EDGE_FALLTHRU);
5973 break;
5976 /* Update/insert PHI nodes as necessary. */
5978 /* Now update the edges in the CFG. */
5979 e = ssa_redirect_edge (e, dest);
5981 return e;
5984 /* Returns true if it is possible to remove edge E by redirecting
5985 it to the destination of the other edge from E->src. */
5987 static bool
5988 gimple_can_remove_branch_p (const_edge e)
/* Abnormal and EH edges cannot be removed this way. */
5990 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5991 return false;
5993 return true;
5996 /* Simple wrapper, as we can always redirect fallthru edges. */
/* Returns NULL per the cfg-hook contract: no new block was created. */
5998 static basic_block
5999 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6001 e = gimple_redirect_edge_and_branch (e, dest);
6002 gcc_assert (e);
6004 return NULL;
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block, which receives all statements
   after the split point and all of BB's outgoing edges.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges: NEW_BB takes over BB's successor
     list wholesale, so each edge's source must be repointed.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  A label
     STMT is treated like NULL: splitting inside the label sequence is
     not allowed, so we split right after the labels instead.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  Nothing to move
     if the split point is already at the end of BB.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
6056 /* Moves basic block BB after block AFTER. */
6058 static bool
6059 gimple_move_block_after (basic_block bb, basic_block after)
6061 if (bb->prev_bb == after)
6062 return true;
6064 unlink_block (bb);
6065 link_block (bb, after);
6067 return true;
6071 /* Return TRUE if block BB has no executable statements, otherwise return
6072 FALSE. */
6074 static bool
6075 gimple_empty_block_p (basic_block bb)
6077 /* BB must have no executable statements. */
6078 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6079 if (phi_nodes (bb))
6080 return false;
6081 if (gsi_end_p (gsi))
6082 return true;
6083 if (is_gimple_debug (gsi_stmt (gsi)))
6084 gsi_next_nondebug (&gsi);
6085 return gsi_end_p (gsi);
6089 /* Split a basic block if it ends with a conditional branch and if the
6090 other part of the block is not empty. */
6092 static basic_block
6093 gimple_split_block_before_cond_jump (basic_block bb)
6095 gimple *last, *split_point;
6096 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6097 if (gsi_end_p (gsi))
6098 return NULL;
6099 last = gsi_stmt (gsi);
6100 if (gimple_code (last) != GIMPLE_COND
6101 && gimple_code (last) != GIMPLE_SWITCH)
6102 return NULL;
6103 gsi_prev (&gsi);
6104 split_point = gsi_stmt (gsi);
6105 return split_block (bb, split_point)->dest;
6109 /* Return true if basic_block can be duplicated. */
6111 static bool
6112 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6114 return true;
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The new block is inserted before the exit block;
   edges and PHI arguments must be fixed up by the caller.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      /* Labels belong to the original block only.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      /* Keep EH landing-pad info and profile histograms in sync with
	 the original statement.  */
      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The argument values are taken from the corresponding edge of the
   original (pre-duplication) blocks, which are found via the
   BB_DUPLICATED flag and the original/copy tables.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* No PHIs at the destination means there is nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the source block back to its original, if it is a duplicate.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of the original and the copied destination in
     lockstep; both sequences have the same length and order.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
6247 /* Basic block BB_COPY was created by code duplication. Add phi node
6248 arguments for edges going out of BB_COPY. The blocks that were
6249 duplicated have BB_DUPLICATED set. */
6251 void
6252 add_phi_args_after_copy_bb (basic_block bb_copy)
6254 edge e_copy;
6255 edge_iterator ei;
6257 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6259 add_phi_args_after_copy_edge (e_copy);
6263 /* Blocks in REGION_COPY array of length N_REGION were created by
6264 duplication of basic blocks. Add phi node arguments for edges
6265 going from these blocks. If E_COPY is not NULL, also add
6266 phi node arguments for its destination.*/
6268 void
6269 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6270 edge e_copy)
6272 unsigned i;
6274 for (i = 0; i < n_region; i++)
6275 region_copy[i]->flags |= BB_DUPLICATED;
6277 for (i = 0; i < n_region; i++)
6278 add_phi_args_after_copy_bb (region_copy[i]);
6279 if (e_copy)
6280 add_phi_args_after_copy_edge (e_copy);
6282 for (i = 0; i < n_region; i++)
6283 region_copy[i]->flags &= ~BB_DUPLICATED;
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  profile_count total_count = profile_count::uninitialized ();
  profile_count entry_count = profile_count::uninitialized ();

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copy belongs to the outer loop.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count.initialized_p ())
    {
      total_count = entry->dest->count;
      entry_count = entry->count ();
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  /* Fall back to frequencies when the profile counts are unusable.  */
  if (!(total_count > 0) || !(entry_count > 0))
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count > 0 && entry_count > 0)
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - entry_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
					   total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6441 /* Checks if BB is part of the region defined by N_REGION BBS. */
6442 static bool
6443 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6445 unsigned int n;
6447 for (n = 0; n < n_region; n++)
6449 if (bb == bbs[n])
6450 return true;
6452 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     {
     }
   else
     {
     }

   is transformed to

   if (cond)
     {
       some_code;
     }
   else
     {
       some_code;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* EXIT->src must end in a two-way branch; EXITS collects both of
     its outgoing edges, EXIT first.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any subloops whose headers lie inside the region.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      {
	cloop = duplicate_loop (aloop, target);
	duplicate_subloops (aloop, cloop);
      }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count > 0)
    {
      total_count = exit->src->count;
      exit_count = exit->count ();
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* Re-use the former fallthrough edge as the "stay" arm of the new
     condition, and add a fresh edge for the duplicated entry.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6649 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6650 adding blocks when the dominator traversal reaches EXIT. This
6651 function silently assumes that ENTRY strictly dominates EXIT. */
6653 void
6654 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6655 vec<basic_block> *bbs_p)
6657 basic_block son;
6659 for (son = first_dom_son (CDI_DOMINATORS, entry);
6660 son;
6661 son = next_dom_son (CDI_DOMINATORS, son))
6663 bbs_p->safe_push (son);
6664 if (son != exit)
6665 gather_blocks_in_sese_region (son, exit, bbs_p);
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP, so a given decl is only
   duplicated once; subsequent calls reuse the cached copy.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Already owned by the destination function: nothing to do.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  /* Only variables and CONST_DECLs are expected here.  */
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.
   Virtual operands are not handled here (asserted against).  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* The underlying decl must be duplicated into TO_CONTEXT
	     first; default defs are not expected here.  */
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* State shared by the move_stmt_* walkers while moving statements from
   one function to another.  */

struct move_stmt_d
{
  tree orig_block;		/* Block the statements are moved out of;
				   NULL_TREE means "any non-NULL block".  */
  tree new_block;		/* Block the statements are moved into.  */
  tree from_context;		/* FUNCTION_DECL of the source function.  */
  tree to_context;		/* FUNCTION_DECL of the destination.  */
  hash_map<tree, tree> *vars_map; /* Maps old decls/SSA names to copies.  */
  htab_t new_label_map;		/* Maps old labels to fresh labels.  */
  hash_map<void *, void *> *eh_map; /* Maps old EH regions to new ones.  */
  bool remap_decls_p;		/* Whether local decls should be remapped
				   (disabled inside OMP directives).  */
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  DATA is a
   walk_stmt_info wrapping a struct move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	TREE_SET_BLOCK (t, p->new_block);
      else if (flag_checking)
	{
	  /* With checking, verify the block really nests inside
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6834 /* Helper for move_stmt_r. Given an EH region number for the source
6835 function, map that to the duplicate EH regio number in the dest. */
6837 static int
6838 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6840 eh_region old_r, new_r;
6842 old_r = get_eh_region_from_number (old_nr);
6843 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6845 return new_r->index;
6848 /* Similar, but operate on INTEGER_CSTs. */
6850 static tree
6851 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6853 int old_nr, new_nr;
6855 old_nr = tree_to_shwi (old_t_nr);
6856 new_nr = move_stmt_eh_region_nr (old_nr, p);
6858 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  WI->info holds a struct move_stmt_d.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  D carries the full remapping state (blocks,
   labels, EH regions) for the move_stmt_* walkers.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its index
     from the source function.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-block the argument locations that referred to ORIG_BLOCK.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label-to-block mapping into the destination CFG.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH landing-pad info and histograms to DEST_CFUN.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-block goto locations on outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
7129 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7130 the outermost EH region. Use REGION as the incoming base EH region. */
7132 static eh_region
7133 find_outermost_region_in_block (struct function *src_cfun,
7134 basic_block bb, eh_region region)
7136 gimple_stmt_iterator si;
7138 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7140 gimple *stmt = gsi_stmt (si);
7141 eh_region stmt_region;
7142 int lp_nr;
7144 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7145 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7146 if (stmt_region)
7148 if (region == NULL)
7149 region = stmt_region;
7150 else if (stmt_region != region)
7152 region = eh_region_outermost (src_cfun, stmt_region, region);
7153 gcc_assert (region != NULL);
7158 return region;
7161 static tree
7162 new_label_mapper (tree decl, void *data)
7164 htab_t hash = (htab_t) data;
7165 struct tree_map *m;
7166 void **slot;
7168 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7170 m = XNEW (struct tree_map);
7171 m->hash = DECL_UID (decl);
7172 m->base.from = decl;
7173 m->to = create_artificial_label (UNKNOWN_LOCATION);
7174 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7175 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7176 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7178 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7179 gcc_assert (*slot == NULL);
7181 *slot = m;
7183 return m->to;
7186 /* Tree walker to replace the decls used inside value expressions by
7187 duplicates. */
7189 static tree
7190 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7192 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7194 switch (TREE_CODE (*tp))
7196 case VAR_DECL:
7197 case PARM_DECL:
7198 case RESULT_DECL:
7199 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7200 break;
7201 default:
7202 break;
7205 if (IS_TYPE_OR_DECL_P (*tp))
7206 *walk_subtrees = false;
7208 return NULL;
7211 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7212 subblocks. */
7214 static void
7215 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7216 tree to_context)
7218 tree *tp, t;
7220 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7222 t = *tp;
7223 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7224 continue;
7225 replace_by_duplicate_decl (&t, vars_map, to_context);
7226 if (t != *tp)
7228 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7230 tree x = DECL_VALUE_EXPR (*tp);
7231 struct replace_decls_d rd = { vars_map, to_context };
7232 unshare_expr (x);
7233 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7234 SET_DECL_VALUE_EXPR (t, x);
7235 DECL_HAS_VALUE_EXPR_P (t) = 1;
7237 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7238 *tp = t;
7242 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7243 replace_block_vars_by_duplicates (block, vars_map, to_context);
7246 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7247 from FN1 to FN2. */
7249 static void
7250 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7251 struct loop *loop)
7253 /* Discard it from the old loop array. */
7254 (*get_loops (fn1))[loop->num] = NULL;
7256 /* Place it in the new loop array, assigning it a new number. */
7257 loop->num = number_of_loops (fn2);
7258 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7260 /* Recurse to children. */
7261 for (loop = loop->inner; loop; loop = loop->next)
7262 fixup_loop_arrays_after_move (fn1, fn2, loop);
7265 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7266 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7268 DEBUG_FUNCTION void
7269 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7271 basic_block bb;
7272 edge_iterator ei;
7273 edge e;
7274 bitmap bbs = BITMAP_ALLOC (NULL);
7275 int i;
7277 gcc_assert (entry != NULL);
7278 gcc_assert (entry != exit);
7279 gcc_assert (bbs_p != NULL);
7281 gcc_assert (bbs_p->length () > 0);
7283 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7284 bitmap_set_bit (bbs, bb->index);
7286 gcc_assert (bitmap_bit_p (bbs, entry->index));
7287 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7289 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7291 if (bb == entry)
7293 gcc_assert (single_pred_p (entry));
7294 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7296 else
7297 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7299 e = ei_edge (ei);
7300 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7303 if (bb == exit)
7305 gcc_assert (single_succ_p (exit));
7306 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7308 else
7309 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7311 e = ei_edge (ei);
7312 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7316 BITMAP_FREE (bbs);
7319 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7321 bool
7322 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7324 bitmap release_names = (bitmap)data;
7326 if (TREE_CODE (from) != SSA_NAME)
7327 return true;
7329 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7330 return true;
7333 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7334 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7335 single basic block in the original CFG and the new basic block is
7336 returned. DEST_CFUN must not have a CFG yet.
7338 Note that the region need not be a pure SESE region. Blocks inside
7339 the region may contain calls to abort/exit. The only restriction
7340 is that ENTRY_BB should be the only entry point and it must
7341 dominate EXIT_BB.
7343 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7344 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7345 to the new function.
7347 All local variables referenced in the region are assumed to be in
7348 the corresponding BLOCK_VARS and unexpanded variable lists
7349 associated with DEST_CFUN.
7351 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7352 reimplement move_sese_region_to_fn by duplicating the region rather than
7353 moving it. */
7355 basic_block
7356 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7357 basic_block exit_bb, tree orig_block)
7359 vec<basic_block> bbs, dom_bbs;
7360 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7361 basic_block after, bb, *entry_pred, *exit_succ, abb;
7362 struct function *saved_cfun = cfun;
7363 int *entry_flag, *exit_flag;
7364 profile_probability *entry_prob, *exit_prob;
7365 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7366 edge e;
7367 edge_iterator ei;
7368 htab_t new_label_map;
7369 hash_map<void *, void *> *eh_map;
7370 struct loop *loop = entry_bb->loop_father;
7371 struct loop *loop0 = get_loop (saved_cfun, 0);
7372 struct move_stmt_d d;
7374 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7375 region. */
7376 gcc_assert (entry_bb != exit_bb
7377 && (!exit_bb
7378 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7380 /* Collect all the blocks in the region. Manually add ENTRY_BB
7381 because it won't be added by dfs_enumerate_from. */
7382 bbs.create (0);
7383 bbs.safe_push (entry_bb);
7384 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7386 if (flag_checking)
7387 verify_sese (entry_bb, exit_bb, &bbs);
7389 /* The blocks that used to be dominated by something in BBS will now be
7390 dominated by the new block. */
7391 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7392 bbs.address (),
7393 bbs.length ());
7395 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7396 the predecessor edges to ENTRY_BB and the successor edges to
7397 EXIT_BB so that we can re-attach them to the new basic block that
7398 will replace the region. */
7399 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7400 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7401 entry_flag = XNEWVEC (int, num_entry_edges);
7402 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7403 i = 0;
7404 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7406 entry_prob[i] = e->probability;
7407 entry_flag[i] = e->flags;
7408 entry_pred[i++] = e->src;
7409 remove_edge (e);
7412 if (exit_bb)
7414 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7415 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7416 exit_flag = XNEWVEC (int, num_exit_edges);
7417 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7418 i = 0;
7419 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7421 exit_prob[i] = e->probability;
7422 exit_flag[i] = e->flags;
7423 exit_succ[i++] = e->dest;
7424 remove_edge (e);
7427 else
7429 num_exit_edges = 0;
7430 exit_succ = NULL;
7431 exit_flag = NULL;
7432 exit_prob = NULL;
7435 /* Switch context to the child function to initialize DEST_FN's CFG. */
7436 gcc_assert (dest_cfun->cfg == NULL);
7437 push_cfun (dest_cfun);
7439 init_empty_tree_cfg ();
7441 /* Initialize EH information for the new function. */
7442 eh_map = NULL;
7443 new_label_map = NULL;
7444 if (saved_cfun->eh)
7446 eh_region region = NULL;
/* Find the outermost EH region covering any block in the region, then
   duplicate that region tree (and its labels) into DEST_CFUN.  */
7448 FOR_EACH_VEC_ELT (bbs, i, bb)
7449 region = find_outermost_region_in_block (saved_cfun, bb, region);
7451 init_eh_for_function ();
7452 if (region != NULL)
7454 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7455 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7456 new_label_mapper, new_label_map);
7460 /* Initialize an empty loop tree. */
7461 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7462 init_loops_structure (dest_cfun, loops, 1);
7463 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7464 set_loops_for_fn (dest_cfun, loops);
7466 /* Move the outlined loop tree part. */
7467 num_nodes = bbs.length ();
7468 FOR_EACH_VEC_ELT (bbs, i, bb)
7470 if (bb->loop_father->header == bb)
7472 struct loop *this_loop = bb->loop_father;
7473 struct loop *outer = loop_outer (this_loop);
7474 if (outer == loop
7475 /* If the SESE region contains some bbs ending with
7476 a noreturn call, those are considered to belong
7477 to the outermost loop in saved_cfun, rather than
7478 the entry_bb's loop_father. */
7479 || outer == loop0)
7481 if (outer != loop)
7482 num_nodes -= this_loop->num_nodes;
7483 flow_loop_tree_node_remove (bb->loop_father);
7484 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7485 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7488 else if (bb->loop_father == loop0 && loop0 != loop)
7489 num_nodes--;
7491 /* Remove loop exits from the outlined region. */
7492 if (loops_for_fn (saved_cfun)->exits)
7493 FOR_EACH_EDGE (e, ei, bb->succs)
7495 struct loops *l = loops_for_fn (saved_cfun);
7496 loop_exit **slot
7497 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7498 NO_INSERT);
7499 if (slot)
7500 l->exits->clear_slot (slot);
7505 /* Adjust the number of blocks in the tree root of the outlined part. */
7506 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7508 /* Setup a mapping to be used by move_block_to_fn. */
/* Both the region's parent loop and SAVED_CFUN's root loop are mapped
   to DEST_CFUN's root loop via the aux field; cleared again below.  */
7509 loop->aux = current_loops->tree_root;
7510 loop0->aux = current_loops->tree_root;
7512 pop_cfun ();
7514 /* Move blocks from BBS into DEST_CFUN. */
7515 gcc_assert (bbs.length () >= 2);
7516 after = dest_cfun->cfg->x_entry_block_ptr;
7517 hash_map<tree, tree> vars_map;
7519 memset (&d, 0, sizeof (d));
7520 d.orig_block = orig_block;
7521 d.new_block = DECL_INITIAL (dest_cfun->decl);
7522 d.from_context = cfun->decl;
7523 d.to_context = dest_cfun->decl;
7524 d.vars_map = &vars_map;
7525 d.new_label_map = new_label_map;
7526 d.eh_map = eh_map;
7527 d.remap_decls_p = true;
/* In SSA form, create fresh default defs in DEST_CFUN for its parameter
   decls and record them in VARS_MAP so statement operands get rewritten
   to the new SSA names.  */
7529 if (gimple_in_ssa_p (cfun))
7530 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7532 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7533 set_ssa_default_def (dest_cfun, arg, narg);
7534 vars_map.put (arg, narg);
7537 FOR_EACH_VEC_ELT (bbs, i, bb)
7539 /* No need to update edge counts on the last block. It has
7540 already been updated earlier when we detached the region from
7541 the original CFG. */
7542 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7543 after = bb;
/* Drop the temporary aux mappings now that all blocks are moved.  */
7546 loop->aux = NULL;
7547 loop0->aux = NULL;
7548 /* Loop sizes are no longer correct, fix them up. */
7549 loop->num_nodes -= num_nodes;
7550 for (struct loop *outer = loop_outer (loop);
7551 outer; outer = loop_outer (outer))
7552 outer->num_nodes -= num_nodes;
7553 loop0->num_nodes -= bbs.length () - num_nodes;
7555 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7557 struct loop *aloop;
7558 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7559 if (aloop != NULL)
7561 if (aloop->simduid)
7563 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7564 d.to_context);
7565 dest_cfun->has_simduid_loops = true;
7567 if (aloop->force_vectorize)
7568 dest_cfun->has_force_vectorize_loops = true;
7572 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7573 if (orig_block)
7575 tree block;
7576 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7577 == NULL_TREE);
7578 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7579 = BLOCK_SUBBLOCKS (orig_block);
7580 for (block = BLOCK_SUBBLOCKS (orig_block);
7581 block; block = BLOCK_CHAIN (block))
7582 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7583 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7586 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7587 &vars_map, dest_cfun->decl);
7589 if (new_label_map)
7590 htab_delete (new_label_map);
7591 if (eh_map)
7592 delete eh_map;
7594 if (gimple_in_ssa_p (cfun))
7596 /* We need to release ssa-names in a defined order, so first find them,
7597 and then iterate in ascending version order. */
7598 bitmap release_names = BITMAP_ALLOC (NULL);
7599 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7600 bitmap_iterator bi;
7601 unsigned i;
7602 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7603 release_ssa_name (ssa_name (i));
7604 BITMAP_FREE (release_names);
7607 /* Rewire the entry and exit blocks. The successor to the entry
7608 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7609 the child function. Similarly, the predecessor of DEST_FN's
7610 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7611 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7612 various CFG manipulation function get to the right CFG.
7614 FIXME, this is silly. The CFG ought to become a parameter to
7615 these helpers. */
7616 push_cfun (dest_cfun);
7617 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7618 if (exit_bb)
7619 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7620 pop_cfun ();
7622 /* Back in the original function, the SESE region has disappeared,
7623 create a new basic block in its place. */
7624 bb = create_empty_bb (entry_pred[0]);
7625 if (current_loops)
7626 add_bb_to_loop (bb, loop);
/* Re-attach the saved entry/exit edges to the replacement block.  */
7627 for (i = 0; i < num_entry_edges; i++)
7629 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7630 e->probability = entry_prob[i];
7633 for (i = 0; i < num_exit_edges; i++)
7635 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7636 e->probability = exit_prob[i];
7639 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7640 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7641 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7642 dom_bbs.release ();
7644 if (exit_bb)
7646 free (exit_prob);
7647 free (exit_flag);
7648 free (exit_succ);
7650 free (entry_prob);
7651 free (entry_flag);
7652 free (entry_pred);
7653 bbs.release ();
7655 return bb;
7658 /* Dump default def DEF to file FILE using FLAGS and indentation
7659 SPC. */
7661 static void
7662 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7664 for (int i = 0; i < spc; ++i)
7665 fprintf (file, " ");
7666 dump_ssaname_info_to_file (file, def, spc);
7668 print_generic_expr (file, TREE_TYPE (def), flags);
7669 fprintf (file, " ");
7670 print_generic_expr (file, def, flags);
7671 fprintf (file, " = ");
7672 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7673 fprintf (file, ";\n");
7676 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7678 static void
7679 print_no_sanitize_attr_value (FILE *file, tree value)
7681 unsigned int flags = tree_to_uhwi (value);
7682 bool first = true;
7683 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7685 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7687 if (!first)
7688 fprintf (file, " | ");
7689 fprintf (file, "%s", sanitizer_opts[i].name);
7690 first = false;
7695 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7698 void
7699 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7701 tree arg, var, old_current_fndecl = current_function_decl;
7702 struct function *dsf;
7703 bool ignore_topmost_bind = false, any_var = false;
7704 basic_block bb;
7705 tree chain;
7706 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7707 && decl_is_tm_clone (fndecl));
7708 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
/* Print the decl's attribute list, expanding no_sanitize values
   symbolically.  */
7710 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7712 fprintf (file, "__attribute__((");
7714 bool first = true;
7715 tree chain;
7716 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7717 first = false, chain = TREE_CHAIN (chain))
7719 if (!first)
7720 fprintf (file, ", ");
7722 tree name = get_attribute_name (chain);
7723 print_generic_expr (file, name, dump_flags);
7724 if (TREE_VALUE (chain) != NULL_TREE)
7726 fprintf (file, " (");
7728 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7729 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7730 else
7731 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7732 fprintf (file, ")");
7736 fprintf (file, "))\n");
7739 current_function_decl = fndecl;
7740 if (flags & TDF_GIMPLE)
7742 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7743 dump_flags | TDF_SLIM)
7744 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7746 else
7747 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
/* Print the parameter list.  */
7749 arg = DECL_ARGUMENTS (fndecl);
7750 while (arg)
7752 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7753 fprintf (file, " ");
7754 print_generic_expr (file, arg, dump_flags);
7755 if (DECL_CHAIN (arg))
7756 fprintf (file, ", ");
7757 arg = DECL_CHAIN (arg);
7759 fprintf (file, ")\n");
7761 dsf = DECL_STRUCT_FUNCTION (fndecl);
7762 if (dsf && (flags & TDF_EH))
7763 dump_eh_tree (file, dsf);
7765 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7767 dump_node (fndecl, TDF_SLIM | flags, file);
7768 current_function_decl = old_current_fndecl;
7769 return;
7772 /* When GIMPLE is lowered, the variables are no longer available in
7773 BIND_EXPRs, so display them separately. */
7774 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7776 unsigned ix;
7777 ignore_topmost_bind = true;
7779 fprintf (file, "{\n");
7780 if (gimple_in_ssa_p (fun)
7781 && (flags & TDF_ALIAS))
7783 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7784 arg = DECL_CHAIN (arg))
7786 tree def = ssa_default_def (fun, arg);
7787 if (def)
7788 dump_default_def (file, def, 2, flags);
7791 tree res = DECL_RESULT (fun->decl);
7792 if (res != NULL_TREE
7793 && DECL_BY_REFERENCE (res))
7795 tree def = ssa_default_def (fun, res);
7796 if (def)
7797 dump_default_def (file, def, 2, flags);
7800 tree static_chain = fun->static_chain_decl;
7801 if (static_chain != NULL_TREE)
7803 tree def = ssa_default_def (fun, static_chain);
7804 if (def)
7805 dump_default_def (file, def, 2, flags);
7809 if (!vec_safe_is_empty (fun->local_decls))
7810 FOR_EACH_LOCAL_DECL (fun, ix, var)
7812 print_generic_decl (file, var, flags);
7813 fprintf (file, "\n");
7815 any_var = true;
7818 tree name;
/* Also list anonymous SSA names (those without an underlying decl).  */
7820 if (gimple_in_ssa_p (cfun))
7821 FOR_EACH_SSA_NAME (ix, name, cfun)
7823 if (!SSA_NAME_VAR (name))
7825 fprintf (file, " ");
7826 print_generic_expr (file, TREE_TYPE (name), flags);
7827 fprintf (file, " ");
7828 print_generic_expr (file, name, flags);
7829 fprintf (file, ";\n");
7831 any_var = true;
7836 if (fun && fun->decl == fndecl
7837 && fun->cfg
7838 && basic_block_info_for_fn (fun))
7840 /* If the CFG has been built, emit a CFG-based dump. */
7841 if (!ignore_topmost_bind)
7842 fprintf (file, "{\n");
7844 if (any_var && n_basic_blocks_for_fn (fun))
7845 fprintf (file, "\n");
7847 FOR_EACH_BB_FN (bb, fun)
7848 dump_bb (file, bb, 2, flags);
7850 fprintf (file, "}\n");
/* NOTE(review): FUN is tested for NULL in the branch above but is
   dereferenced unconditionally here; presumably all callers reaching
   this point supply a DECL_STRUCT_FUNCTION — confirm.  */
7852 else if (fun->curr_properties & PROP_gimple_any)
7854 /* The function is now in GIMPLE form but the CFG has not been
7855 built yet. Emit the single sequence of GIMPLE statements
7856 that make up its body. */
7857 gimple_seq body = gimple_body (fndecl);
7859 if (gimple_seq_first_stmt (body)
7860 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7861 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7862 print_gimple_seq (file, body, 0, flags);
7863 else
7865 if (!ignore_topmost_bind)
7866 fprintf (file, "{\n");
7868 if (any_var)
7869 fprintf (file, "\n");
7871 print_gimple_seq (file, body, 2, flags);
7872 fprintf (file, "}\n");
7875 else
7877 int indent;
7879 /* Make a tree based dump. */
7880 chain = DECL_SAVED_TREE (fndecl);
7881 if (chain && TREE_CODE (chain) == BIND_EXPR)
7883 if (ignore_topmost_bind)
7885 chain = BIND_EXPR_BODY (chain);
7886 indent = 2;
7888 else
7889 indent = 0;
7891 else
7893 if (!ignore_topmost_bind)
7895 fprintf (file, "{\n");
7896 /* No topmost bind, pretend it's ignored for later. */
7897 ignore_topmost_bind = true;
7899 indent = 2;
7902 if (any_var)
7903 fprintf (file, "\n");
7905 print_generic_stmt_indented (file, chain, flags, indent);
7906 if (ignore_topmost_bind)
7907 fprintf (file, "}\n");
7910 if (flags & TDF_ENUMERATE_LOCALS)
7911 dump_enumerated_decls (file, flags);
7912 fprintf (file, "\n\n");
/* Restore the caller's notion of the current function.  */
7914 current_function_decl = old_current_fndecl;
7917 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7919 DEBUG_FUNCTION void
7920 debug_function (tree fn, dump_flags_t flags)
7922 dump_function_to_file (fn, stderr, flags);
7926 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7928 static void
7929 print_pred_bbs (FILE *file, basic_block bb)
7931 edge e;
7932 edge_iterator ei;
7934 FOR_EACH_EDGE (e, ei, bb->preds)
7935 fprintf (file, "bb_%d ", e->src->index);
7939 /* Print on FILE the indexes for the successors of basic_block BB. */
7941 static void
7942 print_succ_bbs (FILE *file, basic_block bb)
7944 edge e;
7945 edge_iterator ei;
7947 FOR_EACH_EDGE (e, ei, bb->succs)
7948 fprintf (file, "bb_%d ", e->dest->index);
7951 /* Print to FILE the basic block BB following the VERBOSITY level. */
7953 void
7954 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7956 char *s_indent = (char *) alloca ((size_t) indent + 1);
7957 memset ((void *) s_indent, ' ', (size_t) indent);
7958 s_indent[indent] = '\0';
7960 /* Print basic_block's header. */
7961 if (verbosity >= 2)
7963 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7964 print_pred_bbs (file, bb);
7965 fprintf (file, "}, succs = {");
7966 print_succ_bbs (file, bb);
7967 fprintf (file, "})\n");
7970 /* Print basic_block's body. */
7971 if (verbosity >= 3)
7973 fprintf (file, "%s {\n", s_indent);
7974 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7975 fprintf (file, "%s }\n", s_indent);
7979 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7981 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7982 VERBOSITY level this outputs the contents of the loop, or just its
7983 structure. */
7985 static void
7986 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7988 char *s_indent;
7989 basic_block bb;
7991 if (loop == NULL)
7992 return;
7994 s_indent = (char *) alloca ((size_t) indent + 1);
7995 memset ((void *) s_indent, ' ', (size_t) indent);
7996 s_indent[indent] = '\0';
7998 /* Print loop's header. */
7999 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8000 if (loop->header)
8001 fprintf (file, "header = %d", loop->header->index);
8002 else
8004 fprintf (file, "deleted)\n");
8005 return;
8007 if (loop->latch)
8008 fprintf (file, ", latch = %d", loop->latch->index);
8009 else
8010 fprintf (file, ", multiple latches");
8011 fprintf (file, ", niter = ");
8012 print_generic_expr (file, loop->nb_iterations);
8014 if (loop->any_upper_bound)
8016 fprintf (file, ", upper_bound = ");
8017 print_decu (loop->nb_iterations_upper_bound, file);
8019 if (loop->any_likely_upper_bound)
8021 fprintf (file, ", likely_upper_bound = ");
8022 print_decu (loop->nb_iterations_likely_upper_bound, file);
8025 if (loop->any_estimate)
8027 fprintf (file, ", estimate = ");
8028 print_decu (loop->nb_iterations_estimate, file);
8030 fprintf (file, ")\n");
8032 /* Print loop's body. */
8033 if (verbosity >= 1)
8035 fprintf (file, "%s{\n", s_indent);
8036 FOR_EACH_BB_FN (bb, cfun)
8037 if (bb->loop_father == loop)
8038 print_loops_bb (file, bb, indent, verbosity);
8040 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8041 fprintf (file, "%s}\n", s_indent);
8045 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8046 spaces. Following VERBOSITY level this outputs the contents of the
8047 loop, or just its structure. */
8049 static void
8050 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8051 int verbosity)
8053 if (loop == NULL)
8054 return;
8056 print_loop (file, loop, indent, verbosity);
8057 print_loop_and_siblings (file, loop->next, indent, verbosity);
8060 /* Follow a CFG edge from the entry point of the program, and on entry
8061 of a loop, pretty print the loop structure on FILE. */
8063 void
8064 print_loops (FILE *file, int verbosity)
8066 basic_block bb;
8068 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8069 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8070 if (bb && bb->loop_father)
8071 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8074 /* Dump a loop. */
8076 DEBUG_FUNCTION void
8077 debug (struct loop &ref)
8079 print_loop (stderr, &ref, 0, /*verbosity*/0);
8082 DEBUG_FUNCTION void
8083 debug (struct loop *ptr)
8085 if (ptr)
8086 debug (*ptr);
8087 else
8088 fprintf (stderr, "<nil>\n");
8091 /* Dump a loop verbosely. */
8093 DEBUG_FUNCTION void
8094 debug_verbose (struct loop &ref)
8096 print_loop (stderr, &ref, 0, /*verbosity*/3);
8099 DEBUG_FUNCTION void
8100 debug_verbose (struct loop *ptr)
8102 if (ptr)
8103 debug (*ptr);
8104 else
8105 fprintf (stderr, "<nil>\n");
8109 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8111 DEBUG_FUNCTION void
8112 debug_loops (int verbosity)
8114 print_loops (stderr, verbosity);
8117 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8119 DEBUG_FUNCTION void
8120 debug_loop (struct loop *loop, int verbosity)
8122 print_loop (stderr, loop, 0, verbosity);
8125 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8126 level. */
8128 DEBUG_FUNCTION void
8129 debug_loop_num (unsigned num, int verbosity)
8131 debug_loop (get_loop (cfun, num), verbosity);
8134 /* Return true if BB ends with a call, possibly followed by some
8135 instructions that must stay with the call. Return false,
8136 otherwise. */
8138 static bool
8139 gimple_block_ends_with_call_p (basic_block bb)
8141 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8142 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8146 /* Return true if BB ends with a conditional branch. Return false,
8147 otherwise. */
8149 static bool
8150 gimple_block_ends_with_condjump_p (const_basic_block bb)
8152 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8153 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8157 /* Return true if statement T may terminate execution of BB in ways not
8158 explicitly represented in the CFG. */
8160 bool
8161 stmt_can_terminate_bb_p (gimple *t)
8163 tree fndecl = NULL_TREE;
8164 int call_flags = 0;
8166 /* Eh exception not handled internally terminates execution of the whole
8167 function. */
8168 if (stmt_can_throw_external (t))
8169 return true;
8171 /* NORETURN and LONGJMP calls already have an edge to exit.
8172 CONST and PURE calls do not need one.
8173 We don't currently check for CONST and PURE here, although
8174 it would be a good idea, because those attributes are
8175 figured out from the RTL in mark_constant_function, and
8176 the counter incrementation code from -fprofile-arcs
8177 leads to different results from -fbranch-probabilities. */
8178 if (is_gimple_call (t))
8180 fndecl = gimple_call_fndecl (t);
8181 call_flags = gimple_call_flags (t);
/* Nothrow, non-returns-twice built-ins are taken not to terminate the
   block abnormally — except fork, see below.  */
8184 if (is_gimple_call (t)
8185 && fndecl
8186 && DECL_BUILT_IN (fndecl)
8187 && (call_flags & ECF_NOTHROW)
8188 && !(call_flags & ECF_RETURNS_TWICE)
8189 /* fork() doesn't really return twice, but the effect of
8190 wrapping it in __gcov_fork() which calls __gcov_flush()
8191 and clears the counters before forking has the same
8192 effect as returning twice. Force a fake edge. */
8193 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8194 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8195 return false;
8197 if (is_gimple_call (t))
8199 edge_iterator ei;
8200 edge e;
8201 basic_block bb;
8203 if (call_flags & (ECF_PURE | ECF_CONST)
8204 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8205 return false;
8207 /* Function call may do longjmp, terminate program or do other things.
8208 Special case noreturn that have non-abnormal edges out as in this case
8209 the fact is sufficiently represented by lack of edges out of T. */
8210 if (!(call_flags & ECF_NORETURN))
8211 return true;
/* A noreturn call terminates the block only if it still has a real
   (non-fake) outgoing edge.  */
8213 bb = gimple_bb (t);
8214 FOR_EACH_EDGE (e, ei, bb->succs)
8215 if ((e->flags & EDGE_FAKE) == 0)
8216 return true;
/* Volatile asms and asms with input operands may have unmodeled
   effects.  */
8219 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8220 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8221 return true;
8223 return false;
8227 /* Add fake edges to the function exit for any non constant and non
8228 noreturn calls (or noreturn calls with EH/abnormal edges),
8229 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8230 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8231 that were split.
8233 The goal is to expose cases in which entering a basic block does
8234 not imply that all subsequent instructions must be executed. */
8236 static int
8237 gimple_flow_call_edges_add (sbitmap blocks)
8239 int i;
8240 int blocks_split = 0;
8241 int last_bb = last_basic_block_for_fn (cfun);
8242 bool check_last_block = false;
8244 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8245 return 0;
8247 if (! blocks)
8248 check_last_block = true;
8249 else
8250 check_last_block = bitmap_bit_p (blocks,
8251 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8253 /* In the last basic block, before epilogue generation, there will be
8254 a fallthru edge to EXIT. Special care is required if the last insn
8255 of the last basic block is a call because make_edge folds duplicate
8256 edges, which would result in the fallthru edge also being marked
8257 fake, which would result in the fallthru edge being removed by
8258 remove_fake_edges, which would result in an invalid CFG.
8260 Moreover, we can't elide the outgoing fake edge, since the block
8261 profiler needs to take this into account in order to solve the minimal
8262 spanning tree in the case that the call doesn't return.
8264 Handle this by adding a dummy instruction in a new last basic block. */
8265 if (check_last_block)
8267 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8268 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8269 gimple *t = NULL;
8271 if (!gsi_end_p (gsi))
8272 t = gsi_stmt (gsi);
8274 if (t && stmt_can_terminate_bb_p (t))
8276 edge e;
8278 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8279 if (e)
/* Insert a nop on the fallthru edge so the terminating call is no
   longer the last statement before EXIT.  */
8281 gsi_insert_on_edge (e, gimple_build_nop ());
8282 gsi_commit_edge_inserts ();
8287 /* Now add fake edges to the function exit for any non constant
8288 calls since there is no way that we can determine if they will
8289 return or not... */
8290 for (i = 0; i < last_bb; i++)
8292 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8293 gimple_stmt_iterator gsi;
8294 gimple *stmt, *last_stmt;
8296 if (!bb)
8297 continue;
8299 if (blocks && !bitmap_bit_p (blocks, i))
8300 continue;
/* Walk BB's statements backwards from the last one; any statement that
   can terminate the block gets the block split after it (unless it is
   already last) and a fake edge to EXIT.  */
8302 gsi = gsi_last_nondebug_bb (bb);
8303 if (!gsi_end_p (gsi))
8305 last_stmt = gsi_stmt (gsi);
8308 stmt = gsi_stmt (gsi);
8309 if (stmt_can_terminate_bb_p (stmt))
8311 edge e;
8313 /* The handling above of the final block before the
8314 epilogue should be enough to verify that there is
8315 no edge to the exit block in CFG already.
8316 Calling make_edge in such case would cause us to
8317 mark that edge as fake and remove it later. */
8318 if (flag_checking && stmt == last_stmt)
8320 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8321 gcc_assert (e == NULL);
8324 /* Note that the following may create a new basic block
8325 and renumber the existing basic blocks. */
8326 if (stmt != last_stmt)
8328 e = split_block (bb, stmt);
8329 if (e)
8330 blocks_split++;
8332 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8333 e->probability = profile_probability::guessed_never ();
8335 gsi_prev (&gsi);
8337 while (!gsi_end_p (gsi));
8341 if (blocks_split)
8342 checking_verify_flow_info ();
8344 return blocks_split;
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominator info we cannot compute the set of dominated
     blocks; just drop the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF starts as the successors of the removed blocks ...  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* ... minus the removed blocks themselves, i.e. the dominance
	 frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8475 /* Purge dead EH edges from basic block BB. */
8477 bool
8478 gimple_purge_dead_eh_edges (basic_block bb)
8480 bool changed = false;
8481 edge e;
8482 edge_iterator ei;
8483 gimple *stmt = last_stmt (bb);
8485 if (stmt && stmt_can_throw_internal (stmt))
8486 return false;
8488 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8490 if (e->flags & EDGE_EH)
8492 remove_edge_and_dominated_blocks (e);
8493 changed = true;
8495 else
8496 ei_next (&ei);
8499 return changed;
8502 /* Purge dead EH edges from basic block listed in BLOCKS. */
8504 bool
8505 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8507 bool changed = false;
8508 unsigned i;
8509 bitmap_iterator bi;
8511 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8513 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8515 /* Earlier gimple_purge_dead_eh_edges could have removed
8516 this basic block already. */
8517 gcc_assert (bb || changed);
8518 if (bb != NULL)
8519 changed |= gimple_purge_dead_eh_edges (bb);
8522 return changed;
8525 /* Purge dead abnormal call edges from basic block BB. */
8527 bool
8528 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8530 bool changed = false;
8531 edge e;
8532 edge_iterator ei;
8533 gimple *stmt = last_stmt (bb);
8535 if (!cfun->has_nonlocal_label
8536 && !cfun->calls_setjmp)
8537 return false;
8539 if (stmt && stmt_can_make_abnormal_goto (stmt))
8540 return false;
8542 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8544 if (e->flags & EDGE_ABNORMAL)
8546 if (e->flags & EDGE_FALLTHRU)
8547 e->flags &= ~EDGE_ABNORMAL;
8548 else
8549 remove_edge_and_dominated_blocks (e);
8550 changed = true;
8552 else
8553 ei_next (&ei);
8556 return changed;
8559 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8561 bool
8562 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8564 bool changed = false;
8565 unsigned i;
8566 bitmap_iterator bi;
8568 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8570 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8572 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8573 this basic block already. */
8574 gcc_assert (bb || changed);
8575 if (bb != NULL)
8576 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8579 return changed;
8582 /* This function is called whenever a new edge is created or
8583 redirected. */
8585 static void
8586 gimple_execute_on_growing_pred (edge e)
8588 basic_block bb = e->dest;
8590 if (!gimple_seq_empty_p (phi_nodes (bb)))
8591 reserve_phi_args_for_new_edge (bb);
8594 /* This function is called immediately before edge E is removed from
8595 the edge vector E->dest->preds. */
8597 static void
8598 gimple_execute_on_shrinking_pred (edge e)
8600 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8601 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two blocks' PHI chains are iterated in lockstep; the argument
     SECOND received on E2 (and its location) is reused for the
     corresponding PHI in FIRST on the new edge E.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8645 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8646 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8647 the destination of the ELSE part. */
8649 static void
8650 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8651 basic_block second_head ATTRIBUTE_UNUSED,
8652 basic_block cond_bb, void *cond_e)
8654 gimple_stmt_iterator gsi;
8655 gimple *new_cond_expr;
8656 tree cond_expr = (tree) cond_e;
8657 edge e0;
8659 /* Build new conditional expr */
8660 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8661 NULL_TREE, NULL_TREE);
8663 /* Add new cond in cond_bb. */
8664 gsi = gsi_last_bb (cond_bb);
8665 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8667 /* Adjust edges appropriately to connect new head with first head
8668 as well as second head. */
8669 e0 = single_succ_edge (cond_bb);
8670 e0->flags &= ~EDGE_FALLTHRU;
8671 e0->flags |= EDGE_FALSE_VALUE;
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */
static void
gimple_account_profile_record (basic_block bb, int after_pass,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      /* Accumulate an estimated size for every statement of BB.  */
      record->size[after_pass]
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      /* Weight estimated time by the real profile count when available;
	 otherwise fall back to the guessed block frequency.  */
      if (bb->count.initialized_p ())
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->frequency;
    }
}
/* CFG hook table for the GIMPLE representation; consumed by the generic
   CFG machinery (cfghooks).  Entry order must match struct cfg_hooks.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb, /* dump_bb  */
  gimple_dump_bb_for_graph, /* dump_bb_for_graph  */
  create_bb, /* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p, /* can_remove_branch_p  */
  remove_bb, /* delete_basic_block  */
  gimple_split_block, /* split_block  */
  gimple_move_block_after, /* move_block_after  */
  gimple_can_merge_blocks_p, /* can_merge_blocks_p  */
  gimple_merge_blocks, /* merge_blocks  */
  gimple_predict_edge, /* predict_edge  */
  gimple_predicted_by_p, /* predicted_by_p  */
  gimple_can_duplicate_bb_p, /* can_duplicate_block_p  */
  gimple_duplicate_bb, /* duplicate_block  */
  gimple_split_edge, /* split_edge  */
  gimple_make_forwarder_block, /* make_forward_block  */
  NULL, /* tidy_fallthru_edge  */
  NULL, /* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add, /* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts, /* flush_pending_stmts  */
  gimple_empty_block_p, /* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
/* Split all critical edges.  Returns a TODO flag set (always 0 here).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge-splitting pass ("crited").  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory entry point used by the pass manager.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update profile so the new bb is visited with probability PROB.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  /* Split BB after STMT; FALL carries the straight-line continuation.  */
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  new_bb->frequency = prob.apply (bb->frequency);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb.  The FALSE arm gets the remaining
     probability mass.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
8867 /* Build a ternary operation and gimplify it. Emit code before GSI.
8868 Return the gimple_val holding the result. */
8870 tree
8871 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8872 tree type, tree a, tree b, tree c)
8874 tree ret;
8875 location_t loc = gimple_location (gsi_stmt (*gsi));
8877 ret = fold_build3_loc (loc, code, type, a, b, c);
8878 STRIP_NOPS (ret);
8880 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8881 GSI_SAME_STMT);
8884 /* Build a binary operation and gimplify it. Emit code before GSI.
8885 Return the gimple_val holding the result. */
8887 tree
8888 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8889 tree type, tree a, tree b)
8891 tree ret;
8893 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8894 STRIP_NOPS (ret);
8896 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8897 GSI_SAME_STMT);
8900 /* Build a unary operation and gimplify it. Emit code before GSI.
8901 Return the gimple_val holding the result. */
8903 tree
8904 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8905 tree a)
8907 tree ret;
8909 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8910 STRIP_NOPS (ret);
8912 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8913 GSI_SAME_STMT);
8918 /* Given a basic block B which ends with a conditional and has
8919 precisely two successors, determine which of the edges is taken if
8920 the conditional is true and which is taken if the conditional is
8921 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8923 void
8924 extract_true_false_edges_from_block (basic_block b,
8925 edge *true_edge,
8926 edge *false_edge)
8928 edge e = EDGE_SUCC (b, 0);
8930 if (e->flags & EDGE_TRUE_VALUE)
8932 *true_edge = e;
8933 *false_edge = EDGE_SUCC (b, 1);
8935 else
8937 *false_edge = e;
8938 *true_edge = EDGE_SUCC (b, 1);
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else return false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
				     edge *true_controlled_edge,
				     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  /* Classify the first predecessor of PHIBLOCK as controlled by the
     true edge (-> E0) or the false edge (-> E1).  */
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  /* Classify the second predecessor the same way.  */
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  /* Exactly one true-controlled and one false-controlled predecessor
     must have been found.  */
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
/* Generate a range test LHS CODE RHS that determines whether INDEX is in the
   range [low, high].  Place associated stmts before *GSI.
   NOTE(review): the comment above mentions *GSI but the parameter is BB;
   statements are inserted before the last statement of BB — confirm against
   callers.  The test emitted is the classic unsigned trick:
   (unsigned)(INDEX - LOW) <= (unsigned)(HIGH - LOW).  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = unsigned_type_for (type);

  low = fold_convert (type, low);
  high = fold_convert (type, high);

  /* tmp = INDEX - LOW, computed in the signed type.  */
  tree tmp = make_ssa_name (type);
  gassign *sub1
    = gimple_build_assign (tmp, MINUS_EXPR, index, low);

  /* *LHS = (utype) tmp.  */
  *lhs = make_ssa_name (utype);
  gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);

  /* *RHS = HIGH - LOW, folded at compile time.  */
  *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, a, GSI_SAME_STMT);
}
/* Emit return warnings.  */

namespace {

/* Pass descriptor for the return-warning pass.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

/* Warn for "noreturn" functions that actually return and, with
   -Wreturn-type, for control reaching the end of a non-void function.  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = last_stmt (e->src);
	  /* Remember the first located return for the warning; when not
	     optimizing we can stop at the first hit.  */
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      gimple *new_stmt = gimple_build_call (fndecl, 0);
	      gimple_set_location (new_stmt, gimple_location (last));
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple *last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace

/* Factory entry point used by the pass manager.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into every
   statement container (binds, tries, catches, EH filters).  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      /* Name the callee when we can; indirect calls get the
		 generic wording.  */
	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

/* Pass descriptor for the unused-result warning pass.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory entry point used by the pass manager.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  /* Rescale profile counts when the cgraph node's count diverged from
     the function entry count (e.g. after cloning/inlining).  */
  bool scale = num.initialized_p ()
	       && (den > 0 || num == profile_count::zero ())
	       && !(num == den);

  if (scale)
    {
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      /* NOTE: gsi_next is only called at the bottom of the loop; paths
	 that remove the current statement use `continue' instead.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }
	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining the callgraph must track the new call.  */
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  int freq
		    = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count, freq);
		}
	    }
	}
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
9390 namespace {
9392 const pass_data pass_data_fixup_cfg =
9394 GIMPLE_PASS, /* type */
9395 "fixup_cfg", /* name */
9396 OPTGROUP_NONE, /* optinfo_flags */
9397 TV_NONE, /* tv_id */
9398 PROP_cfg, /* properties_required */
9399 0, /* properties_provided */
9400 0, /* properties_destroyed */
9401 0, /* todo_flags_start */
9402 0, /* todo_flags_finish */
9405 class pass_fixup_cfg : public gimple_opt_pass
9407 public:
9408 pass_fixup_cfg (gcc::context *ctxt)
9409 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9412 /* opt_pass methods: */
9413 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9414 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9416 }; // class pass_fixup_cfg
9418 } // anon namespace
9420 gimple_opt_pass *
9421 make_pass_fixup_cfg (gcc::context *ctxt)
9423 return new pass_fixup_cfg (ctxt);
9426 /* Garbage collection support for edge_def. */
9428 extern void gt_ggc_mx (tree&);
9429 extern void gt_ggc_mx (gimple *&);
9430 extern void gt_ggc_mx (rtx&);
9431 extern void gt_ggc_mx (basic_block&);
9433 static void
9434 gt_ggc_mx (rtx_insn *& x)
9436 if (x)
9437 gt_ggc_mx_rtx_def ((void *) x);
9440 void
9441 gt_ggc_mx (edge_def *e)
9443 tree block = LOCATION_BLOCK (e->goto_locus);
9444 gt_ggc_mx (e->src);
9445 gt_ggc_mx (e->dest);
9446 if (current_ir_type () == IR_GIMPLE)
9447 gt_ggc_mx (e->insns.g);
9448 else
9449 gt_ggc_mx (e->insns.r);
9450 gt_ggc_mx (block);
9453 /* PCH support for edge_def. */
9455 extern void gt_pch_nx (tree&);
9456 extern void gt_pch_nx (gimple *&);
9457 extern void gt_pch_nx (rtx&);
9458 extern void gt_pch_nx (basic_block&);
9460 static void
9461 gt_pch_nx (rtx_insn *& x)
9463 if (x)
9464 gt_pch_nx_rtx_def ((void *) x);
9467 void
9468 gt_pch_nx (edge_def *e)
9470 tree block = LOCATION_BLOCK (e->goto_locus);
9471 gt_pch_nx (e->src);
9472 gt_pch_nx (e->dest);
9473 if (current_ir_type () == IR_GIMPLE)
9474 gt_pch_nx (e->insns.g);
9475 else
9476 gt_pch_nx (e->insns.r);
9477 gt_pch_nx (block);
9480 void
9481 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9483 tree block = LOCATION_BLOCK (e->goto_locus);
9484 op (&(e->src), cookie);
9485 op (&(e->dest), cookie);
9486 if (current_ir_type () == IR_GIMPLE)
9487 op (&(e->insns.g), cookie);
9488 else
9489 op (&(e->insns.r), cookie);
9490 op (&(block), cookie);
9493 #if CHECKING_P
9495 namespace selftest {
9497 /* Helper function for CFG selftests: create a dummy function decl
9498 and push it as cfun. */
9500 static tree
9501 push_fndecl (const char *name)
9503 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9504 /* FIXME: this uses input_location: */
9505 tree fndecl = build_fn_decl (name, fn_type);
9506 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9507 NULL_TREE, integer_type_node);
9508 DECL_RESULT (fndecl) = retval;
9509 push_struct_function (fndecl);
9510 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9511 ASSERT_TRUE (fun != NULL);
9512 init_empty_tree_cfg_for_function (fun);
9513 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9514 ASSERT_EQ (0, n_edges_for_fn (fun));
9515 return fndecl;
9518 /* These tests directly create CFGs.
9519 Compare with the static fns within tree-cfg.c:
9520 - build_gimple_cfg
9521 - make_blocks: calls create_basic_block (seq, bb);
9522 - make_edges. */
9524 /* Verify a simple cfg of the form:
9525 ENTRY -> A -> B -> C -> EXIT. */
9527 static void
9528 test_linear_chain ()
9530 gimple_register_cfg_hooks ();
9532 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9533 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9535 /* Create some empty blocks. */
9536 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9537 basic_block bb_b = create_empty_bb (bb_a);
9538 basic_block bb_c = create_empty_bb (bb_b);
9540 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9541 ASSERT_EQ (0, n_edges_for_fn (fun));
9543 /* Create some edges: a simple linear chain of BBs. */
9544 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9545 make_edge (bb_a, bb_b, 0);
9546 make_edge (bb_b, bb_c, 0);
9547 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9549 /* Verify the edges. */
9550 ASSERT_EQ (4, n_edges_for_fn (fun));
9551 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9552 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9553 ASSERT_EQ (1, bb_a->preds->length ());
9554 ASSERT_EQ (1, bb_a->succs->length ());
9555 ASSERT_EQ (1, bb_b->preds->length ());
9556 ASSERT_EQ (1, bb_b->succs->length ());
9557 ASSERT_EQ (1, bb_c->preds->length ());
9558 ASSERT_EQ (1, bb_c->succs->length ());
9559 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9560 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9562 /* Verify the dominance information
9563 Each BB in our simple chain should be dominated by the one before
9564 it. */
9565 calculate_dominance_info (CDI_DOMINATORS);
9566 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9567 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9568 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9569 ASSERT_EQ (1, dom_by_b.length ());
9570 ASSERT_EQ (bb_c, dom_by_b[0]);
9571 free_dominance_info (CDI_DOMINATORS);
9572 dom_by_b.release ();
9574 /* Similarly for post-dominance: each BB in our chain is post-dominated
9575 by the one after it. */
9576 calculate_dominance_info (CDI_POST_DOMINATORS);
9577 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9578 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9579 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9580 ASSERT_EQ (1, postdom_by_b.length ());
9581 ASSERT_EQ (bb_a, postdom_by_b[0]);
9582 free_dominance_info (CDI_POST_DOMINATORS);
9583 postdom_by_b.release ();
9585 pop_cfun ();
/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      /t \f
     B    C
      \   /
       D
       |
     EXIT.  */
9601 static void
9602 test_diamond ()
9604 gimple_register_cfg_hooks ();
9606 tree fndecl = push_fndecl ("cfg_test_diamond");
9607 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9609 /* Create some empty blocks. */
9610 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9611 basic_block bb_b = create_empty_bb (bb_a);
9612 basic_block bb_c = create_empty_bb (bb_a);
9613 basic_block bb_d = create_empty_bb (bb_b);
9615 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9616 ASSERT_EQ (0, n_edges_for_fn (fun));
9618 /* Create the edges. */
9619 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9620 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9621 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9622 make_edge (bb_b, bb_d, 0);
9623 make_edge (bb_c, bb_d, 0);
9624 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9626 /* Verify the edges. */
9627 ASSERT_EQ (6, n_edges_for_fn (fun));
9628 ASSERT_EQ (1, bb_a->preds->length ());
9629 ASSERT_EQ (2, bb_a->succs->length ());
9630 ASSERT_EQ (1, bb_b->preds->length ());
9631 ASSERT_EQ (1, bb_b->succs->length ());
9632 ASSERT_EQ (1, bb_c->preds->length ());
9633 ASSERT_EQ (1, bb_c->succs->length ());
9634 ASSERT_EQ (2, bb_d->preds->length ());
9635 ASSERT_EQ (1, bb_d->succs->length ());
9637 /* Verify the dominance information. */
9638 calculate_dominance_info (CDI_DOMINATORS);
9639 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9640 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9641 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9642 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9643 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9644 dom_by_a.release ();
9645 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9646 ASSERT_EQ (0, dom_by_b.length ());
9647 dom_by_b.release ();
9648 free_dominance_info (CDI_DOMINATORS);
9650 /* Similarly for post-dominance. */
9651 calculate_dominance_info (CDI_POST_DOMINATORS);
9652 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9653 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9654 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9655 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9656 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9657 postdom_by_d.release ();
9658 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9659 ASSERT_EQ (0, postdom_by_b.length ());
9660 postdom_by_b.release ();
9661 free_dominance_info (CDI_POST_DOMINATORS);
9663 pop_cfun ();
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       V   /
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/
9685 static void
9686 test_fully_connected ()
9688 gimple_register_cfg_hooks ();
9690 tree fndecl = push_fndecl ("cfg_fully_connected");
9691 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9693 const int n = 4;
9695 /* Create some empty blocks. */
9696 auto_vec <basic_block> subgraph_nodes;
9697 for (int i = 0; i < n; i++)
9698 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9700 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9701 ASSERT_EQ (0, n_edges_for_fn (fun));
9703 /* Create the edges. */
9704 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9705 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9706 for (int i = 0; i < n; i++)
9707 for (int j = 0; j < n; j++)
9708 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9710 /* Verify the edges. */
9711 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9712 /* The first one is linked to ENTRY/EXIT as well as itself and
9713 everything else. */
9714 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9715 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9716 /* The other ones in the subgraph are linked to everything in
9717 the subgraph (including themselves). */
9718 for (int i = 1; i < n; i++)
9720 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9721 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9724 /* Verify the dominance information. */
9725 calculate_dominance_info (CDI_DOMINATORS);
9726 /* The initial block in the subgraph should be dominated by ENTRY. */
9727 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9728 get_immediate_dominator (CDI_DOMINATORS,
9729 subgraph_nodes[0]));
9730 /* Every other block in the subgraph should be dominated by the
9731 initial block. */
9732 for (int i = 1; i < n; i++)
9733 ASSERT_EQ (subgraph_nodes[0],
9734 get_immediate_dominator (CDI_DOMINATORS,
9735 subgraph_nodes[i]));
9736 free_dominance_info (CDI_DOMINATORS);
9738 /* Similarly for post-dominance. */
9739 calculate_dominance_info (CDI_POST_DOMINATORS);
9740 /* The initial block in the subgraph should be postdominated by EXIT. */
9741 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9742 get_immediate_dominator (CDI_POST_DOMINATORS,
9743 subgraph_nodes[0]));
9744 /* Every other block in the subgraph should be postdominated by the
9745 initial block, since that leads to EXIT. */
9746 for (int i = 1; i < n; i++)
9747 ASSERT_EQ (subgraph_nodes[0],
9748 get_immediate_dominator (CDI_POST_DOMINATORS,
9749 subgraph_nodes[i]));
9750 free_dominance_info (CDI_POST_DOMINATORS);
9752 pop_cfun ();
9755 /* Run all of the selftests within this file. */
9757 void
9758 tree_cfg_c_tests ()
9760 test_linear_chain ();
9761 test_diamond ();
9762 test_fully_connected ();
9765 } // namespace selftest
9767 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9768 - loop
9769 - nested loops
9770 - switch statement (a block with many out-edges)
9771 - something that jumps to itself
9772 - etc */
9774 #endif /* CHECKING_P */