/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
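
/* For illustration (example names are hypothetical): with the GNU C
   labels-as-values extension,

     void *targets[] = { &&l1, &&l2 };
     goto *targets[i];

   the jump is represented as a GIMPLE_GOTO whose destination is the
   expression targets[i] rather than a LABEL_DECL, so computed_goto_p
   returns true for it.  */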
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
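
/* For illustration, a sequence such as

     <L0>:
     x = {CLOBBER};
     __builtin_unreachable ();

   is accepted (labels, debug stmts and clobbers are ignored), whereas
   a sequence with any real side effect before the call is rejected.  */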
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
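
/* For illustration, given a lowered sequence like

     a = b;
     if (a != 0) goto <L1>; else goto <L2>;
     <L1>:
     c = d;

   the GIMPLE_COND ends one block (stmt_ends_bb_p) and the label <L1>
   starts the next one (stmt_starts_bb_p), so the sequence above is
   split into two basic blocks at that point.  */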
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
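
/* For illustration (a sketch; p1/p2 are hypothetical names), factoring
   rewrites two computed gotos

     bb1: goto *p1;		bb2: goto *p2;

   into assignments to a shared variable plus a single dispatcher:

     bb1: gotovar = p1;		bb2: gotovar = p2;
	  (fallthru edge)	     (fallthru edge)
     dispatcher:
       <factored_label>:
       goto *gotovar;

   with edges from the dispatcher to every potential target block, so
   the number of edges grows linearly with the number of gotos and
   targets instead of quadratically.  */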
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  omp_free_regions ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
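
/* For illustration, in

     for (i = 0; i < n; i++) sum += a[i];

   the loop's condition, body and increment can all map to the same
   source line.  Giving the extra basic blocks distinct discriminators
   lets a sample-based profiler attribute execution counts to each
   block separately instead of lumping them together.  */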
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
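
/* For illustration, a lowered conditional such as

     if (a_1 > 0) goto <L1>; else goto <L2>;

   yields an EDGE_TRUE_VALUE edge to the block holding <L1> and an
   EDGE_FALSE_VALUE edge to the block holding <L2>; once the edges
   exist, the labels on the GIMPLE_COND are cleared as redundant.  */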
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
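
/* For illustration (the label name is hypothetical), an asm goto such as

     asm goto ("..." : : : : out);

   carries the label 'out' as a label operand, so an edge is made from
   BB to the block holding that label, in addition to the fallthru edge
   added by the caller.  */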
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1965 /* Merge block B into block A. */
1967 static void
1968 gimple_merge_blocks (basic_block a, basic_block b)
1970 gimple_stmt_iterator last, gsi;
1971 gphi_iterator psi;
1973 if (dump_file)
1974 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1976 /* Remove all single-valued PHI nodes from block B of the form
1977 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1978 gsi = gsi_last_bb (a);
1979 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1981 gimple *phi = gsi_stmt (psi);
1982 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1983 gimple *copy;
1984 bool may_replace_uses = (virtual_operand_p (def)
1985 || may_propagate_copy (def, use));
1987 /* In case we maintain loop closed ssa form, do not propagate arguments
1988 of loop exit phi nodes. */
1989 if (current_loops
1990 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1991 && !virtual_operand_p (def)
1992 && TREE_CODE (use) == SSA_NAME
1993 && a->loop_father != b->loop_father)
1994 may_replace_uses = false;
1996 if (!may_replace_uses)
1998 gcc_assert (!virtual_operand_p (def));
2000 /* Note that just emitting the copies is fine -- there is no problem
2001 with ordering of phi nodes. This is because A is the single
2002 predecessor of B, therefore results of the phi nodes cannot
2003 appear as arguments of the phi nodes. */
2004 copy = gimple_build_assign (def, use);
2005 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2006 remove_phi_node (&psi, false);
2008 else
2010 /* If we deal with a PHI for virtual operands, we can simply
2011 propagate these without fussing with folding or updating
2012 the stmt. */
2013 if (virtual_operand_p (def))
2015 imm_use_iterator iter;
2016 use_operand_p use_p;
2017 gimple *stmt;
2019 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2020 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2021 SET_USE (use_p, use);
2023 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2024 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2026 else
2027 replace_uses_by (def, use);
2029 remove_phi_node (&psi, true);
2033 /* Ensure that B follows A. */
2034 move_block_after (b, a);
2036 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2037 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2039 /* Remove labels from B and set gimple_bb to A for other statements. */
2040 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2042 gimple *stmt = gsi_stmt (gsi);
2043 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2045 tree label = gimple_label_label (label_stmt);
2046 int lp_nr;
2048 gsi_remove (&gsi, false);
2050 /* Now that we can thread computed gotos, we might have
2051 a situation where we have a forced label in block B.
2052 However, the label at the start of block B might still be
2053 used in other ways (think about the runtime checking for
2054 Fortran assigned gotos). So we cannot just delete the
2055 label. Instead we move the label to the start of block A. */
2056 if (FORCED_LABEL (label))
2058 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2059 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2061 /* Other user labels are kept around in the form of a debug stmt. */
2062 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2064 gimple *dbg = gimple_build_debug_bind (label,
2065 integer_zero_node,
2066 stmt);
2067 gimple_debug_bind_reset_value (dbg);
2068 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2071 lp_nr = EH_LANDING_PAD_NR (label);
2072 if (lp_nr)
2074 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2075 lp->post_landing_pad = NULL;
2078 else
2080 gimple_set_bb (stmt, a);
2081 gsi_next (&gsi);
2085 /* When merging two BBs, if their counts are different, the larger count
2086 is selected as the new bb count. This is to handle inconsistent
2087 profiles. */
2088 if (a->loop_father == b->loop_father)
2090 a->count = a->count.merge (b->count);
2093 /* Merge the sequences. */
2094 last = gsi_last_bb (a);
2095 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2096 set_bb_seq (b, NULL);
2098 if (cfgcleanup_altered_bbs)
2099 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
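/* Illustrative sketch of the precondition: merging is only valid when
   A is B's sole predecessor and B is A's sole successor over a
   fallthru edge,

       A                A
       |       ==>      [A's stmts followed by B's stmts]
       B

   which callers establish via the can_merge_blocks_p hook; the
   assertions above double-check the fallthru shape.  */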
2103 /* Return the one of BB's two successors that is not reachable via a
2104 complex edge, if there is one. Otherwise, return BB. We use
2105 this in optimizations that use post-dominators for their heuristics,
2106 to catch the cases in C++ where function calls are involved. */
2108 basic_block
2109 single_noncomplex_succ (basic_block bb)
2111 edge e0, e1;
2112 if (EDGE_COUNT (bb->succs) != 2)
2113 return bb;
2115 e0 = EDGE_SUCC (bb, 0);
2116 e1 = EDGE_SUCC (bb, 1);
2117 if (e0->flags & EDGE_COMPLEX)
2118 return e1->dest;
2119 if (e1->flags & EDGE_COMPLEX)
2120 return e0->dest;
2122 return bb;
2125 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2127 void
2128 notice_special_calls (gcall *call)
2130 int flags = gimple_call_flags (call);
2132 if (flags & ECF_MAY_BE_ALLOCA)
2133 cfun->calls_alloca = true;
2134 if (flags & ECF_RETURNS_TWICE)
2135 cfun->calls_setjmp = true;
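/* For illustration (user code, not part of GCC):

       void *p = __builtin_alloca (n);      carries ECF_MAY_BE_ALLOCA
       if (setjmp (env)) ...                carries ECF_RETURNS_TWICE

   the first call sets cfun->calls_alloca and the second sets
   cfun->calls_setjmp via the flag tests above.  */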
2139 /* Clear flags set by notice_special_calls. Used by dead code removal
2140 to update the flags. */
2142 void
2143 clear_special_calls (void)
2145 cfun->calls_alloca = false;
2146 cfun->calls_setjmp = false;
2149 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2151 static void
2152 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2154 /* Since this block is no longer reachable, we can just delete all
2155 of its PHI nodes. */
2156 remove_phi_nodes (bb);
2158 /* Remove edges to BB's successors. */
2159 while (EDGE_COUNT (bb->succs) > 0)
2160 remove_edge (EDGE_SUCC (bb, 0));
2164 /* Remove statements of basic block BB. */
2166 static void
2167 remove_bb (basic_block bb)
2169 gimple_stmt_iterator i;
2171 if (dump_file)
2173 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2174 if (dump_flags & TDF_DETAILS)
2176 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2177 fprintf (dump_file, "\n");
2181 if (current_loops)
2183 struct loop *loop = bb->loop_father;
2185 /* If a loop gets removed, clean up the information associated
2186 with it. */
2187 if (loop->latch == bb
2188 || loop->header == bb)
2189 free_numbers_of_iterations_estimates (loop);
2192 /* Remove all the instructions in the block. */
2193 if (bb_seq (bb) != NULL)
2195 /* Walk backwards so as to get a chance to substitute all
2196 released DEFs into debug stmts. See
2197 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2198 details. */
2199 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2201 gimple *stmt = gsi_stmt (i);
2202 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2203 if (label_stmt
2204 && (FORCED_LABEL (gimple_label_label (label_stmt))
2205 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2207 basic_block new_bb;
2208 gimple_stmt_iterator new_gsi;
2210 /* A non-reachable non-local label may still be referenced.
2211 But it no longer needs to carry the extra semantics of
2212 non-locality. */
2213 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2215 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2216 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2219 new_bb = bb->prev_bb;
2220 new_gsi = gsi_start_bb (new_bb);
2221 gsi_remove (&i, false);
2222 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2224 else
2226 /* Release SSA definitions. */
2227 release_defs (stmt);
2228 gsi_remove (&i, true);
2231 if (gsi_end_p (i))
2232 i = gsi_last_bb (bb);
2233 else
2234 gsi_prev (&i);
2238 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2239 bb->il.gimple.seq = NULL;
2240 bb->il.gimple.phi_nodes = NULL;
2244 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2245 predicate VAL, return the edge that will be taken out of the block.
2246 If VAL does not match a unique edge, NULL is returned. */
2248 edge
2249 find_taken_edge (basic_block bb, tree val)
2251 gimple *stmt;
2253 stmt = last_stmt (bb);
2255 gcc_assert (is_ctrl_stmt (stmt));
2257 if (gimple_code (stmt) == GIMPLE_COND)
2258 return find_taken_edge_cond_expr (bb, val);
2260 if (gimple_code (stmt) == GIMPLE_SWITCH)
2261 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2263 if (computed_goto_p (stmt))
2265 /* Only optimize if the argument is a label; if the argument is
2266 not a label then we cannot construct a proper CFG.
2268 It may be the case that we only need to allow the LABEL_REF to
2269 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2270 appear inside a LABEL_EXPR just to be safe. */
2271 if (val
2272 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2273 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2274 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2275 return NULL;
2278 gcc_unreachable ();
2281 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2282 statement, determine which of the outgoing edges will be taken out of the
2283 block. Return NULL if any edge may be taken.
2285 static edge
2286 find_taken_edge_computed_goto (basic_block bb, tree val)
2288 basic_block dest;
2289 edge e = NULL;
2291 dest = label_to_block (val);
2292 if (dest)
2294 e = find_edge (bb, dest);
2295 gcc_assert (e != NULL);
2298 return e;
2301 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2302 statement, determine which of the two edges will be taken out of the
2303 block. Return NULL if either edge may be taken. */
2305 static edge
2306 find_taken_edge_cond_expr (basic_block bb, tree val)
2308 edge true_edge, false_edge;
2310 if (val == NULL
2311 || TREE_CODE (val) != INTEGER_CST)
2312 return NULL;
2314 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2316 return (integer_zerop (val) ? false_edge : true_edge);
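/* For example (illustrative): if BB ends in "if (x_1 != 0)" and a pass
   has proven x_1 to be zero, the predicate folds to 0 and

       edge e = find_taken_edge (bb, integer_zero_node);

   returns the false edge, letting CFG cleanup remove the other one.  */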
2319 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2320 statement, determine which edge will be taken out of the block. Return
2321 NULL if any edge may be taken. */
2323 static edge
2324 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2325 tree val)
2327 basic_block dest_bb;
2328 edge e;
2329 tree taken_case;
2331 if (gimple_switch_num_labels (switch_stmt) == 1)
2332 taken_case = gimple_switch_default_label (switch_stmt);
2333 else if (! val || TREE_CODE (val) != INTEGER_CST)
2334 return NULL;
2335 else
2336 taken_case = find_case_label_for_value (switch_stmt, val);
2337 dest_bb = label_to_block (CASE_LABEL (taken_case));
2339 e = find_edge (bb, dest_bb);
2340 gcc_assert (e);
2341 return e;
2345 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2346 We can make optimal use here of the fact that the case labels are
2347 sorted: We can do a binary search for a case matching VAL. */
2349 static tree
2350 find_case_label_for_value (gswitch *switch_stmt, tree val)
2352 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2353 tree default_case = gimple_switch_default_label (switch_stmt);
2355 for (low = 0, high = n; high - low > 1; )
2357 size_t i = (high + low) / 2;
2358 tree t = gimple_switch_label (switch_stmt, i);
2359 int cmp;
2361 /* Cache the result of comparing CASE_LOW and val. */
2362 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2364 if (cmp > 0)
2365 high = i;
2366 else
2367 low = i;
2369 if (CASE_HIGH (t) == NULL)
2371 /* A single-valued case label. */
2372 if (cmp == 0)
2373 return t;
2375 else
2377 /* A case range. We can only handle integer ranges. */
2378 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2379 return t;
2383 return default_case;
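/* Worked example (illustrative): for

       switch (x) { default: ...; case 1: ...; case 5 ... 7: ...; }

   the sorted label vector is [default, 1, 5 ... 7].  Searching for
   VAL == 6 first probes index 1 (CASE_LOW 1 <= 6, so low becomes 1),
   then index 2 (CASE_LOW 5 <= 6 and CASE_HIGH 7 >= 6), and returns the
   range label "case 5 ... 7".  */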
2387 /* Dump a basic block on stderr. */
2389 void
2390 gimple_debug_bb (basic_block bb)
2392 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2396 /* Dump basic block with index N on stderr. */
2398 basic_block
2399 gimple_debug_bb_n (int n)
2401 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2402 return BASIC_BLOCK_FOR_FN (cfun, n);
2406 /* Dump the CFG on stderr.
2408 FLAGS are the same as those used by the tree dumping functions
2409 (see TDF_* in dumpfile.h). */
2411 void
2412 gimple_debug_cfg (dump_flags_t flags)
2414 gimple_dump_cfg (stderr, flags);
2418 /* Dump the program showing basic block boundaries on the given FILE.
2420 FLAGS are the same as those used by the tree dumping functions (see
2421 TDF_* in dumpfile.h). */
2423 void
2424 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2426 if (flags & TDF_DETAILS)
2428 dump_function_header (file, current_function_decl, flags);
2429 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2430 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2431 last_basic_block_for_fn (cfun));
2433 brief_dump_cfg (file, flags);
2434 fprintf (file, "\n");
2437 if (flags & TDF_STATS)
2438 dump_cfg_stats (file);
2440 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2444 /* Dump CFG statistics on FILE. */
2446 void
2447 dump_cfg_stats (FILE *file)
2449 static long max_num_merged_labels = 0;
2450 unsigned long size, total = 0;
2451 long num_edges;
2452 basic_block bb;
2453 const char * const fmt_str = "%-30s%-13s%12s\n";
2454 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2455 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2456 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2457 const char *funcname = current_function_name ();
2459 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2461 fprintf (file, "---------------------------------------------------------\n");
2462 fprintf (file, fmt_str, "", " Number of ", "Memory");
2463 fprintf (file, fmt_str, "", " instances ", "used ");
2464 fprintf (file, "---------------------------------------------------------\n");
2466 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2467 total += size;
2468 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2469 SCALE (size), LABEL (size));
2471 num_edges = 0;
2472 FOR_EACH_BB_FN (bb, cfun)
2473 num_edges += EDGE_COUNT (bb->succs);
2474 size = num_edges * sizeof (struct edge_def);
2475 total += size;
2476 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2478 fprintf (file, "---------------------------------------------------------\n");
2479 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2480 LABEL (total));
2481 fprintf (file, "---------------------------------------------------------\n");
2482 fprintf (file, "\n");
2484 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2485 max_num_merged_labels = cfg_stats.num_merged_labels;
2487 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2488 cfg_stats.num_merged_labels, max_num_merged_labels);
2490 fprintf (file, "\n");
2494 /* Dump CFG statistics on stderr. Keep extern so that it's always
2495 linked in the final executable. */
2497 DEBUG_FUNCTION void
2498 debug_cfg_stats (void)
2500 dump_cfg_stats (stderr);
2503 /*---------------------------------------------------------------------------
2504 Miscellaneous helpers
2505 ---------------------------------------------------------------------------*/
2507 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2508 flow. Transfers of control flow associated with EH are excluded. */
2510 static bool
2511 call_can_make_abnormal_goto (gimple *t)
2513 /* If the function has no non-local labels and does not call setjmp,
2514 then a call cannot make an abnormal transfer of control. */
2515 if (!cfun->has_nonlocal_label
2516 && !cfun->calls_setjmp)
2517 return false;
2519 /* Likewise if the call has no side effects. */
2520 if (!gimple_has_side_effects (t))
2521 return false;
2523 /* Likewise if the called function is leaf. */
2524 if (gimple_call_flags (t) & ECF_LEAF)
2525 return false;
2527 return true;
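/* Illustrative user code (not part of GCC) for which this predicate
   returns true: with a nested function performing a non-local goto,
   any side-effecting, non-leaf call in the outer function may return
   to the label rather than to its call site:

       extern void h (void (*) (void));

       void f (void)
       {
         __label__ out;
         void g (void) { goto out; }
         h (g);        this call can transfer control to "out"
        out:;
       }
*/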
2531 /* Return true if T can make an abnormal transfer of control flow.
2532 Transfers of control flow associated with EH are excluded. */
2534 bool
2535 stmt_can_make_abnormal_goto (gimple *t)
2537 if (computed_goto_p (t))
2538 return true;
2539 if (is_gimple_call (t))
2540 return call_can_make_abnormal_goto (t);
2541 return false;
2545 /* Return true if T represents a stmt that always transfers control. */
2547 bool
2548 is_ctrl_stmt (gimple *t)
2550 switch (gimple_code (t))
2552 case GIMPLE_COND:
2553 case GIMPLE_SWITCH:
2554 case GIMPLE_GOTO:
2555 case GIMPLE_RETURN:
2556 case GIMPLE_RESX:
2557 return true;
2558 default:
2559 return false;
2564 /* Return true if T is a statement that may alter the flow of control
2565 (e.g., a call to a non-returning function). */
2567 bool
2568 is_ctrl_altering_stmt (gimple *t)
2570 gcc_assert (t);
2572 switch (gimple_code (t))
2574 case GIMPLE_CALL:
2575 /* Per stmt call flag indicates whether the call could alter
2576 control flow. */
2577 if (gimple_call_ctrl_altering_p (t))
2578 return true;
2579 break;
2581 case GIMPLE_EH_DISPATCH:
2582 /* EH_DISPATCH branches to the individual catch handlers at
2583 this level of a try or allowed-exceptions region. It can
2584 fallthru to the next statement as well. */
2585 return true;
2587 case GIMPLE_ASM:
2588 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2589 return true;
2590 break;
2592 CASE_GIMPLE_OMP:
2593 /* OpenMP directives alter control flow. */
2594 return true;
2596 case GIMPLE_TRANSACTION:
2597 /* A transaction start alters control flow. */
2598 return true;
2600 default:
2601 break;
2604 /* If a statement can throw, it alters control flow. */
2605 return stmt_can_throw_internal (t);
2609 /* Return true if T is a simple local goto. */
2611 bool
2612 simple_goto_p (gimple *t)
2614 return (gimple_code (t) == GIMPLE_GOTO
2615 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2619 /* Return true if STMT should start a new basic block. PREV_STMT is
2620 the statement preceding STMT. It is used when STMT is a label or a
2621 case label. Labels should only start a new basic block if their
2622 previous statement wasn't a label. Otherwise, sequences of labels
2623 would generate unnecessary basic blocks that only contain a single
2624 label. */
2626 static inline bool
2627 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2629 if (stmt == NULL)
2630 return false;
2632 /* Labels start a new basic block only if the preceding statement
2633 wasn't a label of the same type. This prevents the creation of
2634 consecutive blocks that have nothing but a single label. */
2635 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2637 /* Nonlocal and computed GOTO targets always start a new block. */
2638 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2639 || FORCED_LABEL (gimple_label_label (label_stmt)))
2640 return true;
2642 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2644 if (DECL_NONLOCAL (gimple_label_label (
2645 as_a <glabel *> (prev_stmt))))
2646 return true;
2648 cfg_stats.num_merged_labels++;
2649 return false;
2651 else
2652 return true;
2654 else if (gimple_code (stmt) == GIMPLE_CALL)
2656 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2657 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2658 start a new block. */
2659 return true;
2660 if (gimple_call_internal_p (stmt, IFN_PHI)
2661 && prev_stmt
2662 && gimple_code (prev_stmt) != GIMPLE_LABEL
2663 && (gimple_code (prev_stmt) != GIMPLE_CALL
2664 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2665 /* PHI nodes start a new block unless preceded by a label
2666 or another PHI. */
2667 return true;
2670 return false;
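/* For example (illustrative GIMPLE), in

       L1:
       L2:
       x_1 = foo ();

   L1 starts a new basic block (assuming the preceding statement is not
   a label), while L2 returns false here and is merged into the same
   block, bumping cfg_stats.num_merged_labels.  */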
2674 /* Return true if T should end a basic block. */
2676 bool
2677 stmt_ends_bb_p (gimple *t)
2679 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2682 /* Remove block annotations and other data structures. */
2684 void
2685 delete_tree_cfg_annotations (struct function *fn)
2687 vec_free (label_to_block_map_for_fn (fn));
2690 /* Return the virtual phi in BB, or NULL if there is none. */
2692 gphi *
2693 get_virtual_phi (basic_block bb)
2695 for (gphi_iterator gsi = gsi_start_phis (bb);
2696 !gsi_end_p (gsi);
2697 gsi_next (&gsi))
2699 gphi *phi = gsi.phi ();
2701 if (virtual_operand_p (PHI_RESULT (phi)))
2702 return phi;
2705 return NULL;
2708 /* Return the first statement in basic block BB. */
2710 gimple *
2711 first_stmt (basic_block bb)
2713 gimple_stmt_iterator i = gsi_start_bb (bb);
2714 gimple *stmt = NULL;
2716 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2718 gsi_next (&i);
2719 stmt = NULL;
2721 return stmt;
2724 /* Return the first non-label statement in basic block BB. */
2726 static gimple *
2727 first_non_label_stmt (basic_block bb)
2729 gimple_stmt_iterator i = gsi_start_bb (bb);
2730 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2731 gsi_next (&i);
2732 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2735 /* Return the last statement in basic block BB. */
2737 gimple *
2738 last_stmt (basic_block bb)
2740 gimple_stmt_iterator i = gsi_last_bb (bb);
2741 gimple *stmt = NULL;
2743 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2745 gsi_prev (&i);
2746 stmt = NULL;
2748 return stmt;
2751 /* Return the last statement of an otherwise empty block. Return NULL
2752 if the block is totally empty, or if it contains more than one
2753 statement. */
2755 gimple *
2756 last_and_only_stmt (basic_block bb)
2758 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2759 gimple *last, *prev;
2761 if (gsi_end_p (i))
2762 return NULL;
2764 last = gsi_stmt (i);
2765 gsi_prev_nondebug (&i);
2766 if (gsi_end_p (i))
2767 return last;
2769 /* Empty statements should no longer appear in the instruction stream.
2770 Everything that might have appeared before should be deleted by
2771 remove_useless_stmts, and the optimizers should just gsi_remove
2772 instead of smashing with build_empty_stmt.
2774 Thus the only thing that should appear here in a block containing
2775 one executable statement is a label. */
2776 prev = gsi_stmt (i);
2777 if (gimple_code (prev) == GIMPLE_LABEL)
2778 return last;
2779 else
2780 return NULL;
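/* For example (illustrative), a block containing just

       L1:
       return;

   yields the GIMPLE_RETURN, whereas a block with two executable
   statements yields NULL; debug stmts are ignored throughout.  */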
2783 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2785 static void
2786 reinstall_phi_args (edge new_edge, edge old_edge)
2788 edge_var_map *vm;
2789 int i;
2790 gphi_iterator phis;
2792 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2793 if (!v)
2794 return;
2796 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2797 v->iterate (i, &vm) && !gsi_end_p (phis);
2798 i++, gsi_next (&phis))
2800 gphi *phi = phis.phi ();
2801 tree result = redirect_edge_var_map_result (vm);
2802 tree arg = redirect_edge_var_map_def (vm);
2804 gcc_assert (result == gimple_phi_result (phi));
2806 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2809 redirect_edge_var_map_clear (old_edge);
2812 /* Returns the basic block after which the new basic block created
2813 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2814 near its "logical" location. This is of most help to humans looking
2815 at debugging dumps. */
2817 basic_block
2818 split_edge_bb_loc (edge edge_in)
2820 basic_block dest = edge_in->dest;
2821 basic_block dest_prev = dest->prev_bb;
2823 if (dest_prev)
2825 edge e = find_edge (dest_prev, dest);
2826 if (e && !(e->flags & EDGE_COMPLEX))
2827 return edge_in->src;
2829 return dest_prev;
2832 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2833 Abort on abnormal edges. */
2835 static basic_block
2836 gimple_split_edge (edge edge_in)
2838 basic_block new_bb, after_bb, dest;
2839 edge new_edge, e;
2841 /* Abnormal edges cannot be split. */
2842 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2844 dest = edge_in->dest;
2846 after_bb = split_edge_bb_loc (edge_in);
2848 new_bb = create_empty_bb (after_bb);
2849 new_bb->count = edge_in->count ();
2851 e = redirect_edge_and_branch (edge_in, new_bb);
2852 gcc_assert (e == edge_in);
2854 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2855 reinstall_phi_args (new_edge, e);
2857 return new_bb;
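/* Usage sketch (illustrative): passes normally reach this function via
   the cfghooks wrapper,

       basic_block new_bb = split_edge (critical_edge);

   which dispatches here for GIMPLE.  PHI arguments that flowed along
   the original edge are reinstalled on the new fallthru edge by
   reinstall_phi_args above.  */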
2861 /* Verify properties of the address expression T with base object BASE. */
2863 static tree
2864 verify_address (tree t, tree base)
2866 bool old_constant;
2867 bool old_side_effects;
2868 bool new_constant;
2869 bool new_side_effects;
2871 old_constant = TREE_CONSTANT (t);
2872 old_side_effects = TREE_SIDE_EFFECTS (t);
2874 recompute_tree_invariant_for_addr_expr (t);
2875 new_side_effects = TREE_SIDE_EFFECTS (t);
2876 new_constant = TREE_CONSTANT (t);
2878 if (old_constant != new_constant)
2880 error ("constant not recomputed when ADDR_EXPR changed");
2881 return t;
2883 if (old_side_effects != new_side_effects)
2885 error ("side effects not recomputed when ADDR_EXPR changed");
2886 return t;
2889 if (!(VAR_P (base)
2890 || TREE_CODE (base) == PARM_DECL
2891 || TREE_CODE (base) == RESULT_DECL))
2892 return NULL_TREE;
2894 if (DECL_GIMPLE_REG_P (base))
2896 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2897 return base;
2900 return NULL_TREE;
2903 /* Callback for walk_tree, check that all elements with address taken are
2904 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2905 inside a PHI node. */
2907 static tree
2908 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2910 tree t = *tp, x;
2912 if (TYPE_P (t))
2913 *walk_subtrees = 0;
2915 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2916 #define CHECK_OP(N, MSG) \
2917 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2918 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2920 switch (TREE_CODE (t))
2922 case SSA_NAME:
2923 if (SSA_NAME_IN_FREE_LIST (t))
2925 error ("SSA name in freelist but still referenced");
2926 return *tp;
2928 break;
2930 case PARM_DECL:
2931 case VAR_DECL:
2932 case RESULT_DECL:
2934 tree context = decl_function_context (t);
2935 if (context != cfun->decl
2936 && !SCOPE_FILE_SCOPE_P (context)
2937 && !TREE_STATIC (t)
2938 && !DECL_EXTERNAL (t))
2940 error ("Local declaration from a different function");
2941 return t;
2944 break;
2946 case INDIRECT_REF:
2947 error ("INDIRECT_REF in gimple IL");
2948 return t;
2950 case MEM_REF:
2951 x = TREE_OPERAND (t, 0);
2952 if (!POINTER_TYPE_P (TREE_TYPE (x))
2953 || !is_gimple_mem_ref_addr (x))
2955 error ("invalid first operand of MEM_REF");
2956 return x;
2958 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2959 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2961 error ("invalid offset operand of MEM_REF");
2962 return TREE_OPERAND (t, 1);
2964 if (TREE_CODE (x) == ADDR_EXPR)
2966 tree va = verify_address (x, TREE_OPERAND (x, 0));
2967 if (va)
2968 return va;
2969 x = TREE_OPERAND (x, 0);
2971 walk_tree (&x, verify_expr, data, NULL);
2972 *walk_subtrees = 0;
2973 break;
2975 case ASSERT_EXPR:
2976 x = fold (ASSERT_EXPR_COND (t));
2977 if (x == boolean_false_node)
2979 error ("ASSERT_EXPR with an always-false condition");
2980 return *tp;
2982 break;
2984 case MODIFY_EXPR:
2985 error ("MODIFY_EXPR not expected while having tuples");
2986 return *tp;
2988 case ADDR_EXPR:
2990 tree tem;
2992 gcc_assert (is_gimple_address (t));
2994 /* Skip any references (they will be checked when we recurse down the
2995 tree) and ensure that any variable used as a prefix is marked
2996 addressable. */
2997 for (x = TREE_OPERAND (t, 0);
2998 handled_component_p (x);
2999 x = TREE_OPERAND (x, 0))
3002 if ((tem = verify_address (t, x)))
3003 return tem;
3005 if (!(VAR_P (x)
3006 || TREE_CODE (x) == PARM_DECL
3007 || TREE_CODE (x) == RESULT_DECL))
3008 return NULL;
3010 if (!TREE_ADDRESSABLE (x))
3012 error ("address taken, but ADDRESSABLE bit not set");
3013 return x;
3016 break;
3019 case COND_EXPR:
3020 x = COND_EXPR_COND (t);
3021 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3023 error ("non-integral used in condition");
3024 return x;
3026 if (!is_gimple_condexpr (x))
3028 error ("invalid conditional operand");
3029 return x;
3031 break;
3033 case NON_LVALUE_EXPR:
3034 case TRUTH_NOT_EXPR:
3035 gcc_unreachable ();
3037 CASE_CONVERT:
3038 case FIX_TRUNC_EXPR:
3039 case FLOAT_EXPR:
3040 case NEGATE_EXPR:
3041 case ABS_EXPR:
3042 case BIT_NOT_EXPR:
3043 CHECK_OP (0, "invalid operand to unary operator");
3044 break;
3046 case REALPART_EXPR:
3047 case IMAGPART_EXPR:
3048 case BIT_FIELD_REF:
3049 if (!is_gimple_reg_type (TREE_TYPE (t)))
3051 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3052 return t;
3055 if (TREE_CODE (t) == BIT_FIELD_REF)
3057 tree t0 = TREE_OPERAND (t, 0);
3058 tree t1 = TREE_OPERAND (t, 1);
3059 tree t2 = TREE_OPERAND (t, 2);
3060 if (!tree_fits_uhwi_p (t1)
3061 || !tree_fits_uhwi_p (t2)
3062 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3063 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3065 error ("invalid position or size operand to BIT_FIELD_REF");
3066 return t;
3068 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3069 && (TYPE_PRECISION (TREE_TYPE (t))
3070 != tree_to_uhwi (t1)))
3072 error ("integral result type precision does not match "
3073 "field size of BIT_FIELD_REF");
3074 return t;
3076 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3077 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3078 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3079 != tree_to_uhwi (t1)))
3081 error ("mode size of non-integral result does not "
3082 "match field size of BIT_FIELD_REF");
3083 return t;
3085 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3086 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3087 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3089 error ("position plus size exceeds size of referenced object in "
3090 "BIT_FIELD_REF");
3091 return t;
3094 t = TREE_OPERAND (t, 0);
3096 /* Fall-through. */
3097 case COMPONENT_REF:
3098 case ARRAY_REF:
3099 case ARRAY_RANGE_REF:
3100 case VIEW_CONVERT_EXPR:
3101 /* We have a nest of references. Verify that each of the operands
3102 that determine where to reference is either a constant or a variable,
3103 verify that the base is valid, and then show we've already checked
3104 the subtrees. */
3105 while (handled_component_p (t))
3107 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3108 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3109 else if (TREE_CODE (t) == ARRAY_REF
3110 || TREE_CODE (t) == ARRAY_RANGE_REF)
3112 CHECK_OP (1, "invalid array index");
3113 if (TREE_OPERAND (t, 2))
3114 CHECK_OP (2, "invalid array lower bound");
3115 if (TREE_OPERAND (t, 3))
3116 CHECK_OP (3, "invalid array stride");
3118 else if (TREE_CODE (t) == BIT_FIELD_REF
3119 || TREE_CODE (t) == REALPART_EXPR
3120 || TREE_CODE (t) == IMAGPART_EXPR)
3122 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3123 "REALPART_EXPR");
3124 return t;
3127 t = TREE_OPERAND (t, 0);
3130 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3132 error ("invalid reference prefix");
3133 return t;
3135 walk_tree (&t, verify_expr, data, NULL);
3136 *walk_subtrees = 0;
3137 break;
3138 case PLUS_EXPR:
3139 case MINUS_EXPR:
3140 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3141 should be done using POINTER_PLUS_EXPR. */
3142 if (POINTER_TYPE_P (TREE_TYPE (t)))
3144 error ("invalid operand to plus/minus, type is a pointer");
3145 return t;
3147 CHECK_OP (0, "invalid operand to binary operator");
3148 CHECK_OP (1, "invalid operand to binary operator");
3149 break;
3151 case POINTER_DIFF_EXPR:
3152 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3153 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3155 error ("invalid operand to pointer diff, operand is not a pointer");
3156 return t;
3158 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3159 || TYPE_UNSIGNED (TREE_TYPE (t))
3160 || (TYPE_PRECISION (TREE_TYPE (t))
3161 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3163 error ("invalid type for pointer diff");
3164 return t;
3166 CHECK_OP (0, "invalid operand to pointer diff");
3167 CHECK_OP (1, "invalid operand to pointer diff");
3168 break;
3170 case POINTER_PLUS_EXPR:
3171 /* Check to make sure the first operand is a pointer or reference type. */
3172 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3174 error ("invalid operand to pointer plus, first operand is not a pointer");
3175 return t;
3177 /* Check to make sure the second operand is a ptrofftype. */
3178 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3180 error ("invalid operand to pointer plus, second operand is not an "
3181 "integer type of appropriate width");
3182 return t;
3184 /* FALLTHROUGH */
3185 case LT_EXPR:
3186 case LE_EXPR:
3187 case GT_EXPR:
3188 case GE_EXPR:
3189 case EQ_EXPR:
3190 case NE_EXPR:
3191 case UNORDERED_EXPR:
3192 case ORDERED_EXPR:
3193 case UNLT_EXPR:
3194 case UNLE_EXPR:
3195 case UNGT_EXPR:
3196 case UNGE_EXPR:
3197 case UNEQ_EXPR:
3198 case LTGT_EXPR:
3199 case MULT_EXPR:
3200 case TRUNC_DIV_EXPR:
3201 case CEIL_DIV_EXPR:
3202 case FLOOR_DIV_EXPR:
3203 case ROUND_DIV_EXPR:
3204 case TRUNC_MOD_EXPR:
3205 case CEIL_MOD_EXPR:
3206 case FLOOR_MOD_EXPR:
3207 case ROUND_MOD_EXPR:
3208 case RDIV_EXPR:
3209 case EXACT_DIV_EXPR:
3210 case MIN_EXPR:
3211 case MAX_EXPR:
3212 case LSHIFT_EXPR:
3213 case RSHIFT_EXPR:
3214 case LROTATE_EXPR:
3215 case RROTATE_EXPR:
3216 case BIT_IOR_EXPR:
3217 case BIT_XOR_EXPR:
3218 case BIT_AND_EXPR:
3219 CHECK_OP (0, "invalid operand to binary operator");
3220 CHECK_OP (1, "invalid operand to binary operator");
3221 break;
3223 case CONSTRUCTOR:
3224 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3225 *walk_subtrees = 0;
3226 break;
3228 case CASE_LABEL_EXPR:
3229 if (CASE_CHAIN (t))
3231 error ("invalid CASE_CHAIN");
3232 return t;
3234 break;
3236 default:
3237 break;
3239 return NULL;
3241 #undef CHECK_OP
3245 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3246 Returns true if there is an error, otherwise false. */
3248 static bool
3249 verify_types_in_gimple_min_lval (tree expr)
3251 tree op;
3253 if (is_gimple_id (expr))
3254 return false;
3256 if (TREE_CODE (expr) != TARGET_MEM_REF
3257 && TREE_CODE (expr) != MEM_REF)
3259 error ("invalid expression for min lvalue");
3260 return true;
3263 /* TARGET_MEM_REFs are strange beasts. */
3264 if (TREE_CODE (expr) == TARGET_MEM_REF)
3265 return false;
3267 op = TREE_OPERAND (expr, 0);
3268 if (!is_gimple_val (op))
3270 error ("invalid operand in indirect reference");
3271 debug_generic_stmt (op);
3272 return true;
3274 /* Memory references now generally can involve a value conversion. */
3276 return false;
3279 /* Verify if EXPR is a valid GIMPLE reference expression. If
3280 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3281 if there is an error, otherwise false. */
3283 static bool
3284 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3286 while (handled_component_p (expr))
3288 tree op = TREE_OPERAND (expr, 0);
3290 if (TREE_CODE (expr) == ARRAY_REF
3291 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3293 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3294 || (TREE_OPERAND (expr, 2)
3295 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3296 || (TREE_OPERAND (expr, 3)
3297 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3299 error ("invalid operands to array reference");
3300 debug_generic_stmt (expr);
3301 return true;
3305 /* Verify if the reference array element types are compatible. */
3306 if (TREE_CODE (expr) == ARRAY_REF
3307 && !useless_type_conversion_p (TREE_TYPE (expr),
3308 TREE_TYPE (TREE_TYPE (op))))
3310 error ("type mismatch in array reference");
3311 debug_generic_stmt (TREE_TYPE (expr));
3312 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3313 return true;
3315 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3316 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3317 TREE_TYPE (TREE_TYPE (op))))
3319 error ("type mismatch in array range reference");
3320 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3321 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3322 return true;
3325 if ((TREE_CODE (expr) == REALPART_EXPR
3326 || TREE_CODE (expr) == IMAGPART_EXPR)
3327 && !useless_type_conversion_p (TREE_TYPE (expr),
3328 TREE_TYPE (TREE_TYPE (op))))
3330 error ("type mismatch in real/imagpart reference");
3331 debug_generic_stmt (TREE_TYPE (expr));
3332 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3333 return true;
3336 if (TREE_CODE (expr) == COMPONENT_REF
3337 && !useless_type_conversion_p (TREE_TYPE (expr),
3338 TREE_TYPE (TREE_OPERAND (expr, 1))))
3340 error ("type mismatch in component reference");
3341 debug_generic_stmt (TREE_TYPE (expr));
3342 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3343 return true;
3346 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3348 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3349 that their operand is not an SSA name or an invariant when
3350 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3351 bug). Otherwise there is nothing to verify; gross mismatches at
3352 most invoke undefined behavior. */
3353 if (require_lvalue
3354 && (TREE_CODE (op) == SSA_NAME
3355 || is_gimple_min_invariant (op)))
3357 error ("conversion of an SSA_NAME on the left hand side");
3358 debug_generic_stmt (expr);
3359 return true;
3361 else if (TREE_CODE (op) == SSA_NAME
3362 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3364 error ("conversion of register to a different size");
3365 debug_generic_stmt (expr);
3366 return true;
3368 else if (!handled_component_p (op))
3369 return false;
3372 expr = op;
3375 if (TREE_CODE (expr) == MEM_REF)
3377 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3379 error ("invalid address operand in MEM_REF");
3380 debug_generic_stmt (expr);
3381 return true;
3383 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3384 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3386 error ("invalid offset operand in MEM_REF");
3387 debug_generic_stmt (expr);
3388 return true;
3391 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3393 if (!TMR_BASE (expr)
3394 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3396 error ("invalid address operand in TARGET_MEM_REF");
3397 return true;
3399 if (!TMR_OFFSET (expr)
3400 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3401 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3403 error ("invalid offset operand in TARGET_MEM_REF");
3404 debug_generic_stmt (expr);
3405 return true;
3409 return ((require_lvalue || !is_gimple_min_invariant (expr))
3410 && verify_types_in_gimple_min_lval (expr));
3413 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3414 list of pointer-to types that is trivially convertible to DEST. */
3416 static bool
3417 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3419 tree src;
3421 if (!TYPE_POINTER_TO (src_obj))
3422 return true;
3424 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3425 if (useless_type_conversion_p (dest, src))
3426 return true;
3428 return false;
3431 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3432 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3434 static bool
3435 valid_fixed_convert_types_p (tree type1, tree type2)
3437 return (FIXED_POINT_TYPE_P (type1)
3438 && (INTEGRAL_TYPE_P (type2)
3439 || SCALAR_FLOAT_TYPE_P (type2)
3440 || FIXED_POINT_TYPE_P (type2)));
3443 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3444 is a problem, otherwise false. */
3446 static bool
3447 verify_gimple_call (gcall *stmt)
3449 tree fn = gimple_call_fn (stmt);
3450 tree fntype, fndecl;
3451 unsigned i;
3453 if (gimple_call_internal_p (stmt))
3455 if (fn)
3457 error ("gimple call has two targets");
3458 debug_generic_stmt (fn);
3459 return true;
3461 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3462 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3464 return false;
3467 else
3469 if (!fn)
3471 error ("gimple call has no target");
3472 return true;
3476 if (fn && !is_gimple_call_addr (fn))
3478 error ("invalid function in gimple call");
3479 debug_generic_stmt (fn);
3480 return true;
3483 if (fn
3484 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3485 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3486 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3488 error ("non-function in gimple call");
3489 return true;
3492 fndecl = gimple_call_fndecl (stmt);
3493 if (fndecl
3494 && TREE_CODE (fndecl) == FUNCTION_DECL
3495 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3496 && !DECL_PURE_P (fndecl)
3497 && !TREE_READONLY (fndecl))
3499 error ("invalid pure const state for function");
3500 return true;
3503 tree lhs = gimple_call_lhs (stmt);
3504 if (lhs
3505 && (!is_gimple_lvalue (lhs)
3506 || verify_types_in_gimple_reference (lhs, true)))
3508 error ("invalid LHS in gimple call");
3509 return true;
3512 if (gimple_call_ctrl_altering_p (stmt)
3513 && gimple_call_noreturn_p (stmt)
3514 && should_remove_lhs_p (lhs))
3516 error ("LHS in noreturn call");
3517 return true;
3520 fntype = gimple_call_fntype (stmt);
3521 if (fntype
3522 && lhs
3523 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3524 /* ??? At least C++ misses conversions at assignments from
3525 void * call results.
3526 For now simply allow arbitrary pointer type conversions. */
3527 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3528 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3530 error ("invalid conversion in gimple call");
3531 debug_generic_stmt (TREE_TYPE (lhs));
3532 debug_generic_stmt (TREE_TYPE (fntype));
3533 return true;
3536 if (gimple_call_chain (stmt)
3537 && !is_gimple_val (gimple_call_chain (stmt)))
3539 error ("invalid static chain in gimple call");
3540 debug_generic_stmt (gimple_call_chain (stmt));
3541 return true;
3544 /* If there is a static chain argument, the call should either be
3545 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3546 if (gimple_call_chain (stmt)
3547 && fndecl
3548 && !DECL_STATIC_CHAIN (fndecl))
3550 error ("static chain with function that doesn%'t use one");
3551 return true;
3554 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3556 switch (DECL_FUNCTION_CODE (fndecl))
3558 case BUILT_IN_UNREACHABLE:
3559 case BUILT_IN_TRAP:
3560 if (gimple_call_num_args (stmt) > 0)
3562 /* Built-in unreachable with parameters might not be caught by
3563 the undefined behavior sanitizer. Front ends do check that users do not
3564 call them that way but we also produce calls to
3565 __builtin_unreachable internally, for example when IPA figures
3566 out a call cannot happen in a legal program. In such cases,
3567 we must make sure arguments are stripped off. */
3568 error ("__builtin_unreachable or __builtin_trap call with "
3569 "arguments");
3570 return true;
3572 break;
3573 default:
3574 break;
3578 /* ??? The C frontend passes unpromoted arguments in case it
3579 didn't see a function declaration before the call. So for now
3580 leave the call arguments mostly unverified. Once we gimplify
3581 unit-at-a-time we have a chance to fix this. */
3583 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3585 tree arg = gimple_call_arg (stmt, i);
3586 if ((is_gimple_reg_type (TREE_TYPE (arg))
3587 && !is_gimple_val (arg))
3588 || (!is_gimple_reg_type (TREE_TYPE (arg))
3589 && !is_gimple_lvalue (arg)))
3591 error ("invalid argument to gimple call");
3592 debug_generic_expr (arg);
3593 return true;
3597 return false;
3600 /* Verifies the gimple comparison with the result type TYPE and
3601 the operands OP0 and OP1; the comparison code is CODE. */
3603 static bool
3604 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3606 tree op0_type = TREE_TYPE (op0);
3607 tree op1_type = TREE_TYPE (op1);
3609 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3611 error ("invalid operands in gimple comparison");
3612 return true;
3615 /* For comparisons we do not have the operation's type as the
3616 effective type the comparison is carried out in. Instead
3617 we require that either the first operand is trivially
3618 convertible into the second, or the other way around.
3619 Because we special-case pointers to void we allow
3620 comparisons of pointers with the same mode as well. */
3621 if (!useless_type_conversion_p (op0_type, op1_type)
3622 && !useless_type_conversion_p (op1_type, op0_type)
3623 && (!POINTER_TYPE_P (op0_type)
3624 || !POINTER_TYPE_P (op1_type)
3625 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3627 error ("mismatching comparison operand types");
3628 debug_generic_expr (op0_type);
3629 debug_generic_expr (op1_type);
3630 return true;
3633 /* The resulting type of a comparison may be an effective boolean type. */
3634 if (INTEGRAL_TYPE_P (type)
3635 && (TREE_CODE (type) == BOOLEAN_TYPE
3636 || TYPE_PRECISION (type) == 1))
3638 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3639 || TREE_CODE (op1_type) == VECTOR_TYPE)
3640 && code != EQ_EXPR && code != NE_EXPR
3641 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3642 && !VECTOR_INTEGER_TYPE_P (op0_type))
3644 error ("unsupported operation or type for vector comparison"
3645 " returning a boolean");
3646 debug_generic_expr (op0_type);
3647 debug_generic_expr (op1_type);
3648 return true;
3651 /* Or a boolean vector type with the same element count
3652 as the comparison operand types. */
3653 else if (TREE_CODE (type) == VECTOR_TYPE
3654 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3656 if (TREE_CODE (op0_type) != VECTOR_TYPE
3657 || TREE_CODE (op1_type) != VECTOR_TYPE)
3659 error ("non-vector operands in vector comparison");
3660 debug_generic_expr (op0_type);
3661 debug_generic_expr (op1_type);
3662 return true;
3665 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3667 error ("invalid vector comparison resulting type");
3668 debug_generic_expr (type);
3669 return true;
3672 else
3674 error ("bogus comparison result type");
3675 debug_generic_expr (type);
3676 return true;
3679 return false;
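/* For example (illustrative): "_1 = x_2 < y_3" with compatible scalar
   operand types and a boolean-typed _1 is accepted, and so is a
   comparison of two V4SI vectors yielding a four-element boolean
   vector; an ordered comparison of float vectors that claims a scalar
   boolean result is rejected above.  */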
3682 /* Verify a gimple assignment statement STMT with a unary rhs.
3683 Returns true if anything is wrong. */
3685 static bool
3686 verify_gimple_assign_unary (gassign *stmt)
3688 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3689 tree lhs = gimple_assign_lhs (stmt);
3690 tree lhs_type = TREE_TYPE (lhs);
3691 tree rhs1 = gimple_assign_rhs1 (stmt);
3692 tree rhs1_type = TREE_TYPE (rhs1);
3694 if (!is_gimple_reg (lhs))
3696 error ("non-register as LHS of unary operation");
3697 return true;
3700 if (!is_gimple_val (rhs1))
3702 error ("invalid operand in unary operation");
3703 return true;
3706 /* First handle conversions. */
3707 switch (rhs_code)
3709 CASE_CONVERT:
3711 /* Allow conversions from pointer type to integral type only if
3712 there is no sign or zero extension involved.
3713 For targets where the precision of ptrofftype doesn't match that
3714 of pointers we need to allow arbitrary conversions to ptrofftype. */
3715 if ((POINTER_TYPE_P (lhs_type)
3716 && INTEGRAL_TYPE_P (rhs1_type))
3717 || (POINTER_TYPE_P (rhs1_type)
3718 && INTEGRAL_TYPE_P (lhs_type)
3719 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3720 || ptrofftype_p (sizetype))))
3721 return false;
3723 /* Allow conversion from integral to offset type and vice versa. */
3724 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3725 && INTEGRAL_TYPE_P (rhs1_type))
3726 || (INTEGRAL_TYPE_P (lhs_type)
3727 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3728 return false;
3730 /* Otherwise assert we are converting between types of the
3731 same kind. */
3732 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3734 error ("invalid types in nop conversion");
3735 debug_generic_expr (lhs_type);
3736 debug_generic_expr (rhs1_type);
3737 return true;
3740 return false;
3743 case ADDR_SPACE_CONVERT_EXPR:
3745 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3746 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3747 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3749 error ("invalid types in address space conversion");
3750 debug_generic_expr (lhs_type);
3751 debug_generic_expr (rhs1_type);
3752 return true;
3755 return false;
3758 case FIXED_CONVERT_EXPR:
3760 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3761 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3763 error ("invalid types in fixed-point conversion");
3764 debug_generic_expr (lhs_type);
3765 debug_generic_expr (rhs1_type);
3766 return true;
3769 return false;
3772 case FLOAT_EXPR:
3774 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3775 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3776 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3778 error ("invalid types in conversion to floating point");
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs1_type);
3781 return true;
3784 return false;
3787 case FIX_TRUNC_EXPR:
3789 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3790 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3791 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3793 error ("invalid types in conversion to integer");
3794 debug_generic_expr (lhs_type);
3795 debug_generic_expr (rhs1_type);
3796 return true;
3799 return false;
3802 case VEC_UNPACK_HI_EXPR:
3803 case VEC_UNPACK_LO_EXPR:
3804 case VEC_UNPACK_FLOAT_HI_EXPR:
3805 case VEC_UNPACK_FLOAT_LO_EXPR:
3806 /* FIXME. */
3807 return false;
3809 case NEGATE_EXPR:
3810 case ABS_EXPR:
3811 case BIT_NOT_EXPR:
3812 case PAREN_EXPR:
3813 case CONJ_EXPR:
3814 break;
3816 default:
3817 gcc_unreachable ();
3820 /* For the remaining codes assert there is no conversion involved. */
3821 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3823 error ("non-trivial conversion in unary operation");
3824 debug_generic_expr (lhs_type);
3825 debug_generic_expr (rhs1_type);
3826 return true;
3829 return false;
3832 /* Verify a gimple assignment statement STMT with a binary rhs.
3833 Returns true if anything is wrong. */
3835 static bool
3836 verify_gimple_assign_binary (gassign *stmt)
3838 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3839 tree lhs = gimple_assign_lhs (stmt);
3840 tree lhs_type = TREE_TYPE (lhs);
3841 tree rhs1 = gimple_assign_rhs1 (stmt);
3842 tree rhs1_type = TREE_TYPE (rhs1);
3843 tree rhs2 = gimple_assign_rhs2 (stmt);
3844 tree rhs2_type = TREE_TYPE (rhs2);
3846 if (!is_gimple_reg (lhs))
3848 error ("non-register as LHS of binary operation");
3849 return true;
3852 if (!is_gimple_val (rhs1)
3853 || !is_gimple_val (rhs2))
3855 error ("invalid operands in binary operation");
3856 return true;
3859 /* First handle operations that involve different types. */
3860 switch (rhs_code)
3862 case COMPLEX_EXPR:
3864 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3865 || !(INTEGRAL_TYPE_P (rhs1_type)
3866 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3867 || !(INTEGRAL_TYPE_P (rhs2_type)
3868 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3870 error ("type mismatch in complex expression");
3871 debug_generic_expr (lhs_type);
3872 debug_generic_expr (rhs1_type);
3873 debug_generic_expr (rhs2_type);
3874 return true;
3877 return false;
3880 case LSHIFT_EXPR:
3881 case RSHIFT_EXPR:
3882 case LROTATE_EXPR:
3883 case RROTATE_EXPR:
3885 /* Shifts and rotates are ok on integral types, fixed point
3886 types and integer vector types. */
3887 if ((!INTEGRAL_TYPE_P (rhs1_type)
3888 && !FIXED_POINT_TYPE_P (rhs1_type)
3889 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3890 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3891 || (!INTEGRAL_TYPE_P (rhs2_type)
3892 /* Vector shifts of vectors are also ok. */
3893 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3894 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3895 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3896 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3897 || !useless_type_conversion_p (lhs_type, rhs1_type))
3899 error ("type mismatch in shift expression");
3900 debug_generic_expr (lhs_type);
3901 debug_generic_expr (rhs1_type);
3902 debug_generic_expr (rhs2_type);
3903 return true;
3906 return false;
3909 case WIDEN_LSHIFT_EXPR:
3911 if (!INTEGRAL_TYPE_P (lhs_type)
3912 || !INTEGRAL_TYPE_P (rhs1_type)
3913 || TREE_CODE (rhs2) != INTEGER_CST
3914 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3916 error ("type mismatch in widening vector shift expression");
3917 debug_generic_expr (lhs_type);
3918 debug_generic_expr (rhs1_type);
3919 debug_generic_expr (rhs2_type);
3920 return true;
3923 return false;
3926 case VEC_WIDEN_LSHIFT_HI_EXPR:
3927 case VEC_WIDEN_LSHIFT_LO_EXPR:
3929 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3930 || TREE_CODE (lhs_type) != VECTOR_TYPE
3931 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3932 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3933 || TREE_CODE (rhs2) != INTEGER_CST
3934 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3935 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3937 error ("type mismatch in widening vector shift expression");
3938 debug_generic_expr (lhs_type);
3939 debug_generic_expr (rhs1_type);
3940 debug_generic_expr (rhs2_type);
3941 return true;
3944 return false;
3947 case PLUS_EXPR:
3948 case MINUS_EXPR:
3950 tree lhs_etype = lhs_type;
3951 tree rhs1_etype = rhs1_type;
3952 tree rhs2_etype = rhs2_type;
3953 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3955 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3956 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3958 error ("invalid non-vector operands to vector valued plus");
3959 return true;
3961 lhs_etype = TREE_TYPE (lhs_type);
3962 rhs1_etype = TREE_TYPE (rhs1_type);
3963 rhs2_etype = TREE_TYPE (rhs2_type);
3965 if (POINTER_TYPE_P (lhs_etype)
3966 || POINTER_TYPE_P (rhs1_etype)
3967 || POINTER_TYPE_P (rhs2_etype))
3969 error ("invalid (pointer) operands to plus/minus");
3970 return true;
3973 /* Continue with generic binary expression handling. */
3974 break;
3977 case POINTER_PLUS_EXPR:
3979 if (!POINTER_TYPE_P (rhs1_type)
3980 || !useless_type_conversion_p (lhs_type, rhs1_type)
3981 || !ptrofftype_p (rhs2_type))
3983 error ("type mismatch in pointer plus expression");
3984 debug_generic_stmt (lhs_type);
3985 debug_generic_stmt (rhs1_type);
3986 debug_generic_stmt (rhs2_type);
3987 return true;
3990 return false;
3993 case POINTER_DIFF_EXPR:
3995 if (!POINTER_TYPE_P (rhs1_type)
3996 || !POINTER_TYPE_P (rhs2_type)
3997 || !types_compatible_p (rhs1_type, rhs2_type)
3998 || TREE_CODE (lhs_type) != INTEGER_TYPE
3999 || TYPE_UNSIGNED (lhs_type)
4000 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4002 error ("type mismatch in pointer diff expression");
4003 debug_generic_stmt (lhs_type);
4004 debug_generic_stmt (rhs1_type);
4005 debug_generic_stmt (rhs2_type);
4006 return true;
4009 return false;
4012 case TRUTH_ANDIF_EXPR:
4013 case TRUTH_ORIF_EXPR:
4014 case TRUTH_AND_EXPR:
4015 case TRUTH_OR_EXPR:
4016 case TRUTH_XOR_EXPR:
4018 gcc_unreachable ();
4020 case LT_EXPR:
4021 case LE_EXPR:
4022 case GT_EXPR:
4023 case GE_EXPR:
4024 case EQ_EXPR:
4025 case NE_EXPR:
4026 case UNORDERED_EXPR:
4027 case ORDERED_EXPR:
4028 case UNLT_EXPR:
4029 case UNLE_EXPR:
4030 case UNGT_EXPR:
4031 case UNGE_EXPR:
4032 case UNEQ_EXPR:
4033 case LTGT_EXPR:
4034 /* Comparisons are also binary, but the result type is not
4035 connected to the operand types. */
4036 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4038 case WIDEN_MULT_EXPR:
4039 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4040 return true;
4041 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4042 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4044 case WIDEN_SUM_EXPR:
4046 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4047 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4048 && ((!INTEGRAL_TYPE_P (rhs1_type)
4049 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4050 || (!INTEGRAL_TYPE_P (lhs_type)
4051 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4052 || !useless_type_conversion_p (lhs_type, rhs2_type)
4053 || (GET_MODE_SIZE (element_mode (rhs2_type))
4054 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4056 error ("type mismatch in widening sum reduction");
4057 debug_generic_expr (lhs_type);
4058 debug_generic_expr (rhs1_type);
4059 debug_generic_expr (rhs2_type);
4060 return true;
4062 return false;
4065 case VEC_WIDEN_MULT_HI_EXPR:
4066 case VEC_WIDEN_MULT_LO_EXPR:
4067 case VEC_WIDEN_MULT_EVEN_EXPR:
4068 case VEC_WIDEN_MULT_ODD_EXPR:
4070 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4071 || TREE_CODE (lhs_type) != VECTOR_TYPE
4072 || !types_compatible_p (rhs1_type, rhs2_type)
4073 || (GET_MODE_SIZE (element_mode (lhs_type))
4074 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4076 error ("type mismatch in vector widening multiplication");
4077 debug_generic_expr (lhs_type);
4078 debug_generic_expr (rhs1_type);
4079 debug_generic_expr (rhs2_type);
4080 return true;
4082 return false;
4085 case VEC_PACK_TRUNC_EXPR:
4086 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4087 vector boolean types. */
4088 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4089 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4090 && types_compatible_p (rhs1_type, rhs2_type)
4091 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4092 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4093 return false;
4095 /* Fallthru. */
4096 case VEC_PACK_SAT_EXPR:
4097 case VEC_PACK_FIX_TRUNC_EXPR:
4099 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4100 || TREE_CODE (lhs_type) != VECTOR_TYPE
4101 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4102 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4103 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4104 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4105 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4106 || !types_compatible_p (rhs1_type, rhs2_type)
4107 || (GET_MODE_SIZE (element_mode (rhs1_type))
4108 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4110 error ("type mismatch in vector pack expression");
4111 debug_generic_expr (lhs_type);
4112 debug_generic_expr (rhs1_type);
4113 debug_generic_expr (rhs2_type);
4114 return true;
4117 return false;
4120 case MULT_EXPR:
4121 case MULT_HIGHPART_EXPR:
4122 case TRUNC_DIV_EXPR:
4123 case CEIL_DIV_EXPR:
4124 case FLOOR_DIV_EXPR:
4125 case ROUND_DIV_EXPR:
4126 case TRUNC_MOD_EXPR:
4127 case CEIL_MOD_EXPR:
4128 case FLOOR_MOD_EXPR:
4129 case ROUND_MOD_EXPR:
4130 case RDIV_EXPR:
4131 case EXACT_DIV_EXPR:
4132 case MIN_EXPR:
4133 case MAX_EXPR:
4134 case BIT_IOR_EXPR:
4135 case BIT_XOR_EXPR:
4136 case BIT_AND_EXPR:
4137 /* Continue with generic binary expression handling. */
4138 break;
4140 default:
4141 gcc_unreachable ();
4144 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4145 || !useless_type_conversion_p (lhs_type, rhs2_type))
4147 error ("type mismatch in binary expression");
4148 debug_generic_stmt (lhs_type);
4149 debug_generic_stmt (rhs1_type);
4150 debug_generic_stmt (rhs2_type);
4151 return true;
4154 return false;
4157 /* Verify a gimple assignment statement STMT with a ternary rhs.
4158 Returns true if anything is wrong. */
4160 static bool
4161 verify_gimple_assign_ternary (gassign *stmt)
4163 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4164 tree lhs = gimple_assign_lhs (stmt);
4165 tree lhs_type = TREE_TYPE (lhs);
4166 tree rhs1 = gimple_assign_rhs1 (stmt);
4167 tree rhs1_type = TREE_TYPE (rhs1);
4168 tree rhs2 = gimple_assign_rhs2 (stmt);
4169 tree rhs2_type = TREE_TYPE (rhs2);
4170 tree rhs3 = gimple_assign_rhs3 (stmt);
4171 tree rhs3_type = TREE_TYPE (rhs3);
4173 if (!is_gimple_reg (lhs))
4175 error ("non-register as LHS of ternary operation");
4176 return true;
4179 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4180 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4181 || !is_gimple_val (rhs2)
4182 || !is_gimple_val (rhs3))
4184 error ("invalid operands in ternary operation");
4185 return true;
4188 /* First handle operations that involve different types. */
4189 switch (rhs_code)
4191 case WIDEN_MULT_PLUS_EXPR:
4192 case WIDEN_MULT_MINUS_EXPR:
4193 if ((!INTEGRAL_TYPE_P (rhs1_type)
4194 && !FIXED_POINT_TYPE_P (rhs1_type))
4195 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4196 || !useless_type_conversion_p (lhs_type, rhs3_type)
4197 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4198 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4200 error ("type mismatch in widening multiply-accumulate expression");
4201 debug_generic_expr (lhs_type);
4202 debug_generic_expr (rhs1_type);
4203 debug_generic_expr (rhs2_type);
4204 debug_generic_expr (rhs3_type);
4205 return true;
4207 break;
4209 case FMA_EXPR:
4210 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4211 || !useless_type_conversion_p (lhs_type, rhs2_type)
4212 || !useless_type_conversion_p (lhs_type, rhs3_type))
4214 error ("type mismatch in fused multiply-add expression");
4215 debug_generic_expr (lhs_type);
4216 debug_generic_expr (rhs1_type);
4217 debug_generic_expr (rhs2_type);
4218 debug_generic_expr (rhs3_type);
4219 return true;
4221 break;
4223 case VEC_COND_EXPR:
4224 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4225 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4226 != TYPE_VECTOR_SUBPARTS (lhs_type))
4228 error ("the first argument of a VEC_COND_EXPR must be of a "
4229 "boolean vector type of the same number of elements "
4230 "as the result");
4231 debug_generic_expr (lhs_type);
4232 debug_generic_expr (rhs1_type);
4233 return true;
4235 /* Fallthrough. */
4236 case COND_EXPR:
4237 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4238 || !useless_type_conversion_p (lhs_type, rhs3_type))
4240 error ("type mismatch in conditional expression");
4241 debug_generic_expr (lhs_type);
4242 debug_generic_expr (rhs2_type);
4243 debug_generic_expr (rhs3_type);
4244 return true;
4246 break;
4248 case VEC_PERM_EXPR:
4249 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4250 || !useless_type_conversion_p (lhs_type, rhs2_type))
4252 error ("type mismatch in vector permute expression");
4253 debug_generic_expr (lhs_type);
4254 debug_generic_expr (rhs1_type);
4255 debug_generic_expr (rhs2_type);
4256 debug_generic_expr (rhs3_type);
4257 return true;
4260 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4261 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4262 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4264 error ("vector types expected in vector permute expression");
4265 debug_generic_expr (lhs_type);
4266 debug_generic_expr (rhs1_type);
4267 debug_generic_expr (rhs2_type);
4268 debug_generic_expr (rhs3_type);
4269 return true;
4272 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4273 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4274 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4275 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4276 != TYPE_VECTOR_SUBPARTS (lhs_type))
4278 error ("vectors with different element number found "
4279 "in vector permute expression");
4280 debug_generic_expr (lhs_type);
4281 debug_generic_expr (rhs1_type);
4282 debug_generic_expr (rhs2_type);
4283 debug_generic_expr (rhs3_type);
4284 return true;
4287 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4288 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4289 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4291 error ("invalid mask type in vector permute expression");
4292 debug_generic_expr (lhs_type);
4293 debug_generic_expr (rhs1_type);
4294 debug_generic_expr (rhs2_type);
4295 debug_generic_expr (rhs3_type);
4296 return true;
4299 return false;
4301 case SAD_EXPR:
4302 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4303 || !useless_type_conversion_p (lhs_type, rhs3_type)
4304 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4305 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4307 error ("type mismatch in sad expression");
4308 debug_generic_expr (lhs_type);
4309 debug_generic_expr (rhs1_type);
4310 debug_generic_expr (rhs2_type);
4311 debug_generic_expr (rhs3_type);
4312 return true;
4315 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4316 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4317 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4319 error ("vector types expected in sad expression");
4320 debug_generic_expr (lhs_type);
4321 debug_generic_expr (rhs1_type);
4322 debug_generic_expr (rhs2_type);
4323 debug_generic_expr (rhs3_type);
4324 return true;
4327 return false;
4329 case BIT_INSERT_EXPR:
4330 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4332 error ("type mismatch in BIT_INSERT_EXPR");
4333 debug_generic_expr (lhs_type);
4334 debug_generic_expr (rhs1_type);
4335 return true;
4337 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4338 && INTEGRAL_TYPE_P (rhs2_type))
4339 || (VECTOR_TYPE_P (rhs1_type)
4340 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4342 error ("not allowed type combination in BIT_INSERT_EXPR");
4343 debug_generic_expr (rhs1_type);
4344 debug_generic_expr (rhs2_type);
4345 return true;
4347 if (! tree_fits_uhwi_p (rhs3)
4348 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4349 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4351 error ("invalid position or size in BIT_INSERT_EXPR");
4352 return true;
4354 if (INTEGRAL_TYPE_P (rhs1_type))
4356 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4357 if (bitpos >= TYPE_PRECISION (rhs1_type)
4358 || (bitpos + TYPE_PRECISION (rhs2_type)
4359 > TYPE_PRECISION (rhs1_type)))
4361 error ("insertion out of range in BIT_INSERT_EXPR");
4362 return true;
4365 else if (VECTOR_TYPE_P (rhs1_type))
4367 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4368 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4369 if (bitpos % bitsize != 0)
4371 error ("vector insertion not at element boundary");
4372 return true;
4375 return false;
4377 case DOT_PROD_EXPR:
4379 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4380 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4381 && ((!INTEGRAL_TYPE_P (rhs1_type)
4382 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4383 || (!INTEGRAL_TYPE_P (lhs_type)
4384 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4385 || !types_compatible_p (rhs1_type, rhs2_type)
4386 || !useless_type_conversion_p (lhs_type, rhs3_type)
4387 || (GET_MODE_SIZE (element_mode (rhs3_type))
4388 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4390 error ("type mismatch in dot product reduction");
4391 debug_generic_expr (lhs_type);
4392 debug_generic_expr (rhs1_type);
4393 debug_generic_expr (rhs2_type);
4394 return true;
4396 return false;
4399 case REALIGN_LOAD_EXPR:
4400 /* FIXME. */
4401 return false;
4403 default:
4404 gcc_unreachable ();
4406 return false;
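/* Illustrative only (a sketch of typical gimplifier output): a
   COND_EXPR on the rhs is printed as

     x_5 = _1 ? y_2 : z_3;

   and the COND_EXPR/VEC_COND_EXPR cases above require that the types
   of y_2 and z_3 both trivially convert to the type of x_5, and, for
   VEC_COND_EXPR, that _1 is a boolean vector with as many elements as
   the result. */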
4409 /* Verify a gimple assignment statement STMT with a single rhs.
4410 Returns true if anything is wrong. */
4412 static bool
4413 verify_gimple_assign_single (gassign *stmt)
4415 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4416 tree lhs = gimple_assign_lhs (stmt);
4417 tree lhs_type = TREE_TYPE (lhs);
4418 tree rhs1 = gimple_assign_rhs1 (stmt);
4419 tree rhs1_type = TREE_TYPE (rhs1);
4420 bool res = false;
4422 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4424 error ("non-trivial conversion at assignment");
4425 debug_generic_expr (lhs_type);
4426 debug_generic_expr (rhs1_type);
4427 return true;
4430 if (gimple_clobber_p (stmt)
4431 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4433 error ("non-decl/MEM_REF LHS in clobber statement");
4434 debug_generic_expr (lhs);
4435 return true;
4438 if (handled_component_p (lhs)
4439 || TREE_CODE (lhs) == MEM_REF
4440 || TREE_CODE (lhs) == TARGET_MEM_REF)
4441 res |= verify_types_in_gimple_reference (lhs, true);
4443 /* Special codes we cannot handle via their class. */
4444 switch (rhs_code)
4446 case ADDR_EXPR:
4448 tree op = TREE_OPERAND (rhs1, 0);
4449 if (!is_gimple_addressable (op))
4451 error ("invalid operand in unary expression");
4452 return true;
4455 /* Technically there is no longer a need for matching types, but
4456 gimple hygiene asks for this check. In LTO we can end up
4457 combining incompatible units and thus with addresses of
4458 globals whose type changes to a common one. */
4459 if (!in_lto_p
4460 && !types_compatible_p (TREE_TYPE (op),
4461 TREE_TYPE (TREE_TYPE (rhs1)))
4462 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4463 TREE_TYPE (op)))
4465 error ("type mismatch in address expression");
4466 debug_generic_stmt (TREE_TYPE (rhs1));
4467 debug_generic_stmt (TREE_TYPE (op));
4468 return true;
4471 return verify_types_in_gimple_reference (op, true);
4474 /* tcc_reference */
4475 case INDIRECT_REF:
4476 error ("INDIRECT_REF in gimple IL");
4477 return true;
4479 case COMPONENT_REF:
4480 case BIT_FIELD_REF:
4481 case ARRAY_REF:
4482 case ARRAY_RANGE_REF:
4483 case VIEW_CONVERT_EXPR:
4484 case REALPART_EXPR:
4485 case IMAGPART_EXPR:
4486 case TARGET_MEM_REF:
4487 case MEM_REF:
4488 if (!is_gimple_reg (lhs)
4489 && is_gimple_reg_type (TREE_TYPE (lhs)))
4491 error ("invalid rhs for gimple memory store");
4492 debug_generic_stmt (lhs);
4493 debug_generic_stmt (rhs1);
4494 return true;
4496 return res || verify_types_in_gimple_reference (rhs1, false);
4498 /* tcc_constant */
4499 case SSA_NAME:
4500 case INTEGER_CST:
4501 case REAL_CST:
4502 case FIXED_CST:
4503 case COMPLEX_CST:
4504 case VECTOR_CST:
4505 case STRING_CST:
4506 return res;
4508 /* tcc_declaration */
4509 case CONST_DECL:
4510 return res;
4511 case VAR_DECL:
4512 case PARM_DECL:
4513 if (!is_gimple_reg (lhs)
4514 && !is_gimple_reg (rhs1)
4515 && is_gimple_reg_type (TREE_TYPE (lhs)))
4517 error ("invalid rhs for gimple memory store");
4518 debug_generic_stmt (lhs);
4519 debug_generic_stmt (rhs1);
4520 return true;
4522 return res;
4524 case CONSTRUCTOR:
4525 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4527 unsigned int i;
4528 tree elt_i, elt_v, elt_t = NULL_TREE;
4530 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4531 return res;
4532 /* For vector CONSTRUCTORs we require that either it is an empty
4533 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4534 (then the element count must be correct to cover the whole
4535 outer vector and the index must be NULL on all elements), or it
4536 is a CONSTRUCTOR of scalar elements, where as an exception we
4537 allow a smaller number of elements (assuming zero filling) and
4538 consecutive indexes as compared to NULL indexes (such
4539 CONSTRUCTORs can appear in the IL from FEs). */
4540 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4542 if (elt_t == NULL_TREE)
4544 elt_t = TREE_TYPE (elt_v);
4545 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4547 tree elt_t = TREE_TYPE (elt_v);
4548 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4549 TREE_TYPE (elt_t)))
4551 error ("incorrect type of vector CONSTRUCTOR"
4552 " elements");
4553 debug_generic_stmt (rhs1);
4554 return true;
4556 else if (CONSTRUCTOR_NELTS (rhs1)
4557 * TYPE_VECTOR_SUBPARTS (elt_t)
4558 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4560 error ("incorrect number of vector CONSTRUCTOR"
4561 " elements");
4562 debug_generic_stmt (rhs1);
4563 return true;
4566 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4567 elt_t))
4569 error ("incorrect type of vector CONSTRUCTOR elements");
4570 debug_generic_stmt (rhs1);
4571 return true;
4573 else if (CONSTRUCTOR_NELTS (rhs1)
4574 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4576 error ("incorrect number of vector CONSTRUCTOR elements");
4577 debug_generic_stmt (rhs1);
4578 return true;
4581 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4583 error ("incorrect type of vector CONSTRUCTOR elements");
4584 debug_generic_stmt (rhs1);
4585 return true;
4587 if (elt_i != NULL_TREE
4588 && (TREE_CODE (elt_t) == VECTOR_TYPE
4589 || TREE_CODE (elt_i) != INTEGER_CST
4590 || compare_tree_int (elt_i, i) != 0))
4592 error ("vector CONSTRUCTOR with non-NULL element index");
4593 debug_generic_stmt (rhs1);
4594 return true;
4596 if (!is_gimple_val (elt_v))
4598 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4599 debug_generic_stmt (rhs1);
4600 return true;
4604 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4606 error ("non-vector CONSTRUCTOR with elements");
4607 debug_generic_stmt (rhs1);
4608 return true;
4610 return res;
4611 case OBJ_TYPE_REF:
4612 case ASSERT_EXPR:
4613 case WITH_SIZE_EXPR:
4614 /* FIXME. */
4615 return res;
4617 default:;
4620 return res;
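/* An illustrative case, assuming the usual gimplification: the
   aggregate copy in

     struct S { int a[8]; };
     void
     copy_s (struct S *p, struct S *q)
     {
       *p = *q;
     }

   reaches this verifier as a single-rhs assignment between two
   MEM_REFs, which is valid because the stored type is not a register
   type. For a register type such as int, the gimplifier first loads
   the rhs into an SSA name; a memory rhs stored directly to a
   register-typed memory lhs would trigger the "invalid rhs for gimple
   memory store" error above. */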
4623 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4624 is a problem, otherwise false. */
4626 static bool
4627 verify_gimple_assign (gassign *stmt)
4629 switch (gimple_assign_rhs_class (stmt))
4631 case GIMPLE_SINGLE_RHS:
4632 return verify_gimple_assign_single (stmt);
4634 case GIMPLE_UNARY_RHS:
4635 return verify_gimple_assign_unary (stmt);
4637 case GIMPLE_BINARY_RHS:
4638 return verify_gimple_assign_binary (stmt);
4640 case GIMPLE_TERNARY_RHS:
4641 return verify_gimple_assign_ternary (stmt);
4643 default:
4644 gcc_unreachable ();
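/* A rough mapping from rhs classes to source-level shapes, for
   illustration (assuming the usual gimplification):

     x = y;              GIMPLE_SINGLE_RHS
     x = -y;             GIMPLE_UNARY_RHS
     x = y + z;          GIMPLE_BINARY_RHS
     x = c ? y : z;      GIMPLE_TERNARY_RHS (COND_EXPR)
*/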
4648 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4649 is a problem, otherwise false. */
4651 static bool
4652 verify_gimple_return (greturn *stmt)
4654 tree op = gimple_return_retval (stmt);
4655 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4657 /* We cannot test for present return values as we do not fix up missing
4658 return values from the original source. */
4659 if (op == NULL)
4660 return false;
4662 if (!is_gimple_val (op)
4663 && TREE_CODE (op) != RESULT_DECL)
4665 error ("invalid operand in return statement");
4666 debug_generic_stmt (op);
4667 return true;
4670 if ((TREE_CODE (op) == RESULT_DECL
4671 && DECL_BY_REFERENCE (op))
4672 || (TREE_CODE (op) == SSA_NAME
4673 && SSA_NAME_VAR (op)
4674 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4675 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4676 op = TREE_TYPE (op);
4678 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4680 error ("invalid conversion in return statement");
4681 debug_generic_stmt (restype);
4682 debug_generic_stmt (TREE_TYPE (op));
4683 return true;
4686 return false;
4690 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4691 is a problem, otherwise false. */
4693 static bool
4694 verify_gimple_goto (ggoto *stmt)
4696 tree dest = gimple_goto_dest (stmt);
4698 /* ??? We have two canonical forms of direct goto destinations, a
4699 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4700 if (TREE_CODE (dest) != LABEL_DECL
4701 && (!is_gimple_val (dest)
4702 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4704 error ("goto destination is neither a label nor a pointer");
4705 return true;
4708 return false;
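/* As an illustration, assuming typical front-end output: the GNU
   computed-goto extension in

     void
     dispatch (int i)
     {
       static void *tbl[] = { &&l0, &&l1 };
       goto *tbl[i];
     l0: return;
     l1: return;
     }

   yields a GIMPLE_GOTO whose destination is a pointer value rather
   than a bare LABEL_DECL, the second form accepted above. */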
4711 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4712 is a problem, otherwise false. */
4714 static bool
4715 verify_gimple_switch (gswitch *stmt)
4717 unsigned int i, n;
4718 tree elt, prev_upper_bound = NULL_TREE;
4719 tree index_type, elt_type = NULL_TREE;
4721 if (!is_gimple_val (gimple_switch_index (stmt)))
4723 error ("invalid operand to switch statement");
4724 debug_generic_stmt (gimple_switch_index (stmt));
4725 return true;
4728 index_type = TREE_TYPE (gimple_switch_index (stmt));
4729 if (! INTEGRAL_TYPE_P (index_type))
4731 error ("non-integral type switch statement");
4732 debug_generic_expr (index_type);
4733 return true;
4736 elt = gimple_switch_label (stmt, 0);
4737 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4739 error ("invalid default case label in switch statement");
4740 debug_generic_expr (elt);
4741 return true;
4744 n = gimple_switch_num_labels (stmt);
4745 for (i = 1; i < n; i++)
4747 elt = gimple_switch_label (stmt, i);
4749 if (! CASE_LOW (elt))
4751 error ("invalid case label in switch statement");
4752 debug_generic_expr (elt);
4753 return true;
4755 if (CASE_HIGH (elt)
4756 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4758 error ("invalid case range in switch statement");
4759 debug_generic_expr (elt);
4760 return true;
4763 if (elt_type)
4765 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4766 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4768 error ("type mismatch for case label in switch statement");
4769 debug_generic_expr (elt);
4770 return true;
4773 else
4775 elt_type = TREE_TYPE (CASE_LOW (elt));
4776 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4778 error ("type precision mismatch in switch statement");
4779 return true;
4783 if (prev_upper_bound)
4785 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4787 error ("case labels not sorted in switch statement");
4788 return true;
4792 prev_upper_bound = CASE_HIGH (elt);
4793 if (! prev_upper_bound)
4794 prev_upper_bound = CASE_LOW (elt);
4797 return false;
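/* A hedged example of the case vector being checked, using the GNU
   case-range extension (illustrative source, not from a dump):

     int
     classify (int x)
     {
       switch (x)
         {
         case 1: return 1;
         case 4 ... 7: return 2;
         default: return 0;
         }
     }

   After gimplification the default label sits at index 0 with neither
   CASE_LOW nor CASE_HIGH set, the range case has CASE_LOW strictly
   below CASE_HIGH, and the remaining labels are sorted ascending with
   a common type, exactly what the loop above verifies. */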
4800 /* Verify a gimple debug statement STMT.
4801 Returns true if anything is wrong. */
4803 static bool
4804 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4806 /* There isn't much that could be wrong in a gimple debug stmt. A
4807 gimple debug bind stmt, for example, maps a tree (usually a
4808 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4809 or member of an aggregate type) to another tree that can be an
4810 arbitrary expression. These stmts expand into debug insns, and
4811 are converted to debug notes by var-tracking.c. */
4812 return false;
4815 /* Verify a gimple label statement STMT.
4816 Returns true if anything is wrong. */
4818 static bool
4819 verify_gimple_label (glabel *stmt)
4821 tree decl = gimple_label_label (stmt);
4822 int uid;
4823 bool err = false;
4825 if (TREE_CODE (decl) != LABEL_DECL)
4826 return true;
4827 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4828 && DECL_CONTEXT (decl) != current_function_decl)
4830 error ("label's context is not the current function decl");
4831 err |= true;
4834 uid = LABEL_DECL_UID (decl);
4835 if (cfun->cfg
4836 && (uid == -1
4837 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4839 error ("incorrect entry in label_to_block_map");
4840 err |= true;
4843 uid = EH_LANDING_PAD_NR (decl);
4844 if (uid)
4846 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4847 if (decl != lp->post_landing_pad)
4849 error ("incorrect setting of landing pad number");
4850 err |= true;
4854 return err;
4857 /* Verify a gimple cond statement STMT.
4858 Returns true if anything is wrong. */
4860 static bool
4861 verify_gimple_cond (gcond *stmt)
4863 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4865 error ("invalid comparison code in gimple cond");
4866 return true;
4868 if (!(!gimple_cond_true_label (stmt)
4869 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4870 || !(!gimple_cond_false_label (stmt)
4871 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4873 error ("invalid labels in gimple cond");
4874 return true;
4877 return verify_gimple_comparison (boolean_type_node,
4878 gimple_cond_lhs (stmt),
4879 gimple_cond_rhs (stmt),
4880 gimple_cond_code (stmt));
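/* Sketch of the form being verified (not from a dump):

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   i.e. a GIMPLE_COND carries a tcc_comparison code and two operands,
   which are checked against boolean_type_node via
   verify_gimple_comparison above. */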
4883 /* Verify the GIMPLE statement STMT. Returns true if there is an
4884 error, otherwise false. */
4886 static bool
4887 verify_gimple_stmt (gimple *stmt)
4889 switch (gimple_code (stmt))
4891 case GIMPLE_ASSIGN:
4892 return verify_gimple_assign (as_a <gassign *> (stmt));
4894 case GIMPLE_LABEL:
4895 return verify_gimple_label (as_a <glabel *> (stmt));
4897 case GIMPLE_CALL:
4898 return verify_gimple_call (as_a <gcall *> (stmt));
4900 case GIMPLE_COND:
4901 return verify_gimple_cond (as_a <gcond *> (stmt));
4903 case GIMPLE_GOTO:
4904 return verify_gimple_goto (as_a <ggoto *> (stmt));
4906 case GIMPLE_SWITCH:
4907 return verify_gimple_switch (as_a <gswitch *> (stmt));
4909 case GIMPLE_RETURN:
4910 return verify_gimple_return (as_a <greturn *> (stmt));
4912 case GIMPLE_ASM:
4913 return false;
4915 case GIMPLE_TRANSACTION:
4916 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4918 /* Tuples that do not have tree operands. */
4919 case GIMPLE_NOP:
4920 case GIMPLE_PREDICT:
4921 case GIMPLE_RESX:
4922 case GIMPLE_EH_DISPATCH:
4923 case GIMPLE_EH_MUST_NOT_THROW:
4924 return false;
4926 CASE_GIMPLE_OMP:
4927 /* OpenMP directives are validated by the FE and never operated
4928 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4929 non-gimple expressions when the main index variable has had
4930 its address taken. This does not affect the loop itself
4931 because the header of a GIMPLE_OMP_FOR is merely used to determine
4932 how to set up the parallel iteration. */
4933 return false;
4935 case GIMPLE_DEBUG:
4936 return verify_gimple_debug (stmt);
4938 default:
4939 gcc_unreachable ();
4943 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4944 and false otherwise. */
4946 static bool
4947 verify_gimple_phi (gimple *phi)
4949 bool err = false;
4950 unsigned i;
4951 tree phi_result = gimple_phi_result (phi);
4952 bool virtual_p;
4954 if (!phi_result)
4956 error ("invalid PHI result");
4957 return true;
4960 virtual_p = virtual_operand_p (phi_result);
4961 if (TREE_CODE (phi_result) != SSA_NAME
4962 || (virtual_p
4963 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4965 error ("invalid PHI result");
4966 err = true;
4969 for (i = 0; i < gimple_phi_num_args (phi); i++)
4971 tree t = gimple_phi_arg_def (phi, i);
4973 if (!t)
4975 error ("missing PHI def");
4976 err |= true;
4977 continue;
4979 /* Addressable variables do have SSA_NAMEs but they
4980 are not considered gimple values. */
4981 else if ((TREE_CODE (t) == SSA_NAME
4982 && virtual_p != virtual_operand_p (t))
4983 || (virtual_p
4984 && (TREE_CODE (t) != SSA_NAME
4985 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4986 || (!virtual_p
4987 && !is_gimple_val (t)))
4989 error ("invalid PHI argument");
4990 debug_generic_expr (t);
4991 err |= true;
4993 #ifdef ENABLE_TYPES_CHECKING
4994 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4996 error ("incompatible types in PHI argument %u", i);
4997 debug_generic_stmt (TREE_TYPE (phi_result));
4998 debug_generic_stmt (TREE_TYPE (t));
4999 err |= true;
5001 #endif
5004 return err;
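/* An illustrative PHI, assuming typical into-SSA output: for

     int
     pick (int c)
     {
       int x;
       if (c)
         x = 1;
       else
         x = 2;
       return x;
     }

   the join block carries a node like

     x_3 = PHI <1(3), 2(4)>

   whose result must be an SSA_NAME and whose arguments must each be a
   GIMPLE value agreeing with the result in virtual-operand-ness, as
   checked above. */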
5007 /* Verify the GIMPLE statements inside the sequence STMTS. */
5009 static bool
5010 verify_gimple_in_seq_2 (gimple_seq stmts)
5012 gimple_stmt_iterator ittr;
5013 bool err = false;
5015 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5017 gimple *stmt = gsi_stmt (ittr);
5019 switch (gimple_code (stmt))
5021 case GIMPLE_BIND:
5022 err |= verify_gimple_in_seq_2 (
5023 gimple_bind_body (as_a <gbind *> (stmt)));
5024 break;
5026 case GIMPLE_TRY:
5027 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5028 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5029 break;
5031 case GIMPLE_EH_FILTER:
5032 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5033 break;
5035 case GIMPLE_EH_ELSE:
5037 geh_else *eh_else = as_a <geh_else *> (stmt);
5038 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5039 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5041 break;
5043 case GIMPLE_CATCH:
5044 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5045 as_a <gcatch *> (stmt)));
5046 break;
5048 case GIMPLE_TRANSACTION:
5049 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5050 break;
5052 default:
5054 bool err2 = verify_gimple_stmt (stmt);
5055 if (err2)
5056 debug_gimple_stmt (stmt);
5057 err |= err2;
5062 return err;
5065 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5066 is a problem, otherwise false. */
5068 static bool
5069 verify_gimple_transaction (gtransaction *stmt)
5071 tree lab;
5073 lab = gimple_transaction_label_norm (stmt);
5074 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5075 return true;
5076 lab = gimple_transaction_label_uninst (stmt);
5077 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5078 return true;
5079 lab = gimple_transaction_label_over (stmt);
5080 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5081 return true;
5083 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5087 /* Verify the GIMPLE statements inside the statement list STMTS. */
5089 DEBUG_FUNCTION void
5090 verify_gimple_in_seq (gimple_seq stmts)
5092 timevar_push (TV_TREE_STMT_VERIFY);
5093 if (verify_gimple_in_seq_2 (stmts))
5094 internal_error ("verify_gimple failed");
5095 timevar_pop (TV_TREE_STMT_VERIFY);
5098 /* Return true when T can be shared. */
5100 static bool
5101 tree_node_can_be_shared (tree t)
5103 if (IS_TYPE_OR_DECL_P (t)
5104 || is_gimple_min_invariant (t)
5105 || TREE_CODE (t) == SSA_NAME
5106 || t == error_mark_node
5107 || TREE_CODE (t) == IDENTIFIER_NODE)
5108 return true;
5110 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5111 return true;
5113 if (DECL_P (t))
5114 return true;
5116 return false;
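/* For instance (a hedged note): the SSA name a_1 and the type node for
   int may be referenced from many statements, while a reference tree
   such as the ARRAY_REF for a[i_2] must be unshared, i.e. each
   statement must point to its own copy; verify_node_sharing_1 below
   reports the second occurrence of any unshareable node. */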
5119 /* Called via walk_tree. Verify tree sharing. */
5121 static tree
5122 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5124 hash_set<void *> *visited = (hash_set<void *> *) data;
5126 if (tree_node_can_be_shared (*tp))
5128 *walk_subtrees = false;
5129 return NULL;
5132 if (visited->add (*tp))
5133 return *tp;
5135 return NULL;
5138 /* Called via walk_gimple_stmt. Verify tree sharing. */
5140 static tree
5141 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5143 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5144 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5147 static bool eh_error_found;
5148 bool
5149 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5150 hash_set<gimple *> *visited)
5152 if (!visited->contains (stmt))
5154 error ("dead STMT in EH table");
5155 debug_gimple_stmt (stmt);
5156 eh_error_found = true;
5158 return true;
5161 /* Verify that the block of location LOC is in BLOCKS. */
5163 static bool
5164 verify_location (hash_set<tree> *blocks, location_t loc)
5166 tree block = LOCATION_BLOCK (loc);
5167 if (block != NULL_TREE
5168 && !blocks->contains (block))
5170 error ("location references block not in block tree");
5171 return true;
5173 if (block != NULL_TREE)
5174 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5175 return false;
5178 /* Called via walk_tree. Verify that expressions have no blocks. */
5180 static tree
5181 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5183 if (!EXPR_P (*tp))
5185 *walk_subtrees = false;
5186 return NULL;
5189 location_t loc = EXPR_LOCATION (*tp);
5190 if (LOCATION_BLOCK (loc) != NULL)
5191 return *tp;
5193 return NULL;
5196 /* Called via walk_tree. Verify locations of expressions. */
5198 static tree
5199 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5201 hash_set<tree> *blocks = (hash_set<tree> *) data;
5203 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5205 tree t = DECL_DEBUG_EXPR (*tp);
5206 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5207 if (addr)
5208 return addr;
5210 if ((VAR_P (*tp)
5211 || TREE_CODE (*tp) == PARM_DECL
5212 || TREE_CODE (*tp) == RESULT_DECL)
5213 && DECL_HAS_VALUE_EXPR_P (*tp))
5215 tree t = DECL_VALUE_EXPR (*tp);
5216 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5217 if (addr)
5218 return addr;
5221 if (!EXPR_P (*tp))
5223 *walk_subtrees = false;
5224 return NULL;
5227 location_t loc = EXPR_LOCATION (*tp);
5228 if (verify_location (blocks, loc))
5229 return *tp;
5231 return NULL;
5234 /* Called via walk_gimple_op. Verify locations of expressions. */
5236 static tree
5237 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5239 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5240 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5243 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5245 static void
5246 collect_subblocks (hash_set<tree> *blocks, tree block)
5248 tree t;
5249 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5251 blocks->add (t);
5252 collect_subblocks (blocks, t);
5256 /* Verify the GIMPLE statements in the CFG of FN. */
5258 DEBUG_FUNCTION void
5259 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5261 basic_block bb;
5262 bool err = false;
5264 timevar_push (TV_TREE_STMT_VERIFY);
5265 hash_set<void *> visited;
5266 hash_set<gimple *> visited_stmts;
5268 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5269 hash_set<tree> blocks;
5270 if (DECL_INITIAL (fn->decl))
5272 blocks.add (DECL_INITIAL (fn->decl));
5273 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5276 FOR_EACH_BB_FN (bb, fn)
5278 gimple_stmt_iterator gsi;
5280 for (gphi_iterator gpi = gsi_start_phis (bb);
5281 !gsi_end_p (gpi);
5282 gsi_next (&gpi))
5284 gphi *phi = gpi.phi ();
5285 bool err2 = false;
5286 unsigned i;
5288 visited_stmts.add (phi);
5290 if (gimple_bb (phi) != bb)
5292 error ("gimple_bb (phi) is set to a wrong basic block");
5293 err2 = true;
5296 err2 |= verify_gimple_phi (phi);
5298 /* Only PHI arguments have locations. */
5299 if (gimple_location (phi) != UNKNOWN_LOCATION)
5301 error ("PHI node with location");
5302 err2 = true;
5305 for (i = 0; i < gimple_phi_num_args (phi); i++)
5307 tree arg = gimple_phi_arg_def (phi, i);
5308 tree addr = walk_tree (&arg, verify_node_sharing_1,
5309 &visited, NULL);
5310 if (addr)
5312 error ("incorrect sharing of tree nodes");
5313 debug_generic_expr (addr);
5314 err2 |= true;
5316 location_t loc = gimple_phi_arg_location (phi, i);
5317 if (virtual_operand_p (gimple_phi_result (phi))
5318 && loc != UNKNOWN_LOCATION)
5320 error ("virtual PHI with argument locations");
5321 err2 = true;
5323 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5324 if (addr)
5326 debug_generic_expr (addr);
5327 err2 = true;
5329 err2 |= verify_location (&blocks, loc);
5332 if (err2)
5333 debug_gimple_stmt (phi);
5334 err |= err2;
5337 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5339 gimple *stmt = gsi_stmt (gsi);
5340 bool err2 = false;
5341 struct walk_stmt_info wi;
5342 tree addr;
5343 int lp_nr;
5345 visited_stmts.add (stmt);
5347 if (gimple_bb (stmt) != bb)
5349 error ("gimple_bb (stmt) is set to a wrong basic block");
5350 err2 = true;
5353 err2 |= verify_gimple_stmt (stmt);
5354 err2 |= verify_location (&blocks, gimple_location (stmt));
5356 memset (&wi, 0, sizeof (wi));
5357 wi.info = (void *) &visited;
5358 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5359 if (addr)
5361 error ("incorrect sharing of tree nodes");
5362 debug_generic_expr (addr);
5363 err2 |= true;
5366 memset (&wi, 0, sizeof (wi));
5367 wi.info = (void *) &blocks;
5368 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5369 if (addr)
5371 debug_generic_expr (addr);
5372 err2 |= true;
5375 /* ??? Instead of not checking these stmts at all the walker
5376 should know its context via wi. */
5377 if (!is_gimple_debug (stmt)
5378 && !is_gimple_omp (stmt))
5380 memset (&wi, 0, sizeof (wi));
5381 addr = walk_gimple_op (stmt, verify_expr, &wi);
5382 if (addr)
5384 debug_generic_expr (addr);
5385 inform (gimple_location (stmt), "in statement");
5386 err2 |= true;
5390 /* If the statement is marked as part of an EH region, then it is
5391 expected that the statement could throw. Verify that when
5392 optimizations simplify a statement such that we can prove it
5393 cannot throw, we also update the other data structures to
5394 match. */
5395 lp_nr = lookup_stmt_eh_lp (stmt);
5396 if (lp_nr > 0)
5398 if (!stmt_could_throw_p (stmt))
5400 if (verify_nothrow)
5402 error ("statement marked for throw, but doesn%'t");
5403 err2 |= true;
5406 else if (!gsi_one_before_end_p (gsi))
5408 error ("statement marked for throw in middle of block");
5409 err2 |= true;
5413 if (err2)
5414 debug_gimple_stmt (stmt);
5415 err |= err2;
5419 eh_error_found = false;
5420 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5421 if (eh_table)
5422 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5423 (&visited_stmts);
5425 if (err || eh_error_found)
5426 internal_error ("verify_gimple failed");
5428 verify_histograms ();
5429 timevar_pop (TV_TREE_STMT_VERIFY);
5433 /* Verifies that the flow information is OK. */
5435 static int
5436 gimple_verify_flow_info (void)
5438 int err = 0;
5439 basic_block bb;
5440 gimple_stmt_iterator gsi;
5441 gimple *stmt;
5442 edge e;
5443 edge_iterator ei;
5445 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5446 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5448 error ("ENTRY_BLOCK has IL associated with it");
5449 err = 1;
5452 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5453 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5455 error ("EXIT_BLOCK has IL associated with it");
5456 err = 1;
5459 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5460 if (e->flags & EDGE_FALLTHRU)
5462 error ("fallthru to exit from bb %d", e->src->index);
5463 err = 1;
5466 FOR_EACH_BB_FN (bb, cfun)
5468 bool found_ctrl_stmt = false;
5470 stmt = NULL;
5472 /* Skip labels at the start of the basic block. */
5473 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5475 tree label;
5476 gimple *prev_stmt = stmt;
5478 stmt = gsi_stmt (gsi);
5480 if (gimple_code (stmt) != GIMPLE_LABEL)
5481 break;
5483 label = gimple_label_label (as_a <glabel *> (stmt));
5484 if (prev_stmt && DECL_NONLOCAL (label))
5486 error ("nonlocal label ");
5487 print_generic_expr (stderr, label);
5488 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5489 bb->index);
5490 err = 1;
5493 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5495 error ("EH landing pad label ");
5496 print_generic_expr (stderr, label);
5497 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5498 bb->index);
5499 err = 1;
5502 if (label_to_block (label) != bb)
5504 error ("label ");
5505 print_generic_expr (stderr, label);
5506 fprintf (stderr, " to block does not match in bb %d",
5507 bb->index);
5508 err = 1;
5511 if (decl_function_context (label) != current_function_decl)
5513 error ("label ");
5514 print_generic_expr (stderr, label);
5515 fprintf (stderr, " has incorrect context in bb %d",
5516 bb->index);
5517 err = 1;
5521 /* Verify that the body of basic block BB is free of control flow. */
5522 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5524 gimple *stmt = gsi_stmt (gsi);
5526 if (found_ctrl_stmt)
5528 error ("control flow in the middle of basic block %d",
5529 bb->index);
5530 err = 1;
5533 if (stmt_ends_bb_p (stmt))
5534 found_ctrl_stmt = true;
5536 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5538 error ("label ");
5539 print_generic_expr (stderr, gimple_label_label (label_stmt));
5540 fprintf (stderr, " in the middle of basic block %d", bb->index);
5541 err = 1;
5545 gsi = gsi_last_bb (bb);
5546 if (gsi_end_p (gsi))
5547 continue;
5549 stmt = gsi_stmt (gsi);
5551 if (gimple_code (stmt) == GIMPLE_LABEL)
5552 continue;
5554 err |= verify_eh_edges (stmt);
5556 if (is_ctrl_stmt (stmt))
5558 FOR_EACH_EDGE (e, ei, bb->succs)
5559 if (e->flags & EDGE_FALLTHRU)
5561 error ("fallthru edge after a control statement in bb %d",
5562 bb->index);
5563 err = 1;
5567 if (gimple_code (stmt) != GIMPLE_COND)
5569 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5570 after anything other than a GIMPLE_COND statement. */
5571 FOR_EACH_EDGE (e, ei, bb->succs)
5572 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5574 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5575 bb->index);
5576 err = 1;
5580 switch (gimple_code (stmt))
5582 case GIMPLE_COND:
5584 edge true_edge;
5585 edge false_edge;
5587 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5589 if (!true_edge
5590 || !false_edge
5591 || !(true_edge->flags & EDGE_TRUE_VALUE)
5592 || !(false_edge->flags & EDGE_FALSE_VALUE)
5593 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5594 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5595 || EDGE_COUNT (bb->succs) >= 3)
5597 error ("wrong outgoing edge flags at end of bb %d",
5598 bb->index);
5599 err = 1;
5602 break;
5604 case GIMPLE_GOTO:
5605 if (simple_goto_p (stmt))
5607 error ("explicit goto at end of bb %d", bb->index);
5608 err = 1;
5610 else
5612 /* FIXME. We should double check that the labels in the
5613 destination blocks have their address taken. */
5614 FOR_EACH_EDGE (e, ei, bb->succs)
5615 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5616 | EDGE_FALSE_VALUE))
5617 || !(e->flags & EDGE_ABNORMAL))
5619 error ("wrong outgoing edge flags at end of bb %d",
5620 bb->index);
5621 err = 1;
5624 break;
5626 case GIMPLE_CALL:
5627 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5628 break;
5629 /* fallthru */
5630 case GIMPLE_RETURN:
5631 if (!single_succ_p (bb)
5632 || (single_succ_edge (bb)->flags
5633 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5634 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5636 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5637 err = 1;
5639 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5641 error ("return edge does not point to exit in bb %d",
5642 bb->index);
5643 err = 1;
5645 break;
5647 case GIMPLE_SWITCH:
5649 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5650 tree prev;
5651 edge e;
5652 size_t i, n;
5654 n = gimple_switch_num_labels (switch_stmt);
5656 /* Mark all the destination basic blocks. */
5657 for (i = 0; i < n; ++i)
5659 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5660 basic_block label_bb = label_to_block (lab);
5661 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5662 label_bb->aux = (void *)1;
5665 /* Verify that the case labels are sorted. */
5666 prev = gimple_switch_label (switch_stmt, 0);
5667 for (i = 1; i < n; ++i)
5669 tree c = gimple_switch_label (switch_stmt, i);
5670 if (!CASE_LOW (c))
5672 error ("found default case not at the start of "
5673 "case vector");
5674 err = 1;
5675 continue;
5677 if (CASE_LOW (prev)
5678 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5680 error ("case labels not sorted: ");
5681 print_generic_expr (stderr, prev);
5682 fprintf (stderr," is greater than ");
5683 print_generic_expr (stderr, c);
5684 fprintf (stderr," but comes before it.\n");
5685 err = 1;
5687 prev = c;
5689 /* VRP will remove the default case if it can prove it will
5690 never be executed. So do not verify there always exists
5691 a default case here. */
5693 FOR_EACH_EDGE (e, ei, bb->succs)
5695 if (!e->dest->aux)
5697 error ("extra outgoing edge %d->%d",
5698 bb->index, e->dest->index);
5699 err = 1;
5702 e->dest->aux = (void *)2;
5703 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5704 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5706 error ("wrong outgoing edge flags at end of bb %d",
5707 bb->index);
5708 err = 1;
5712 /* Check that we have all of them. */
5713 for (i = 0; i < n; ++i)
5715 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5716 basic_block label_bb = label_to_block (lab);
5718 if (label_bb->aux != (void *)2)
5720 error ("missing edge %i->%i", bb->index, label_bb->index);
5721 err = 1;
5725 FOR_EACH_EDGE (e, ei, bb->succs)
5726 e->dest->aux = (void *)0;
5728 break;
5730 case GIMPLE_EH_DISPATCH:
5731 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5732 break;
5734 default:
5735 break;
5739 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5740 verify_dominators (CDI_DOMINATORS);
5742 return err;
5746 /* Updates phi nodes after creating a forwarder block joined
5747 by edge FALLTHRU. */
5749 static void
5750 gimple_make_forwarder_block (edge fallthru)
5752 edge e;
5753 edge_iterator ei;
5754 basic_block dummy, bb;
5755 tree var;
5756 gphi_iterator gsi;
5758 dummy = fallthru->src;
5759 bb = fallthru->dest;
5761 if (single_pred_p (bb))
5762 return;
5764 /* If we redirected a branch we must create new PHI nodes at the
5765 start of BB. */
5766 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5768 gphi *phi, *new_phi;
5770 phi = gsi.phi ();
5771 var = gimple_phi_result (phi);
5772 new_phi = create_phi_node (var, bb);
5773 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5774 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5775 UNKNOWN_LOCATION);
5778 /* Add the arguments we have stored on edges. */
5779 FOR_EACH_EDGE (e, ei, bb->preds)
5781 if (e == fallthru)
5782 continue;
5784 flush_pending_stmts (e);
5789 /* Return a non-special label at the head of basic block BB.
5790 Create one if it doesn't exist. */
5792 tree
5793 gimple_block_label (basic_block bb)
5795 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5796 bool first = true;
5797 tree label;
5798 glabel *stmt;
5800 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5802 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5803 if (!stmt)
5804 break;
5805 label = gimple_label_label (stmt);
5806 if (!DECL_NONLOCAL (label))
5808 if (!first)
5809 gsi_move_before (&i, &s);
5810 return label;
5814 label = create_artificial_label (UNKNOWN_LOCATION);
5815 stmt = gimple_build_label (label);
5816 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5817 return label;
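/* Typical use, as in the redirection code below: a caller that needs a
   jump target for a block DEST asks

     tree label = gimple_block_label (dest);

   and then installs LABEL into the relevant case or asm operand,
   reusing an existing label at the block head when one is present. */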
5821 /* Attempt to perform edge redirection by replacing a possibly complex
5822 jump instruction by a goto or by removing the jump completely.
5823 This can apply only if all edges now point to the same block. The
5824 parameters and return values are equivalent to
5825 redirect_edge_and_branch. */
5827 static edge
5828 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5830 basic_block src = e->src;
5831 gimple_stmt_iterator i;
5832 gimple *stmt;
5834 /* We can replace or remove a complex jump only when we have exactly
5835 two edges. */
5836 if (EDGE_COUNT (src->succs) != 2
5837 /* Verify that all targets will be TARGET. Specifically, the
5838 edge that is not E must also go to TARGET. */
5839 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5840 return NULL;
5842 i = gsi_last_bb (src);
5843 if (gsi_end_p (i))
5844 return NULL;
5846 stmt = gsi_stmt (i);
5848 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5850 gsi_remove (&i, true);
5851 e = ssa_redirect_edge (e, target);
5852 e->flags = EDGE_FALLTHRU;
5853 return e;
5856 return NULL;
5860 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5861 edge representing the redirected branch. */
5863 static edge
5864 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5866 basic_block bb = e->src;
5867 gimple_stmt_iterator gsi;
5868 edge ret;
5869 gimple *stmt;
5871 if (e->flags & EDGE_ABNORMAL)
5872 return NULL;
5874 if (e->dest == dest)
5875 return NULL;
5877 if (e->flags & EDGE_EH)
5878 return redirect_eh_edge (e, dest);
5880 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5882 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5883 if (ret)
5884 return ret;
5887 gsi = gsi_last_bb (bb);
5888 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5890 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5892 case GIMPLE_COND:
5893 /* For COND_EXPR, we only need to redirect the edge. */
5894 break;
5896 case GIMPLE_GOTO:
5897 /* No non-abnormal edges should lead from a non-simple goto, and
5898 simple ones should be represented implicitly. */
5899 gcc_unreachable ();
5901 case GIMPLE_SWITCH:
5903 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5904 tree label = gimple_block_label (dest);
5905 tree cases = get_cases_for_edge (e, switch_stmt);
5907 /* If we have a list of cases associated with E, then use it
5908 as it's a lot faster than walking the entire case vector. */
5909 if (cases)
5911 edge e2 = find_edge (e->src, dest);
5912 tree last, first;
5914 first = cases;
5915 while (cases)
5917 last = cases;
5918 CASE_LABEL (cases) = label;
5919 cases = CASE_CHAIN (cases);
5922 /* If there was already an edge in the CFG, then we need
5923 to move all the cases associated with E to E2. */
5924 if (e2)
5926 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5928 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5929 CASE_CHAIN (cases2) = first;
5931 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5933 else
5935 size_t i, n = gimple_switch_num_labels (switch_stmt);
5937 for (i = 0; i < n; i++)
5939 tree elt = gimple_switch_label (switch_stmt, i);
5940 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5941 CASE_LABEL (elt) = label;
5945 break;
5947 case GIMPLE_ASM:
5949 gasm *asm_stmt = as_a <gasm *> (stmt);
5950 int i, n = gimple_asm_nlabels (asm_stmt);
5951 tree label = NULL;
5953 for (i = 0; i < n; ++i)
5955 tree cons = gimple_asm_label_op (asm_stmt, i);
5956 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5958 if (!label)
5959 label = gimple_block_label (dest);
5960 TREE_VALUE (cons) = label;
5964 /* If we didn't find any label matching the former edge in the
5965 asm labels, we must be redirecting the fallthrough
5966 edge. */
5967 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5969 break;
5971 case GIMPLE_RETURN:
5972 gsi_remove (&gsi, true);
5973 e->flags |= EDGE_FALLTHRU;
5974 break;
5976 case GIMPLE_OMP_RETURN:
5977 case GIMPLE_OMP_CONTINUE:
5978 case GIMPLE_OMP_SECTIONS_SWITCH:
5979 case GIMPLE_OMP_FOR:
5980 /* The edges from OMP constructs can be simply redirected. */
5981 break;
5983 case GIMPLE_EH_DISPATCH:
5984 if (!(e->flags & EDGE_FALLTHRU))
5985 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5986 break;
5988 case GIMPLE_TRANSACTION:
5989 if (e->flags & EDGE_TM_ABORT)
5990 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5991 gimple_block_label (dest));
5992 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5993 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5994 gimple_block_label (dest));
5995 else
5996 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5997 gimple_block_label (dest));
5998 break;
6000 default:
6001 /* Otherwise it must be a fallthru edge, and we don't need to
6002 do anything besides redirecting it. */
6003 gcc_assert (e->flags & EDGE_FALLTHRU);
6004 break;
6007 /* Update/insert PHI nodes as necessary. */
6009 /* Now update the edges in the CFG. */
6010 e = ssa_redirect_edge (e, dest);
6012 return e;
6015 /* Returns true if it is possible to remove edge E by redirecting
6016 it to the destination of the other edge from E->src. */
6018 static bool
6019 gimple_can_remove_branch_p (const_edge e)
6021 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6022 return false;
6024 return true;
6027 /* Simple wrapper, as we can always redirect fallthru edges. */
6029 static basic_block
6030 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6032 e = gimple_redirect_edge_and_branch (e, dest);
6033 gcc_assert (e);
6035 return NULL;
6039 /* Splits basic block BB after statement STMT (but at least after the
6040 labels). If STMT is NULL, BB is split just after the labels. */
6042 static basic_block
6043 gimple_split_block (basic_block bb, void *stmt)
6045 gimple_stmt_iterator gsi;
6046 gimple_stmt_iterator gsi_tgt;
6047 gimple_seq list;
6048 basic_block new_bb;
6049 edge e;
6050 edge_iterator ei;
6052 new_bb = create_empty_bb (bb);
6054 /* Redirect the outgoing edges. */
6055 new_bb->succs = bb->succs;
6056 bb->succs = NULL;
6057 FOR_EACH_EDGE (e, ei, new_bb->succs)
6058 e->src = new_bb;
6060 /* Get a stmt iterator pointing to the first stmt to move. */
6061 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6062 gsi = gsi_after_labels (bb);
6063 else
6065 gsi = gsi_for_stmt ((gimple *) stmt);
6066 gsi_next (&gsi);
6069 /* Move everything from GSI to the new basic block. */
6070 if (gsi_end_p (gsi))
6071 return new_bb;
6073 /* Split the statement list - avoid re-creating containers, as this
6074 brings ugly quadratic memory consumption in the inliner.
6075 (We are still quadratic since we need to update stmt BB pointers,
6076 sadly.) */
6077 gsi_split_seq_before (&gsi, &list);
6078 set_bb_seq (new_bb, list);
6079 for (gsi_tgt = gsi_start (list);
6080 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6081 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6083 return new_bb;
6087 /* Moves basic block BB after block AFTER. */
6089 static bool
6090 gimple_move_block_after (basic_block bb, basic_block after)
6092 if (bb->prev_bb == after)
6093 return true;
6095 unlink_block (bb);
6096 link_block (bb, after);
6098 return true;
6102 /* Return TRUE if block BB has no executable statements, otherwise return
6103 FALSE. */
6105 static bool
6106 gimple_empty_block_p (basic_block bb)
6108 /* BB must have no executable statements. */
6109 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6110 if (phi_nodes (bb))
6111 return false;
6112 if (gsi_end_p (gsi))
6113 return true;
6114 if (is_gimple_debug (gsi_stmt (gsi)))
6115 gsi_next_nondebug (&gsi);
6116 return gsi_end_p (gsi);
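/* Illustration (a sketch of the dump form): a block containing only

     <L1>:
       # DEBUG x => 42

   counts as empty here, since labels are skipped and debug stmts are
   not executable. */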
6120 /* Split a basic block if it ends with a conditional branch and if the
6121 other part of the block is not empty. */
6123 static basic_block
6124 gimple_split_block_before_cond_jump (basic_block bb)
6126 gimple *last, *split_point;
6127 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6128 if (gsi_end_p (gsi))
6129 return NULL;
6130 last = gsi_stmt (gsi);
6131 if (gimple_code (last) != GIMPLE_COND
6132 && gimple_code (last) != GIMPLE_SWITCH)
6133 return NULL;
6134 gsi_prev (&gsi);
6135 split_point = gsi_stmt (gsi);
6136 return split_block (bb, split_point)->dest;
6140 /* Return true if basic block BB can be duplicated. */
6142 static bool
6143 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6145 return true;
6148 /* Create a duplicate of the basic block BB. NOTE: This does not
6149 preserve SSA form. */
6151 static basic_block
6152 gimple_duplicate_bb (basic_block bb)
6154 basic_block new_bb;
6155 gimple_stmt_iterator gsi_tgt;
6157 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6159 /* Copy the PHI nodes. We ignore PHI node arguments here because
6160 the incoming edges have not been set up yet. */
6161 for (gphi_iterator gpi = gsi_start_phis (bb);
6162 !gsi_end_p (gpi);
6163 gsi_next (&gpi))
6165 gphi *phi, *copy;
6166 phi = gpi.phi ();
6167 copy = create_phi_node (NULL_TREE, new_bb);
6168 create_new_def_for (gimple_phi_result (phi), copy,
6169 gimple_phi_result_ptr (copy));
6170 gimple_set_uid (copy, gimple_uid (phi));
6173 gsi_tgt = gsi_start_bb (new_bb);
6174 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6175 !gsi_end_p (gsi);
6176 gsi_next (&gsi))
6178 def_operand_p def_p;
6179 ssa_op_iter op_iter;
6180 tree lhs;
6181 gimple *stmt, *copy;
6183 stmt = gsi_stmt (gsi);
6184 if (gimple_code (stmt) == GIMPLE_LABEL)
6185 continue;
6187 /* Don't duplicate label debug stmts. */
6188 if (gimple_debug_bind_p (stmt)
6189 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6190 == LABEL_DECL)
6191 continue;
6193 /* Create a new copy of STMT and duplicate STMT's virtual
6194 operands. */
6195 copy = gimple_copy (stmt);
6196 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6198 maybe_duplicate_eh_stmt (copy, stmt);
6199 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6201 /* When copying around a stmt writing into a local non-user
6202 aggregate, make sure it won't share a stack slot with other
6203 vars. */
6204 lhs = gimple_get_lhs (stmt);
6205 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6207 tree base = get_base_address (lhs);
6208 if (base
6209 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6210 && DECL_IGNORED_P (base)
6211 && !TREE_STATIC (base)
6212 && !DECL_EXTERNAL (base)
6213 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6214 DECL_NONSHAREABLE (base) = 1;
6217 /* Create new names for all the definitions created by COPY and
6218 add replacement mappings for each new name. */
6219 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6220 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6223 return new_bb;
6226 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6228 static void
6229 add_phi_args_after_copy_edge (edge e_copy)
6231 basic_block bb, bb_copy = e_copy->src, dest;
6232 edge e;
6233 edge_iterator ei;
6234 gphi *phi, *phi_copy;
6235 tree def;
6236 gphi_iterator psi, psi_copy;
6238 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6239 return;
6241 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6243 if (e_copy->dest->flags & BB_DUPLICATED)
6244 dest = get_bb_original (e_copy->dest);
6245 else
6246 dest = e_copy->dest;
6248 e = find_edge (bb, dest);
6249 if (!e)
6251 /* During loop unrolling the target of the latch edge is copied.
6252 In this case we are not looking for the edge to DEST, but for
6253 the edge to the duplicated block whose original was DEST. */
6254 FOR_EACH_EDGE (e, ei, bb->succs)
6256 if ((e->dest->flags & BB_DUPLICATED)
6257 && get_bb_original (e->dest) == dest)
6258 break;
6261 gcc_assert (e != NULL);
6264 for (psi = gsi_start_phis (e->dest),
6265 psi_copy = gsi_start_phis (e_copy->dest);
6266 !gsi_end_p (psi);
6267 gsi_next (&psi), gsi_next (&psi_copy))
6269 phi = psi.phi ();
6270 phi_copy = psi_copy.phi ();
6271 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6272 add_phi_arg (phi_copy, def, e_copy,
6273 gimple_phi_arg_location_from_edge (phi, e));
6278 /* Basic block BB_COPY was created by code duplication. Add phi node
6279 arguments for edges going out of BB_COPY. The blocks that were
6280 duplicated have BB_DUPLICATED set. */
6282 void
6283 add_phi_args_after_copy_bb (basic_block bb_copy)
6285 edge e_copy;
6286 edge_iterator ei;
6288 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6290 add_phi_args_after_copy_edge (e_copy);
6294 /* Blocks in REGION_COPY array of length N_REGION were created by
6295 duplication of basic blocks. Add phi node arguments for edges
6296 going from these blocks. If E_COPY is not NULL, also add
6297 phi node arguments for its destination. */
6299 void
6300 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6301 edge e_copy)
6303 unsigned i;
6305 for (i = 0; i < n_region; i++)
6306 region_copy[i]->flags |= BB_DUPLICATED;
6308 for (i = 0; i < n_region; i++)
6309 add_phi_args_after_copy_bb (region_copy[i]);
6310 if (e_copy)
6311 add_phi_args_after_copy_edge (e_copy);
6313 for (i = 0; i < n_region; i++)
6314 region_copy[i]->flags &= ~BB_DUPLICATED;
6317 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6318 important exit edge EXIT. By important we mean that no SSA name defined
6319 inside the region is live over the other exit edges of the region. All entry
6320 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6321 to the duplicate of the region. Dominance and loop information is
6322 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6323 UPDATE_DOMINANCE is false then we assume that the caller will update the
6324 dominance information after calling this function. The new basic
6325 blocks are stored to REGION_COPY in the same order as they appear in REGION,
6326 provided that REGION_COPY is not NULL.
6327 The function returns false if it is unable to copy the region,
6328 true otherwise. */
6330 bool
6331 gimple_duplicate_sese_region (edge entry, edge exit,
6332 basic_block *region, unsigned n_region,
6333 basic_block *region_copy,
6334 bool update_dominance)
6336 unsigned i;
6337 bool free_region_copy = false, copying_header = false;
6338 struct loop *loop = entry->dest->loop_father;
6339 edge exit_copy;
6340 vec<basic_block> doms = vNULL;
6341 edge redirected;
6342 profile_count total_count = profile_count::uninitialized ();
6343 profile_count entry_count = profile_count::uninitialized ();
6345 if (!can_copy_bbs_p (region, n_region))
6346 return false;
6348 /* Some sanity checking. Note that we do not check for all possible
6349 misuses of the function. That is, if you ask to copy something weird,
6350 it will work, but the state of structures probably will not be
6351 correct. */
6352 for (i = 0; i < n_region; i++)
6354 /* We do not handle subloops, i.e. all the blocks must belong to the
6355 same loop. */
6356 if (region[i]->loop_father != loop)
6357 return false;
6359 if (region[i] != entry->dest
6360 && region[i] == loop->header)
6361 return false;
6364 /* In case the function is used for loop header copying (which is the primary
6365 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6366 if (loop->header == entry->dest)
6368 copying_header = true;
6370 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6371 return false;
6373 for (i = 0; i < n_region; i++)
6374 if (region[i] != exit->src
6375 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6376 return false;
6379 initialize_original_copy_tables ();
6381 if (copying_header)
6382 set_loop_copy (loop, loop_outer (loop));
6383 else
6384 set_loop_copy (loop, loop);
6386 if (!region_copy)
6388 region_copy = XNEWVEC (basic_block, n_region);
6389 free_region_copy = true;
6392 /* Record blocks outside the region that are dominated by something
6393 inside. */
6394 if (update_dominance)
6396 doms.create (0);
6397 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6400 if (entry->dest->count.initialized_p ())
6402 total_count = entry->dest->count;
6403 entry_count = entry->count ();
6404 /* Fix up corner cases, to avoid division by zero or creation of negative
6405 frequencies. */
6406 if (entry_count > total_count)
6407 entry_count = total_count;
6410 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6411 split_edge_bb_loc (entry), update_dominance);
6412 if (total_count.initialized_p () && entry_count.initialized_p ())
6414 scale_bbs_frequencies_profile_count (region, n_region,
6415 total_count - entry_count,
6416 total_count);
6417 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6418 total_count);
6421 if (copying_header)
6423 loop->header = exit->dest;
6424 loop->latch = exit->src;
6427 /* Redirect the entry and add the phi node arguments. */
6428 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6429 gcc_assert (redirected != NULL);
6430 flush_pending_stmts (entry);
6432 /* Concerning updating of dominators: We must recount dominators
6433 for entry block and its copy. Anything that is outside of the
6434 region, but was dominated by something inside needs recounting as
6435 well. */
6436 if (update_dominance)
6438 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6439 doms.safe_push (get_bb_original (entry->dest));
6440 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6441 doms.release ();
6444 /* Add the other PHI node arguments. */
6445 add_phi_args_after_copy (region_copy, n_region, NULL);
6447 if (free_region_copy)
6448 free (region_copy);
6450 free_original_copy_tables ();
6451 return true;
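/* A minimal usage sketch for the routine above (hypothetical; N_BBS and
   the edge variables are assumed to come from the caller's own analysis).
   A loop-header copying pass could write:

     basic_block *bbs = XNEWVEC (basic_block, n_bbs);
     ... collect the header region into BBS, the preheader edge into
         ENTRY and the edge bound for the latch into EXIT ...
     if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
                                       NULL, true))
       update_ssa (TODO_update_ssa);
     free (bbs);

   The routine deliberately leaves the SSA web stale, so the caller is
   responsible for the update_ssa call. */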
6454 /* Checks if BB is part of the region defined by N_REGION BBS. */
6455 static bool
6456 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6458 unsigned int n;
6460 for (n = 0; n < n_region; n++)
6462 if (bb == bbs[n])
6463 return true;
6465 return false;
6468 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6469 are stored to REGION_COPY in the same order in which they appear
6470 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6471 the region, EXIT an exit from it. The condition guarding EXIT
6472 is moved to ENTRY. Returns true if duplication succeeds, false
6473 otherwise.
6475 For example,
6477 some_code;
6478 if (cond)
6479   A;
6480 else
6481   B;
6483 is transformed to
6485 if (cond)
6486   {
6487     some_code;
6488     A;
6489   }
6490 else
6491   {
6492     some_code;
6493     B;
6494   }
6495 */
6497 bool
6498 gimple_duplicate_sese_tail (edge entry, edge exit,
6499 basic_block *region, unsigned n_region,
6500 basic_block *region_copy)
6502 unsigned i;
6503 bool free_region_copy = false;
6504 struct loop *loop = exit->dest->loop_father;
6505 struct loop *orig_loop = entry->dest->loop_father;
6506 basic_block switch_bb, entry_bb, nentry_bb;
6507 vec<basic_block> doms;
6508 profile_count total_count = profile_count::uninitialized (),
6509 exit_count = profile_count::uninitialized ();
6510 edge exits[2], nexits[2], e;
6511 gimple_stmt_iterator gsi;
6512 gimple *cond_stmt;
6513 edge sorig, snew;
6514 basic_block exit_bb;
6515 gphi_iterator psi;
6516 gphi *phi;
6517 tree def;
6518 struct loop *target, *aloop, *cloop;
6520 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6521 exits[0] = exit;
6522 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6524 if (!can_copy_bbs_p (region, n_region))
6525 return false;
6527 initialize_original_copy_tables ();
6528 set_loop_copy (orig_loop, loop);
6530 target = loop;
6531 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6533 if (bb_part_of_region_p (aloop->header, region, n_region))
6535 cloop = duplicate_loop (aloop, target);
6536 duplicate_subloops (aloop, cloop);
6540 if (!region_copy)
6542 region_copy = XNEWVEC (basic_block, n_region);
6543 free_region_copy = true;
6546 gcc_assert (!need_ssa_update_p (cfun));
6548 /* Record blocks outside the region that are dominated by something
6549 inside. */
6550 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6552 total_count = exit->src->count;
6553 exit_count = exit->count ();
6554 /* Fix up corner cases, to avoid division by zero or creation of negative
6555 frequencies. */
6556 if (exit_count > total_count)
6557 exit_count = total_count;
6559 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6560 split_edge_bb_loc (exit), true);
6561 if (total_count.initialized_p () && exit_count.initialized_p ())
6563 scale_bbs_frequencies_profile_count (region, n_region,
6564 total_count - exit_count,
6565 total_count);
6566 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6567 total_count);
6570 /* Create the switch block, and put the exit condition to it. */
6571 entry_bb = entry->dest;
6572 nentry_bb = get_bb_copy (entry_bb);
6573 if (!last_stmt (entry->src)
6574 || !stmt_ends_bb_p (last_stmt (entry->src)))
6575 switch_bb = entry->src;
6576 else
6577 switch_bb = split_edge (entry);
6578 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6580 gsi = gsi_last_bb (switch_bb);
6581 cond_stmt = last_stmt (exit->src);
6582 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6583 cond_stmt = gimple_copy (cond_stmt);
6585 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6587 sorig = single_succ_edge (switch_bb);
6588 sorig->flags = exits[1]->flags;
6589 sorig->probability = exits[1]->probability;
6590 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6591 snew->probability = exits[0]->probability;
6594 /* Register the new edge from SWITCH_BB in loop exit lists. */
6595 rescan_loop_exit (snew, true, false);
6597 /* Add the PHI node arguments. */
6598 add_phi_args_after_copy (region_copy, n_region, snew);
6600 /* Get rid of now superfluous conditions and associated edges (and phi node
6601 arguments). */
6602 exit_bb = exit->dest;
6604 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6605 PENDING_STMT (e) = NULL;
6607 /* The latch of ORIG_LOOP was copied, and so was the backedge
6608 to the original header. We redirect this backedge to EXIT_BB. */
6609 for (i = 0; i < n_region; i++)
6610 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6612 gcc_assert (single_succ_edge (region_copy[i]));
6613 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6614 PENDING_STMT (e) = NULL;
6615 for (psi = gsi_start_phis (exit_bb);
6616 !gsi_end_p (psi);
6617 gsi_next (&psi))
6619 phi = psi.phi ();
6620 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6621 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6624 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6625 PENDING_STMT (e) = NULL;
6627 /* Anything that is outside of the region, but was dominated by something
6628 inside needs to update dominance info. */
6629 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6630 doms.release ();
6631 /* Update the SSA web. */
6632 update_ssa (TODO_update_ssa);
6634 if (free_region_copy)
6635 free (region_copy);
6637 free_original_copy_tables ();
6638 return true;
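/* A usage sketch (hypothetical variable names): a caller that wants the
   exit test hoisted before the loop body, e.g. to obtain an exit-first
   loop shape, might write:

     edge entry = loop_preheader_edge (loop);
     edge exit = single_dom_exit (loop);
     basic_block *bbs = get_loop_body_in_dom_order (loop);
     if (gimple_duplicate_sese_tail (entry, exit, bbs,
                                     loop->num_nodes, NULL))
       ... the condition copied into ENTRY now guards both body copies ...
     free (bbs);
   */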
6641 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6642 adding blocks when the dominator traversal reaches EXIT. This
6643 function silently assumes that ENTRY strictly dominates EXIT. */
6645 void
6646 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6647 vec<basic_block> *bbs_p)
6649 basic_block son;
6651 for (son = first_dom_son (CDI_DOMINATORS, entry);
6652 son;
6653 son = next_dom_son (CDI_DOMINATORS, son))
6655 bbs_p->safe_push (son);
6656 if (son != exit)
6657 gather_blocks_in_sese_region (son, exit, bbs_p);
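/* For example, move_sese_region_to_fn below collects a region with:

     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   ENTRY itself must be pushed by the caller, since the traversal only
   visits its dominator children. */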
6661 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6662 The duplicates are recorded in VARS_MAP. */
6664 static void
6665 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6666 tree to_context)
6668 tree t = *tp, new_t;
6669 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6671 if (DECL_CONTEXT (t) == to_context)
6672 return;
6674 bool existed;
6675 tree &loc = vars_map->get_or_insert (t, &existed);
6677 if (!existed)
6679 if (SSA_VAR_P (t))
6681 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6682 add_local_decl (f, new_t);
6684 else
6686 gcc_assert (TREE_CODE (t) == CONST_DECL);
6687 new_t = copy_node (t);
6689 DECL_CONTEXT (new_t) = to_context;
6691 loc = new_t;
6693 else
6694 new_t = loc;
6696 *tp = new_t;
6700 /* Creates an SSA name in TO_CONTEXT equivalent to NAME.
6701 VARS_MAP maps old SSA names and var_decls to the new ones. */
6703 static tree
6704 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6705 tree to_context)
6707 tree new_name;
6709 gcc_assert (!virtual_operand_p (name));
6711 tree *loc = vars_map->get (name);
6713 if (!loc)
6715 tree decl = SSA_NAME_VAR (name);
6716 if (decl)
6718 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6719 replace_by_duplicate_decl (&decl, vars_map, to_context);
6720 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6721 decl, SSA_NAME_DEF_STMT (name));
6723 else
6724 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6725 name, SSA_NAME_DEF_STMT (name));
6727 /* Now that we've used the def stmt to define new_name, make sure it
6728 doesn't define name anymore. */
6729 SSA_NAME_DEF_STMT (name) = NULL;
6731 vars_map->put (name, new_name);
6733 else
6734 new_name = *loc;
6736 return new_name;
6739 struct move_stmt_d
6741 tree orig_block;
6742 tree new_block;
6743 tree from_context;
6744 tree to_context;
6745 hash_map<tree, tree> *vars_map;
6746 htab_t new_label_map;
6747 hash_map<void *, void *> *eh_map;
6748 bool remap_decls_p;
6751 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6752 contained in *TP if it was ORIG_BLOCK previously, and change the
6753 DECL_CONTEXT of every local variable referenced in *TP. */
6755 static tree
6756 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6758 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6759 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6760 tree t = *tp;
6762 if (EXPR_P (t))
6764 tree block = TREE_BLOCK (t);
6765 if (block == NULL_TREE)
6767 else if (block == p->orig_block
6768 || p->orig_block == NULL_TREE)
6769 TREE_SET_BLOCK (t, p->new_block);
6770 else if (flag_checking)
6772 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6773 block = BLOCK_SUPERCONTEXT (block);
6774 gcc_assert (block == p->orig_block);
6777 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6779 if (TREE_CODE (t) == SSA_NAME)
6780 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6781 else if (TREE_CODE (t) == PARM_DECL
6782 && gimple_in_ssa_p (cfun))
6783 *tp = *(p->vars_map->get (t));
6784 else if (TREE_CODE (t) == LABEL_DECL)
6786 if (p->new_label_map)
6788 struct tree_map in, *out;
6789 in.base.from = t;
6790 out = (struct tree_map *)
6791 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6792 if (out)
6793 *tp = t = out->to;
6796 /* For FORCED_LABELs we can end up with references from other
6797 functions if some SESE regions are outlined. It is UB to
6798 jump in between them, but they could be used just for printing
6799 addresses etc. In that case, DECL_CONTEXT on the label should
6800 be the function containing the glabel stmt with that LABEL_DECL,
6801 rather than whichever function a reference to the label was last
6802 seen in. */
6803 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6804 DECL_CONTEXT (t) = p->to_context;
6806 else if (p->remap_decls_p)
6808 /* Replace T with its duplicate. T should no longer appear in the
6809 parent function, so this looks wasteful; however, it may appear
6810 in referenced_vars, and more importantly, as virtual operands of
6811 statements, and in alias lists of other variables. It would be
6812 quite difficult to expunge it from all those places. ??? It might
6813 suffice to do this for addressable variables. */
6814 if ((VAR_P (t) && !is_global_var (t))
6815 || TREE_CODE (t) == CONST_DECL)
6816 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6818 *walk_subtrees = 0;
6820 else if (TYPE_P (t))
6821 *walk_subtrees = 0;
6823 return NULL_TREE;
6826 /* Helper for move_stmt_r. Given an EH region number for the source
6827 function, map that to the duplicate EH region number in the dest.
6829 static int
6830 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6832 eh_region old_r, new_r;
6834 old_r = get_eh_region_from_number (old_nr);
6835 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6837 return new_r->index;
6840 /* Similar, but operate on INTEGER_CSTs. */
6842 static tree
6843 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6845 int old_nr, new_nr;
6847 old_nr = tree_to_shwi (old_t_nr);
6848 new_nr = move_stmt_eh_region_nr (old_nr, p);
6850 return build_int_cst (integer_type_node, new_nr);
6853 /* Like move_stmt_op, but for gimple statements.
6855 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6856 contained in the current statement in *GSI_P and change the
6857 DECL_CONTEXT of every local variable referenced in the current
6858 statement. */
6860 static tree
6861 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6862 struct walk_stmt_info *wi)
6864 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6865 gimple *stmt = gsi_stmt (*gsi_p);
6866 tree block = gimple_block (stmt);
6868 if (block == p->orig_block
6869 || (p->orig_block == NULL_TREE
6870 && block != NULL_TREE))
6871 gimple_set_block (stmt, p->new_block);
6873 switch (gimple_code (stmt))
6875 case GIMPLE_CALL:
6876 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6878 tree r, fndecl = gimple_call_fndecl (stmt);
6879 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6880 switch (DECL_FUNCTION_CODE (fndecl))
6882 case BUILT_IN_EH_COPY_VALUES:
6883 r = gimple_call_arg (stmt, 1);
6884 r = move_stmt_eh_region_tree_nr (r, p);
6885 gimple_call_set_arg (stmt, 1, r);
6886 /* FALLTHRU */
6888 case BUILT_IN_EH_POINTER:
6889 case BUILT_IN_EH_FILTER:
6890 r = gimple_call_arg (stmt, 0);
6891 r = move_stmt_eh_region_tree_nr (r, p);
6892 gimple_call_set_arg (stmt, 0, r);
6893 break;
6895 default:
6896 break;
6899 break;
6901 case GIMPLE_RESX:
6903 gresx *resx_stmt = as_a <gresx *> (stmt);
6904 int r = gimple_resx_region (resx_stmt);
6905 r = move_stmt_eh_region_nr (r, p);
6906 gimple_resx_set_region (resx_stmt, r);
6908 break;
6910 case GIMPLE_EH_DISPATCH:
6912 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6913 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6914 r = move_stmt_eh_region_nr (r, p);
6915 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6917 break;
6919 case GIMPLE_OMP_RETURN:
6920 case GIMPLE_OMP_CONTINUE:
6921 break;
6923 case GIMPLE_LABEL:
6925 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6926 so that such labels can be referenced from other regions.
6927 Make sure to update it when seeing a GIMPLE_LABEL though,
6928 that is the owner of the label. */
6929 walk_gimple_op (stmt, move_stmt_op, wi);
6930 *handled_ops_p = true;
6931 tree label = gimple_label_label (as_a <glabel *> (stmt));
6932 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6933 DECL_CONTEXT (label) = p->to_context;
6935 break;
6937 default:
6938 if (is_gimple_omp (stmt))
6940 /* Do not remap variables inside OMP directives. Variables
6941 referenced in clauses and directive header belong to the
6942 parent function and should not be moved into the child
6943 function. */
6944 bool save_remap_decls_p = p->remap_decls_p;
6945 p->remap_decls_p = false;
6946 *handled_ops_p = true;
6948 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6949 move_stmt_op, wi);
6951 p->remap_decls_p = save_remap_decls_p;
6953 break;
6956 return NULL_TREE;
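/* The two walkers above are driven through walk_gimple_stmt; a sketch
   of the wiring (D being a filled-in struct move_stmt_d and GSI an
   iterator over a block's statements, as done in move_block_to_fn
   below):

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &d;
     walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);
   */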
6959 /* Move basic block BB from function CFUN to function DEST_FN. The
6960 block is moved out of the original linked list and placed after
6961 block AFTER in the new list. Also, the block is removed from the
6962 original array of blocks and placed in DEST_FN's array of blocks.
6963 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6964 updated to reflect the moved edges.
6966 The local variables are remapped to new instances, VARS_MAP is used
6967 to record the mapping. */
6969 static void
6970 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6971 basic_block after, bool update_edge_count_p,
6972 struct move_stmt_d *d)
6974 struct control_flow_graph *cfg;
6975 edge_iterator ei;
6976 edge e;
6977 gimple_stmt_iterator si;
6978 unsigned old_len, new_len;
6980 /* Remove BB from dominance structures. */
6981 delete_from_dominance_info (CDI_DOMINATORS, bb);
6983 /* Move BB from its current loop to the copy in the new function. */
6984 if (current_loops)
6986 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6987 if (new_loop)
6988 bb->loop_father = new_loop;
6991 /* Link BB to the new linked list. */
6992 move_block_after (bb, after);
6994 /* Update the edge count in the corresponding flowgraphs. */
6995 if (update_edge_count_p)
6996 FOR_EACH_EDGE (e, ei, bb->succs)
6998 cfun->cfg->x_n_edges--;
6999 dest_cfun->cfg->x_n_edges++;
7002 /* Remove BB from the original basic block array. */
7003 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7004 cfun->cfg->x_n_basic_blocks--;
7006 /* Grow DEST_CFUN's basic block array if needed. */
7007 cfg = dest_cfun->cfg;
7008 cfg->x_n_basic_blocks++;
7009 if (bb->index >= cfg->x_last_basic_block)
7010 cfg->x_last_basic_block = bb->index + 1;
7012 old_len = vec_safe_length (cfg->x_basic_block_info);
7013 if ((unsigned) cfg->x_last_basic_block >= old_len)
7015 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7016 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7019 (*cfg->x_basic_block_info)[bb->index] = bb;
7021 /* Remap the variables in phi nodes. */
7022 for (gphi_iterator psi = gsi_start_phis (bb);
7023 !gsi_end_p (psi); )
7025 gphi *phi = psi.phi ();
7026 use_operand_p use;
7027 tree op = PHI_RESULT (phi);
7028 ssa_op_iter oi;
7029 unsigned i;
7031 if (virtual_operand_p (op))
7033 /* Remove the phi nodes for virtual operands (alias analysis will be
7034 run for the new function, anyway). */
7035 remove_phi_node (&psi, true);
7036 continue;
7039 SET_PHI_RESULT (phi,
7040 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7041 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7043 op = USE_FROM_PTR (use);
7044 if (TREE_CODE (op) == SSA_NAME)
7045 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7048 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7050 location_t locus = gimple_phi_arg_location (phi, i);
7051 tree block = LOCATION_BLOCK (locus);
7053 if (locus == UNKNOWN_LOCATION)
7054 continue;
7055 if (d->orig_block == NULL_TREE || block == d->orig_block)
7057 locus = set_block (locus, d->new_block);
7058 gimple_phi_arg_set_location (phi, i, locus);
7062 gsi_next (&psi);
7065 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7067 gimple *stmt = gsi_stmt (si);
7068 struct walk_stmt_info wi;
7070 memset (&wi, 0, sizeof (wi));
7071 wi.info = d;
7072 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7074 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7076 tree label = gimple_label_label (label_stmt);
7077 int uid = LABEL_DECL_UID (label);
7079 gcc_assert (uid > -1);
7081 old_len = vec_safe_length (cfg->x_label_to_block_map);
7082 if (old_len <= (unsigned) uid)
7084 new_len = 3 * uid / 2 + 1;
7085 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7088 (*cfg->x_label_to_block_map)[uid] = bb;
7089 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7091 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7093 if (uid >= dest_cfun->cfg->last_label_uid)
7094 dest_cfun->cfg->last_label_uid = uid + 1;
7097 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7098 remove_stmt_from_eh_lp_fn (cfun, stmt);
7100 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7101 gimple_remove_stmt_histograms (cfun, stmt);
7103 /* We cannot leave any operands allocated from the operand caches of
7104 the current function. */
7105 free_stmt_operands (cfun, stmt);
7106 push_cfun (dest_cfun);
7107 update_stmt (stmt);
7108 pop_cfun ();
7111 FOR_EACH_EDGE (e, ei, bb->succs)
7112 if (e->goto_locus != UNKNOWN_LOCATION)
7114 tree block = LOCATION_BLOCK (e->goto_locus);
7115 if (d->orig_block == NULL_TREE
7116 || block == d->orig_block)
7117 e->goto_locus = set_block (e->goto_locus, d->new_block);
7121 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7122 the outermost EH region. Use REGION as the incoming base EH region. */
7124 static eh_region
7125 find_outermost_region_in_block (struct function *src_cfun,
7126 basic_block bb, eh_region region)
7128 gimple_stmt_iterator si;
7130 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7132 gimple *stmt = gsi_stmt (si);
7133 eh_region stmt_region;
7134 int lp_nr;
7136 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7137 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7138 if (stmt_region)
7140 if (region == NULL)
7141 region = stmt_region;
7142 else if (stmt_region != region)
7144 region = eh_region_outermost (src_cfun, stmt_region, region);
7145 gcc_assert (region != NULL);
7150 return region;
7153 static tree
7154 new_label_mapper (tree decl, void *data)
7156 htab_t hash = (htab_t) data;
7157 struct tree_map *m;
7158 void **slot;
7160 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7162 m = XNEW (struct tree_map);
7163 m->hash = DECL_UID (decl);
7164 m->base.from = decl;
7165 m->to = create_artificial_label (UNKNOWN_LOCATION);
7166 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7167 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7168 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7170 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7171 gcc_assert (*slot == NULL);
7173 *slot = m;
7175 return m->to;
7178 /* Tree walker to replace the decls used inside value expressions by
7179 duplicates. */
7181 static tree
7182 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7184 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7186 switch (TREE_CODE (*tp))
7188 case VAR_DECL:
7189 case PARM_DECL:
7190 case RESULT_DECL:
7191 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7192 break;
7193 default:
7194 break;
7197 if (IS_TYPE_OR_DECL_P (*tp))
7198 *walk_subtrees = false;
7200 return NULL;
7203 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7204 subblocks. */
7206 static void
7207 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7208 tree to_context)
7210 tree *tp, t;
7212 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7214 t = *tp;
7215 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7216 continue;
7217 replace_by_duplicate_decl (&t, vars_map, to_context);
7218 if (t != *tp)
7220 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7222 tree x = DECL_VALUE_EXPR (*tp);
7223 struct replace_decls_d rd = { vars_map, to_context };
7224 x = unshare_expr (x);
7225 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7226 SET_DECL_VALUE_EXPR (t, x);
7227 DECL_HAS_VALUE_EXPR_P (t) = 1;
7229 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7230 *tp = t;
7234 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7235 replace_block_vars_by_duplicates (block, vars_map, to_context);
7238 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7239 from FN1 to FN2. */
7241 static void
7242 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7243 struct loop *loop)
7245 /* Discard it from the old loop array. */
7246 (*get_loops (fn1))[loop->num] = NULL;
7248 /* Place it in the new loop array, assigning it a new number. */
7249 loop->num = number_of_loops (fn2);
7250 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7252 /* Recurse to children. */
7253 for (loop = loop->inner; loop; loop = loop->next)
7254 fixup_loop_arrays_after_move (fn1, fn2, loop);
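/* For instance, if FN2 already holds loops numbered 0..4, a loop moved
   from FN1 is renumbered to 5 and appended to FN2's loop array, while
   its old slot in FN1's array is left as a NULL hole; the recursion
   then renumbers every subloop the same way. */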
7257 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7258 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7260 DEBUG_FUNCTION void
7261 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7263 basic_block bb;
7264 edge_iterator ei;
7265 edge e;
7266 bitmap bbs = BITMAP_ALLOC (NULL);
7267 int i;
7269 gcc_assert (entry != NULL);
7270 gcc_assert (entry != exit);
7271 gcc_assert (bbs_p != NULL);
7273 gcc_assert (bbs_p->length () > 0);
7275 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7276 bitmap_set_bit (bbs, bb->index);
7278 gcc_assert (bitmap_bit_p (bbs, entry->index));
7279 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7281 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7283 if (bb == entry)
7285 gcc_assert (single_pred_p (entry));
7286 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7288 else
7289 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7291 e = ei_edge (ei);
7292 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7295 if (bb == exit)
7297 gcc_assert (single_succ_p (exit));
7298 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7300 else
7301 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7303 e = ei_edge (ei);
7304 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7308 BITMAP_FREE (bbs);
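/* Typical checking-only use, as in move_sese_region_to_fn below:

     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);
   */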
7311 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7313 bool
7314 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7316 bitmap release_names = (bitmap)data;
7318 if (TREE_CODE (from) != SSA_NAME)
7319 return true;
7321 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7322 return true;
7325 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7326 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7327 single basic block in the original CFG and the new basic block is
7328 returned. DEST_CFUN must not have a CFG yet.
7330 Note that the region need not be a pure SESE region. Blocks inside
7331 the region may contain calls to abort/exit. The only restriction
7332 is that ENTRY_BB should be the only entry point and it must
7333 dominate EXIT_BB.
7335 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7336 function's outermost BLOCK, move all subblocks of ORIG_BLOCK
7337 to the new function.
7339 All local variables referenced in the region are assumed to be in
7340 the corresponding BLOCK_VARS and unexpanded variable lists
7341 associated with DEST_CFUN.
7343 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7344 reimplement move_sese_region_to_fn by duplicating the region rather than
7345 moving it. */
7347 basic_block
7348 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7349 basic_block exit_bb, tree orig_block)
7351 vec<basic_block> bbs, dom_bbs;
7352 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7353 basic_block after, bb, *entry_pred, *exit_succ, abb;
7354 struct function *saved_cfun = cfun;
7355 int *entry_flag, *exit_flag;
7356 profile_probability *entry_prob, *exit_prob;
7357 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7358 edge e;
7359 edge_iterator ei;
7360 htab_t new_label_map;
7361 hash_map<void *, void *> *eh_map;
7362 struct loop *loop = entry_bb->loop_father;
7363 struct loop *loop0 = get_loop (saved_cfun, 0);
7364 struct move_stmt_d d;
7366 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7367 region. */
7368 gcc_assert (entry_bb != exit_bb
7369 && (!exit_bb
7370 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7372 /* Collect all the blocks in the region. Manually add ENTRY_BB
7373 because it won't be added by dfs_enumerate_from. */
7374 bbs.create (0);
7375 bbs.safe_push (entry_bb);
7376 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7378 if (flag_checking)
7379 verify_sese (entry_bb, exit_bb, &bbs);
7381 /* The blocks that used to be dominated by something in BBS will now be
7382 dominated by the new block. */
7383 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7384 bbs.address (),
7385 bbs.length ());
7387 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7388 the predecessor edges to ENTRY_BB and the successor edges to
7389 EXIT_BB so that we can re-attach them to the new basic block that
7390 will replace the region. */
7391 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7392 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7393 entry_flag = XNEWVEC (int, num_entry_edges);
7394 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7395 i = 0;
7396 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7398 entry_prob[i] = e->probability;
7399 entry_flag[i] = e->flags;
7400 entry_pred[i++] = e->src;
7401 remove_edge (e);
7404 if (exit_bb)
7406 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7407 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7408 exit_flag = XNEWVEC (int, num_exit_edges);
7409 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7410 i = 0;
7411 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7413 exit_prob[i] = e->probability;
7414 exit_flag[i] = e->flags;
7415 exit_succ[i++] = e->dest;
7416 remove_edge (e);
7419 else
7421 num_exit_edges = 0;
7422 exit_succ = NULL;
7423 exit_flag = NULL;
7424 exit_prob = NULL;
7427 /* Switch context to the child function to initialize DEST_FN's CFG. */
7428 gcc_assert (dest_cfun->cfg == NULL);
7429 push_cfun (dest_cfun);
7431 init_empty_tree_cfg ();
7433 /* Initialize EH information for the new function. */
7434 eh_map = NULL;
7435 new_label_map = NULL;
7436 if (saved_cfun->eh)
7438 eh_region region = NULL;
7440 FOR_EACH_VEC_ELT (bbs, i, bb)
7441 region = find_outermost_region_in_block (saved_cfun, bb, region);
7443 init_eh_for_function ();
7444 if (region != NULL)
7446 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7447 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7448 new_label_mapper, new_label_map);
7452 /* Initialize an empty loop tree. */
7453 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7454 init_loops_structure (dest_cfun, loops, 1);
7455 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7456 set_loops_for_fn (dest_cfun, loops);
7458 /* Move the outlined loop tree part. */
7459 num_nodes = bbs.length ();
7460 FOR_EACH_VEC_ELT (bbs, i, bb)
7462 if (bb->loop_father->header == bb)
7464 struct loop *this_loop = bb->loop_father;
7465 struct loop *outer = loop_outer (this_loop);
7466 if (outer == loop
7467 /* If the SESE region contains some bbs ending with
7468 a noreturn call, those are considered to belong
7469 to the outermost loop in saved_cfun, rather than
7470 the entry_bb's loop_father. */
7471 || outer == loop0)
7473 if (outer != loop)
7474 num_nodes -= this_loop->num_nodes;
7475 flow_loop_tree_node_remove (bb->loop_father);
7476 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7477 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7480 else if (bb->loop_father == loop0 && loop0 != loop)
7481 num_nodes--;
7483 /* Remove loop exits from the outlined region. */
7484 if (loops_for_fn (saved_cfun)->exits)
7485 FOR_EACH_EDGE (e, ei, bb->succs)
7487 struct loops *l = loops_for_fn (saved_cfun);
7488 loop_exit **slot
7489 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7490 NO_INSERT);
7491 if (slot)
7492 l->exits->clear_slot (slot);
7497 /* Adjust the number of blocks in the tree root of the outlined part. */
7498 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7500 /* Setup a mapping to be used by move_block_to_fn. */
7501 loop->aux = current_loops->tree_root;
7502 loop0->aux = current_loops->tree_root;
7504 pop_cfun ();
7506 /* Move blocks from BBS into DEST_CFUN. */
7507 gcc_assert (bbs.length () >= 2);
7508 after = dest_cfun->cfg->x_entry_block_ptr;
7509 hash_map<tree, tree> vars_map;
7511 memset (&d, 0, sizeof (d));
7512 d.orig_block = orig_block;
7513 d.new_block = DECL_INITIAL (dest_cfun->decl);
7514 d.from_context = cfun->decl;
7515 d.to_context = dest_cfun->decl;
7516 d.vars_map = &vars_map;
7517 d.new_label_map = new_label_map;
7518 d.eh_map = eh_map;
7519 d.remap_decls_p = true;
7521 if (gimple_in_ssa_p (cfun))
7522 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7524 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7525 set_ssa_default_def (dest_cfun, arg, narg);
7526 vars_map.put (arg, narg);
7529 FOR_EACH_VEC_ELT (bbs, i, bb)
7531 /* No need to update edge counts on the last block. They have
7532 already been updated earlier when we detached the region from
7533 the original CFG. */
7534 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7535 after = bb;
7538 loop->aux = NULL;
7539 loop0->aux = NULL;
7540 /* Loop sizes are no longer correct, fix them up. */
7541 loop->num_nodes -= num_nodes;
7542 for (struct loop *outer = loop_outer (loop);
7543 outer; outer = loop_outer (outer))
7544 outer->num_nodes -= num_nodes;
7545 loop0->num_nodes -= bbs.length () - num_nodes;
7547 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7549 struct loop *aloop;
7550 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7551 if (aloop != NULL)
7553 if (aloop->simduid)
7555 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7556 d.to_context);
7557 dest_cfun->has_simduid_loops = true;
7559 if (aloop->force_vectorize)
7560 dest_cfun->has_force_vectorize_loops = true;
7564 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7565 if (orig_block)
7567 tree block;
7568 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7569 == NULL_TREE);
7570 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7571 = BLOCK_SUBBLOCKS (orig_block);
7572 for (block = BLOCK_SUBBLOCKS (orig_block);
7573 block; block = BLOCK_CHAIN (block))
7574 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7575 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7578 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7579 &vars_map, dest_cfun->decl);
7581 if (new_label_map)
7582 htab_delete (new_label_map);
7583 if (eh_map)
7584 delete eh_map;
7586 if (gimple_in_ssa_p (cfun))
7588 /* We need to release ssa-names in a defined order, so first find them,
7589 and then iterate in ascending version order. */
7590 bitmap release_names = BITMAP_ALLOC (NULL);
7591 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7592 bitmap_iterator bi;
7593 unsigned i;
7594 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7595 release_ssa_name (ssa_name (i));
7596 BITMAP_FREE (release_names);
7599 /* Rewire the entry and exit blocks. The successor to the entry
7600 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7601 the child function. Similarly, the predecessor of DEST_FN's
7602 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7603 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7604 various CFG manipulation functions get to the right CFG.
7606 FIXME, this is silly. The CFG ought to become a parameter to
7607 these helpers. */
7608 push_cfun (dest_cfun);
7609 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7610 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7611 if (exit_bb)
7613 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7614 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7616 else
7617 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7618 pop_cfun ();
7620 /* Back in the original function, the SESE region has disappeared,
7621 create a new basic block in its place. */
7622 bb = create_empty_bb (entry_pred[0]);
7623 if (current_loops)
7624 add_bb_to_loop (bb, loop);
7625 for (i = 0; i < num_entry_edges; i++)
7627 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7628 e->probability = entry_prob[i];
7631 for (i = 0; i < num_exit_edges; i++)
7633 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7634 e->probability = exit_prob[i];
7637 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7638 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7639 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7640 dom_bbs.release ();
7642 if (exit_bb)
7644 free (exit_prob);
7645 free (exit_flag);
7646 free (exit_succ);
7648 free (entry_prob);
7649 free (entry_flag);
7650 free (entry_pred);
7651 bbs.release ();
7653 return bb;
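/* A usage sketch (CHILD_FN is hypothetical): outlining passes such as
   the OMP expander carve a region out of the current function roughly
   as follows:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, NEW_BB stands in for the whole region in the parent CFG
   and CHILD_CFUN owns the moved blocks. */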
7656 /* Dump default def DEF to file FILE using FLAGS and indentation
7657 SPC. */
7659 static void
7660 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7662 for (int i = 0; i < spc; ++i)
7663 fprintf (file, " ");
7664 dump_ssaname_info_to_file (file, def, spc);
7666 print_generic_expr (file, TREE_TYPE (def), flags);
7667 fprintf (file, " ");
7668 print_generic_expr (file, def, flags);
7669 fprintf (file, " = ");
7670 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7671 fprintf (file, ";\n");
7674 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7676 static void
7677 print_no_sanitize_attr_value (FILE *file, tree value)
7679 unsigned int flags = tree_to_uhwi (value);
7680 bool first = true;
7681 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7683 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7685 if (!first)
7686 fprintf (file, " | ");
7687 fprintf (file, "%s", sanitizer_opts[i].name);
7688 first = false;
7693 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7696 void
7697 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7699 tree arg, var, old_current_fndecl = current_function_decl;
7700 struct function *dsf;
7701 bool ignore_topmost_bind = false, any_var = false;
7702 basic_block bb;
7703 tree chain;
7704 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7705 && decl_is_tm_clone (fndecl));
7706 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7708 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7710 fprintf (file, "__attribute__((");
7712 bool first = true;
7713 tree chain;
7714 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7715 first = false, chain = TREE_CHAIN (chain))
7717 if (!first)
7718 fprintf (file, ", ");
7720 tree name = get_attribute_name (chain);
7721 print_generic_expr (file, name, dump_flags);
7722 if (TREE_VALUE (chain) != NULL_TREE)
7724 fprintf (file, " (");
7726 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7727 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7728 else
7729 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7730 fprintf (file, ")");
7734 fprintf (file, "))\n");
7737 current_function_decl = fndecl;
7738 if (flags & TDF_GIMPLE)
7740 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7741 dump_flags | TDF_SLIM);
7742 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7744 else
7745 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7747 arg = DECL_ARGUMENTS (fndecl);
7748 while (arg)
7750 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7751 fprintf (file, " ");
7752 print_generic_expr (file, arg, dump_flags);
7753 if (DECL_CHAIN (arg))
7754 fprintf (file, ", ");
7755 arg = DECL_CHAIN (arg);
7757 fprintf (file, ")\n");
7759 dsf = DECL_STRUCT_FUNCTION (fndecl);
7760 if (dsf && (flags & TDF_EH))
7761 dump_eh_tree (file, dsf);
7763 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7765 dump_node (fndecl, TDF_SLIM | flags, file);
7766 current_function_decl = old_current_fndecl;
7767 return;
7770 /* When GIMPLE is lowered, the variables are no longer available in
7771 BIND_EXPRs, so display them separately. */
7772 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7774 unsigned ix;
7775 ignore_topmost_bind = true;
7777 fprintf (file, "{\n");
7778 if (gimple_in_ssa_p (fun)
7779 && (flags & TDF_ALIAS))
7781 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7782 arg = DECL_CHAIN (arg))
7784 tree def = ssa_default_def (fun, arg);
7785 if (def)
7786 dump_default_def (file, def, 2, flags);
7789 tree res = DECL_RESULT (fun->decl);
7790 if (res != NULL_TREE
7791 && DECL_BY_REFERENCE (res))
7793 tree def = ssa_default_def (fun, res);
7794 if (def)
7795 dump_default_def (file, def, 2, flags);
7798 tree static_chain = fun->static_chain_decl;
7799 if (static_chain != NULL_TREE)
7801 tree def = ssa_default_def (fun, static_chain);
7802 if (def)
7803 dump_default_def (file, def, 2, flags);
7807 if (!vec_safe_is_empty (fun->local_decls))
7808 FOR_EACH_LOCAL_DECL (fun, ix, var)
7810 print_generic_decl (file, var, flags);
7811 fprintf (file, "\n");
7813 any_var = true;
7816 tree name;
7818 if (gimple_in_ssa_p (cfun))
7819 FOR_EACH_SSA_NAME (ix, name, cfun)
7821 if (!SSA_NAME_VAR (name))
7823 fprintf (file, " ");
7824 print_generic_expr (file, TREE_TYPE (name), flags);
7825 fprintf (file, " ");
7826 print_generic_expr (file, name, flags);
7827 fprintf (file, ";\n");
7829 any_var = true;
7834 if (fun && fun->decl == fndecl
7835 && fun->cfg
7836 && basic_block_info_for_fn (fun))
7838 /* If the CFG has been built, emit a CFG-based dump. */
7839 if (!ignore_topmost_bind)
7840 fprintf (file, "{\n");
7842 if (any_var && n_basic_blocks_for_fn (fun))
7843 fprintf (file, "\n");
7845 FOR_EACH_BB_FN (bb, fun)
7846 dump_bb (file, bb, 2, flags);
7848 fprintf (file, "}\n");
7850 else if (fun->curr_properties & PROP_gimple_any)
7852 /* The function is now in GIMPLE form but the CFG has not been
7853 built yet. Emit the single sequence of GIMPLE statements
7854 that make up its body. */
7855 gimple_seq body = gimple_body (fndecl);
7857 if (gimple_seq_first_stmt (body)
7858 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7859 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7860 print_gimple_seq (file, body, 0, flags);
7861 else
7863 if (!ignore_topmost_bind)
7864 fprintf (file, "{\n");
7866 if (any_var)
7867 fprintf (file, "\n");
7869 print_gimple_seq (file, body, 2, flags);
7870 fprintf (file, "}\n");
7873 else
7875 int indent;
7877 /* Make a tree based dump. */
7878 chain = DECL_SAVED_TREE (fndecl);
7879 if (chain && TREE_CODE (chain) == BIND_EXPR)
7881 if (ignore_topmost_bind)
7883 chain = BIND_EXPR_BODY (chain);
7884 indent = 2;
7886 else
7887 indent = 0;
7889 else
7891 if (!ignore_topmost_bind)
7893 fprintf (file, "{\n");
7894 /* No topmost bind, pretend it's ignored for later. */
7895 ignore_topmost_bind = true;
7897 indent = 2;
7900 if (any_var)
7901 fprintf (file, "\n");
7903 print_generic_stmt_indented (file, chain, flags, indent);
7904 if (ignore_topmost_bind)
7905 fprintf (file, "}\n");
7908 if (flags & TDF_ENUMERATE_LOCALS)
7909 dump_enumerated_decls (file, flags);
7910 fprintf (file, "\n\n");
7912 current_function_decl = old_current_fndecl;
7915 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7917 DEBUG_FUNCTION void
7918 debug_function (tree fn, dump_flags_t flags)
7920 dump_function_to_file (fn, stderr, flags);
7924 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7926 static void
7927 print_pred_bbs (FILE *file, basic_block bb)
7929 edge e;
7930 edge_iterator ei;
7932 FOR_EACH_EDGE (e, ei, bb->preds)
7933 fprintf (file, "bb_%d ", e->src->index);
7937 /* Print on FILE the indexes for the successors of basic_block BB. */
7939 static void
7940 print_succ_bbs (FILE *file, basic_block bb)
7942 edge e;
7943 edge_iterator ei;
7945 FOR_EACH_EDGE (e, ei, bb->succs)
7946 fprintf (file, "bb_%d ", e->dest->index);
7949 /* Print to FILE the basic block BB following the VERBOSITY level. */
7951 void
7952 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7954 char *s_indent = (char *) alloca ((size_t) indent + 1);
7955 memset ((void *) s_indent, ' ', (size_t) indent);
7956 s_indent[indent] = '\0';
7958 /* Print basic_block's header. */
7959 if (verbosity >= 2)
7961 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7962 print_pred_bbs (file, bb);
7963 fprintf (file, "}, succs = {");
7964 print_succ_bbs (file, bb);
7965 fprintf (file, "})\n");
7968 /* Print basic_block's body. */
7969 if (verbosity >= 3)
7971 fprintf (file, "%s {\n", s_indent);
7972 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7973 fprintf (file, "%s }\n", s_indent);
7977 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7979 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7980 VERBOSITY level this outputs the contents of the loop, or just its
7981 structure. */
7983 static void
7984 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7986 char *s_indent;
7987 basic_block bb;
7989 if (loop == NULL)
7990 return;
7992 s_indent = (char *) alloca ((size_t) indent + 1);
7993 memset ((void *) s_indent, ' ', (size_t) indent);
7994 s_indent[indent] = '\0';
7996 /* Print loop's header. */
7997 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7998 if (loop->header)
7999 fprintf (file, "header = %d", loop->header->index);
8000 else
8002 fprintf (file, "deleted)\n");
8003 return;
8005 if (loop->latch)
8006 fprintf (file, ", latch = %d", loop->latch->index);
8007 else
8008 fprintf (file, ", multiple latches");
8009 fprintf (file, ", niter = ");
8010 print_generic_expr (file, loop->nb_iterations);
8012 if (loop->any_upper_bound)
8014 fprintf (file, ", upper_bound = ");
8015 print_decu (loop->nb_iterations_upper_bound, file);
8017 if (loop->any_likely_upper_bound)
8019 fprintf (file, ", likely_upper_bound = ");
8020 print_decu (loop->nb_iterations_likely_upper_bound, file);
8023 if (loop->any_estimate)
8025 fprintf (file, ", estimate = ");
8026 print_decu (loop->nb_iterations_estimate, file);
8028 if (loop->unroll)
8029 fprintf (file, ", unroll = %d", loop->unroll);
8030 fprintf (file, ")\n");
8032 /* Print loop's body. */
8033 if (verbosity >= 1)
8035 fprintf (file, "%s{\n", s_indent);
8036 FOR_EACH_BB_FN (bb, cfun)
8037 if (bb->loop_father == loop)
8038 print_loops_bb (file, bb, indent, verbosity);
8040 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8041 fprintf (file, "%s}\n", s_indent);
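/* With the header format above, a dumped loop line might look like

     loop_1 (header = 3, latch = 5, niter = , upper_bound = 127)

   followed, at VERBOSITY >= 1, by the member blocks and the nested
   loops between braces. */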
8045 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8046 spaces. Following VERBOSITY level this outputs the contents of the
8047 loop, or just its structure. */
8049 static void
8050 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8051 int verbosity)
8053 if (loop == NULL)
8054 return;
8056 print_loop (file, loop, indent, verbosity);
8057 print_loop_and_siblings (file, loop->next, indent, verbosity);
8060 /* Follow a CFG edge from the entry point of the program, and on entry
8061 of a loop, pretty print the loop structure on FILE. */
8063 void
8064 print_loops (FILE *file, int verbosity)
8066 basic_block bb;
8068 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8069 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8070 if (bb && bb->loop_father)
8071 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8074 /* Dump a loop. */
8076 DEBUG_FUNCTION void
8077 debug (struct loop &ref)
8079 print_loop (stderr, &ref, 0, /*verbosity*/0);
8082 DEBUG_FUNCTION void
8083 debug (struct loop *ptr)
8085 if (ptr)
8086 debug (*ptr);
8087 else
8088 fprintf (stderr, "<nil>\n");
8091 /* Dump a loop verbosely. */
8093 DEBUG_FUNCTION void
8094 debug_verbose (struct loop &ref)
8096 print_loop (stderr, &ref, 0, /*verbosity*/3);
8099 DEBUG_FUNCTION void
8100 debug_verbose (struct loop *ptr)
8102 if (ptr)
8103 debug (*ptr);
8104 else
8105 fprintf (stderr, "<nil>\n");
8109 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8111 DEBUG_FUNCTION void
8112 debug_loops (int verbosity)
8114 print_loops (stderr, verbosity);
8117 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8119 DEBUG_FUNCTION void
8120 debug_loop (struct loop *loop, int verbosity)
8122 print_loop (stderr, loop, 0, verbosity);
8125 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8126 level. */
8128 DEBUG_FUNCTION void
8129 debug_loop_num (unsigned num, int verbosity)
8131 debug_loop (get_loop (cfun, num), verbosity);
8134 /* Return true if BB ends with a call, possibly followed by some
8135 instructions that must stay with the call. Return false
8136 otherwise. */
8138 static bool
8139 gimple_block_ends_with_call_p (basic_block bb)
8141 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8142 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8146 /* Return true if BB ends with a conditional branch. Return false
8147 otherwise. */
8149 static bool
8150 gimple_block_ends_with_condjump_p (const_basic_block bb)
8152 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8153 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8157 /* Return true if statement T may terminate execution of BB in ways not
8158 explicitly represented in the CFG. */
8160 bool
8161 stmt_can_terminate_bb_p (gimple *t)
8163 tree fndecl = NULL_TREE;
8164 int call_flags = 0;
8166 /* An EH exception not handled internally terminates execution of the whole
8167 function. */
8168 if (stmt_can_throw_external (t))
8169 return true;
8171 /* NORETURN and LONGJMP calls already have an edge to exit.
8172 CONST and PURE calls do not need one.
8173 We don't currently check for CONST and PURE here, although
8174 it would be a good idea, because those attributes are
8175 figured out from the RTL in mark_constant_function, and
8176 the counter incrementation code from -fprofile-arcs
8177 leads to different results from -fbranch-probabilities. */
8178 if (is_gimple_call (t))
8180 fndecl = gimple_call_fndecl (t);
8181 call_flags = gimple_call_flags (t);
8184 if (is_gimple_call (t)
8185 && fndecl
8186 && DECL_BUILT_IN (fndecl)
8187 && (call_flags & ECF_NOTHROW)
8188 && !(call_flags & ECF_RETURNS_TWICE)
8189 /* fork() doesn't really return twice, but the effect of
8190 wrapping it in __gcov_fork() which calls __gcov_flush()
8191 and clears the counters before forking has the same
8192 effect as returning twice. Force a fake edge. */
8193 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8194 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8195 return false;
8197 if (is_gimple_call (t))
8199 edge_iterator ei;
8200 edge e;
8201 basic_block bb;
8203 if (call_flags & (ECF_PURE | ECF_CONST)
8204 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8205 return false;
8207 /* Function call may do longjmp, terminate program or do other things.
8208 Special case noreturn calls that have non-abnormal edges out, as in
8209 this case the fact is sufficiently represented by the lack of edges out of T.
8210 if (!(call_flags & ECF_NORETURN))
8211 return true;
8213 bb = gimple_bb (t);
8214 FOR_EACH_EDGE (e, ei, bb->succs)
8215 if ((e->flags & EDGE_FAKE) == 0)
8216 return true;
8219 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8220 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8221 return true;
8223 return false;
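/* For example, a call to an arbitrary external function

     foo (x_1);

   makes this predicate return true (the callee may call exit or
   longjmp), whereas a nothrow const builtin such as

     _2 = __builtin_clz (x_1);

   cannot terminate the block, so the predicate returns false. */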
8227 /* Add fake edges to the function exit for any non-constant and
8228 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8229 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8230 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8231 that were split.
8233 The goal is to expose cases in which entering a basic block does
8234 not imply that all subsequent instructions must be executed. */
8236 static int
8237 gimple_flow_call_edges_add (sbitmap blocks)
8239 int i;
8240 int blocks_split = 0;
8241 int last_bb = last_basic_block_for_fn (cfun);
8242 bool check_last_block = false;
8244 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8245 return 0;
8247 if (! blocks)
8248 check_last_block = true;
8249 else
8250 check_last_block = bitmap_bit_p (blocks,
8251 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8253 /* In the last basic block, before epilogue generation, there will be
8254 a fallthru edge to EXIT. Special care is required if the last insn
8255 of the last basic block is a call because make_edge folds duplicate
8256 edges, which would result in the fallthru edge also being marked
8257 fake, which would result in the fallthru edge being removed by
8258 remove_fake_edges, which would result in an invalid CFG.
8260 Moreover, we can't elide the outgoing fake edge, since the block
8261 profiler needs to take this into account in order to solve the minimal
8262 spanning tree in the case that the call doesn't return.
8264 Handle this by adding a dummy instruction in a new last basic block. */
8265 if (check_last_block)
8267 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8268 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8269 gimple *t = NULL;
8271 if (!gsi_end_p (gsi))
8272 t = gsi_stmt (gsi);
8274 if (t && stmt_can_terminate_bb_p (t))
8276 edge e;
8278 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8279 if (e)
8281 gsi_insert_on_edge (e, gimple_build_nop ());
8282 gsi_commit_edge_inserts ();
8287 /* Now add fake edges to the function exit for any non-constant
8288 calls since there is no way that we can determine if they will
8289 return or not... */
8290 for (i = 0; i < last_bb; i++)
8292 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8293 gimple_stmt_iterator gsi;
8294 gimple *stmt, *last_stmt;
8296 if (!bb)
8297 continue;
8299 if (blocks && !bitmap_bit_p (blocks, i))
8300 continue;
8302 gsi = gsi_last_nondebug_bb (bb);
8303 if (!gsi_end_p (gsi))
8305 last_stmt = gsi_stmt (gsi);
8308 stmt = gsi_stmt (gsi);
8309 if (stmt_can_terminate_bb_p (stmt))
8311 edge e;
8313 /* The handling above of the final block before the
8314 epilogue should be enough to verify that there is
8315 no edge to the exit block in CFG already.
8316 Calling make_edge in such case would cause us to
8317 mark that edge as fake and remove it later. */
8318 if (flag_checking && stmt == last_stmt)
8320 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8321 gcc_assert (e == NULL);
8324 /* Note that the following may create a new basic block
8325 and renumber the existing basic blocks. */
8326 if (stmt != last_stmt)
8328 e = split_block (bb, stmt);
8329 if (e)
8330 blocks_split++;
8332 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8333 e->probability = profile_probability::guessed_never ();
8335 gsi_prev (&gsi);
8337 while (!gsi_end_p (gsi));
8341 if (blocks_split)
8342 checking_verify_flow_info ();
8344 return blocks_split;
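/* A sketch of whole-CFG use: passing a null bitmap processes every
   basic block, which is how profiling-style consumers typically invoke
   the hook:

     int n_split = gimple_flow_call_edges_add (NULL);

   Afterwards each potentially non-returning call has a fake edge to
   the exit block. */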
8347 /* Removes edge E and all the blocks dominated by it, and updates dominance
8348 information. The IL in E->src needs to be updated separately.
8349 If dominance info is not available, only the edge E is removed. */
8351 void
8352 remove_edge_and_dominated_blocks (edge e)
8354 vec<basic_block> bbs_to_remove = vNULL;
8355 vec<basic_block> bbs_to_fix_dom = vNULL;
8356 edge f;
8357 edge_iterator ei;
8358 bool none_removed = false;
8359 unsigned i;
8360 basic_block bb, dbb;
8361 bitmap_iterator bi;
8363 /* If we are removing a path inside a non-root loop, that may change
8364 the loop ownership of blocks or remove loops entirely. Mark loops for fixup. */
8365 if (current_loops
8366 && loop_outer (e->src->loop_father) != NULL
8367 && e->src->loop_father == e->dest->loop_father)
8368 loops_state_set (LOOPS_NEED_FIXUP);
8370 if (!dom_info_available_p (CDI_DOMINATORS))
8372 remove_edge (e);
8373 return;
8376 /* No updating is needed for edges to exit. */
8377 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8379 if (cfgcleanup_altered_bbs)
8380 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8381 remove_edge (e);
8382 return;
8385 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8386 that is not dominated by E->dest, then this set is empty. Otherwise,
8387 all the basic blocks dominated by E->dest are removed.
8389 Also, to DF_IDOM we store the immediate dominators of the blocks in
8390 the dominance frontier of E (i.e., of the successors of the
8391 removed blocks, if there are any, and of E->dest otherwise). */
8392 FOR_EACH_EDGE (f, ei, e->dest->preds)
8394 if (f == e)
8395 continue;
8397 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8399 none_removed = true;
8400 break;
8404 auto_bitmap df, df_idom;
8405 if (none_removed)
8406 bitmap_set_bit (df_idom,
8407 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8408 else
8410 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8411 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8413 FOR_EACH_EDGE (f, ei, bb->succs)
8415 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8416 bitmap_set_bit (df, f->dest->index);
8419 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8420 bitmap_clear_bit (df, bb->index);
8422 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8424 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8425 bitmap_set_bit (df_idom,
8426 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8430 if (cfgcleanup_altered_bbs)
8432 /* Record the set of the altered basic blocks. */
8433 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8434 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8437 /* Remove E and the cancelled blocks. */
8438 if (none_removed)
8439 remove_edge (e);
8440 else
8442 /* Walk backwards so as to get a chance to substitute all
8443 released DEFs into debug stmts. See
8444 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8445 details. */
8446 for (i = bbs_to_remove.length (); i-- > 0; )
8447 delete_basic_block (bbs_to_remove[i]);
8450 /* Update the dominance information. The immediate dominator may change only
8451 for blocks whose immediate dominator belongs to DF_IDOM:
8453 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8454 removal. Let Z be a block such that idom(Z) = Y and
8455 Z dominates X after the removal. Before removal, there exists a path P
8456 from Y to X that avoids Z. Let F be the last edge on P that is
8457 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8458 dominates W, and because of P, Z does not dominate W), and W belongs to
8459 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8460 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8462 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8463 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8464 dbb;
8465 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8466 bbs_to_fix_dom.safe_push (dbb);
8469 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8471 bbs_to_remove.release ();
8472 bbs_to_fix_dom.release ();
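/* Usage sketch (hypothetical helper, not part of GCC): drop the false
   edge of a condition that has been proven always true.  Dominance
   info must be available for the blocks dominated by the removed path
   to go away as well; otherwise, as documented above, only the edge
   itself is removed.  */

static void ATTRIBUTE_UNUSED
example_remove_false_path (basic_block cond_bb)
{
  edge true_e, false_e;
  extract_true_false_edges_from_block (cond_bb, &true_e, &false_e);
  calculate_dominance_info (CDI_DOMINATORS);
  remove_edge_and_dominated_blocks (false_e);
}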
8475 /* Purge dead EH edges from basic block BB. */
8477 bool
8478 gimple_purge_dead_eh_edges (basic_block bb)
8480 bool changed = false;
8481 edge e;
8482 edge_iterator ei;
8483 gimple *stmt = last_stmt (bb);
8485 if (stmt && stmt_can_throw_internal (stmt))
8486 return false;
8488 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8490 if (e->flags & EDGE_EH)
8492 remove_edge_and_dominated_blocks (e);
8493 changed = true;
8495 else
8496 ei_next (&ei);
8499 return changed;
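/* Usage sketch (hypothetical helper): the usual pattern after folding
   a statement, also visible in execute_fixup_cfg below -- if STMT in
   BB lost its ability to throw, its EH successor edges are dead and
   can be purged.  */

static bool ATTRIBUTE_UNUSED
example_purge_after_fold (gimple *stmt, basic_block bb)
{
  return maybe_clean_eh_stmt (stmt) && gimple_purge_dead_eh_edges (bb);
}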
8502 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8504 bool
8505 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8507 bool changed = false;
8508 unsigned i;
8509 bitmap_iterator bi;
8511 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8513 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8515 /* Earlier gimple_purge_dead_eh_edges could have removed
8516 this basic block already. */
8517 gcc_assert (bb || changed);
8518 if (bb != NULL)
8519 changed |= gimple_purge_dead_eh_edges (bb);
8522 return changed;
8525 /* Purge dead abnormal call edges from basic block BB. */
8527 bool
8528 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8530 bool changed = false;
8531 edge e;
8532 edge_iterator ei;
8533 gimple *stmt = last_stmt (bb);
8535 if (!cfun->has_nonlocal_label
8536 && !cfun->calls_setjmp)
8537 return false;
8539 if (stmt && stmt_can_make_abnormal_goto (stmt))
8540 return false;
8542 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8544 if (e->flags & EDGE_ABNORMAL)
8546 if (e->flags & EDGE_FALLTHRU)
8547 e->flags &= ~EDGE_ABNORMAL;
8548 else
8549 remove_edge_and_dominated_blocks (e);
8550 changed = true;
8552 else
8553 ei_next (&ei);
8556 return changed;
8559 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8561 bool
8562 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8564 bool changed = false;
8565 unsigned i;
8566 bitmap_iterator bi;
8568 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8570 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8572 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8573 this basic block already. */
8574 gcc_assert (bb || changed);
8575 if (bb != NULL)
8576 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8579 return changed;
8582 /* This function is called whenever a new edge is created or
8583 redirected. */
8585 static void
8586 gimple_execute_on_growing_pred (edge e)
8588 basic_block bb = e->dest;
8590 if (!gimple_seq_empty_p (phi_nodes (bb)))
8591 reserve_phi_args_for_new_edge (bb);
8594 /* This function is called immediately before edge E is removed from
8595 the edge vector E->dest->preds. */
8597 static void
8598 gimple_execute_on_shrinking_pred (edge e)
8600 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8601 remove_phi_args (e);
8604 /*---------------------------------------------------------------------------
8605 Helper functions for Loop versioning
8606 ---------------------------------------------------------------------------*/
8608 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8609 of 'first'. Both of them are dominated by 'new_head' basic block. When
8610 'new_head' was created by splitting 'second's incoming edge, that edge
8611 received phi arguments from split_edge(). Later, an additional edge 'e'
8612 was created to connect 'new_head' and 'first'. This routine now adds, on
8613 that additional edge 'e', the same phi args that the new_head->second
8614 edge received as part of the edge splitting. */
8616 static void
8617 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8618 basic_block new_head, edge e)
8620 gphi *phi1, *phi2;
8621 gphi_iterator psi1, psi2;
8622 tree def;
8623 edge e2 = find_edge (new_head, second);
8625 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8626 edge, we should always have an edge from NEW_HEAD to SECOND. */
8627 gcc_assert (e2 != NULL);
8629 /* Browse all 'second' basic block phi nodes and add phi args to
8630 edge 'e' for 'first' head. PHI args are always in correct order. */
8632 for (psi2 = gsi_start_phis (second),
8633 psi1 = gsi_start_phis (first);
8634 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8635 gsi_next (&psi2), gsi_next (&psi1))
8637 phi1 = psi1.phi ();
8638 phi2 = psi2.phi ();
8639 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8640 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8645 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8646 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8647 the destination of the ELSE part. */
8649 static void
8650 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8651 basic_block second_head ATTRIBUTE_UNUSED,
8652 basic_block cond_bb, void *cond_e)
8654 gimple_stmt_iterator gsi;
8655 gimple *new_cond_expr;
8656 tree cond_expr = (tree) cond_e;
8657 edge e0;
8659 /* Build new conditional expr */
8660 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8661 NULL_TREE, NULL_TREE);
8663 /* Add new cond in cond_bb. */
8664 gsi = gsi_last_bb (cond_bb);
8665 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8667 /* Adjust edges appropriately to connect new head with first head
8668 as well as second head. */
8669 e0 = single_succ_edge (cond_bb);
8670 e0->flags &= ~EDGE_FALLTHRU;
8671 e0->flags |= EDGE_FALSE_VALUE;
8675 /* Do book-keeping of basic block BB for the profile consistency checker.
8676 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8677 do post-pass accounting. Store the counts in RECORD. */
8678 static void
8679 gimple_account_profile_record (basic_block bb, int after_pass,
8680 struct profile_record *record)
8682 gimple_stmt_iterator i;
8683 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8685 record->size[after_pass]
8686 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8687 if (bb->count.initialized_p ())
8688 record->time[after_pass]
8689 += estimate_num_insns (gsi_stmt (i),
8690 &eni_time_weights) * bb->count.to_gcov_type ();
8691 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8692 record->time[after_pass]
8693 += estimate_num_insns (gsi_stmt (i),
8694 &eni_time_weights) * bb->count.to_frequency (cfun);
8698 struct cfg_hooks gimple_cfg_hooks = {
8699 "gimple",
8700 gimple_verify_flow_info,
8701 gimple_dump_bb, /* dump_bb */
8702 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8703 create_bb, /* create_basic_block */
8704 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8705 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8706 gimple_can_remove_branch_p, /* can_remove_branch_p */
8707 remove_bb, /* delete_basic_block */
8708 gimple_split_block, /* split_block */
8709 gimple_move_block_after, /* move_block_after */
8710 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8711 gimple_merge_blocks, /* merge_blocks */
8712 gimple_predict_edge, /* predict_edge */
8713 gimple_predicted_by_p, /* predicted_by_p */
8714 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8715 gimple_duplicate_bb, /* duplicate_block */
8716 gimple_split_edge, /* split_edge */
8717 gimple_make_forwarder_block, /* make_forwarder_block */
8718 NULL, /* tidy_fallthru_edge */
8719 NULL, /* force_nonfallthru */
8720 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8721 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8722 gimple_flow_call_edges_add, /* flow_call_edges_add */
8723 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8724 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8725 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8726 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8727 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8728 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8729 flush_pending_stmts, /* flush_pending_stmts */
8730 gimple_empty_block_p, /* block_empty_p */
8731 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8732 gimple_account_profile_record,
8736 /* Split all critical edges. */
8738 unsigned int
8739 split_critical_edges (void)
8741 basic_block bb;
8742 edge e;
8743 edge_iterator ei;
8745 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8746 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8747 mappings around the calls to split_edge. */
8748 start_recording_case_labels ();
8749 FOR_ALL_BB_FN (bb, cfun)
8751 FOR_EACH_EDGE (e, ei, bb->succs)
8753 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8754 split_edge (e);
8755 /* PRE inserts statements on edges and expects that,
8756 since split_critical_edges was done beforehand, committing edge
8757 insertions will not split more edges. In addition to critical
8758 edges we must split edges whose source block has multiple
8759 successors and ends with a control flow statement, such as RESX.
8760 Go ahead and split them too. This matches the logic in
8761 gimple_find_edge_insert_loc. */
8762 else if ((!single_pred_p (e->dest)
8763 || !gimple_seq_empty_p (phi_nodes (e->dest))
8764 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8765 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8766 && !(e->flags & EDGE_ABNORMAL))
8768 gimple_stmt_iterator gsi;
8770 gsi = gsi_last_bb (e->src);
8771 if (!gsi_end_p (gsi)
8772 && stmt_ends_bb_p (gsi_stmt (gsi))
8773 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8774 && !gimple_call_builtin_p (gsi_stmt (gsi),
8775 BUILT_IN_RETURN)))
8776 split_edge (e);
8780 end_recording_case_labels ();
8781 return 0;
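/* Illustrative sketch (hypothetical helper, not used by GCC): an edge
   is critical when its source has multiple successors and its
   destination has multiple predecessors, which is exactly what
   EDGE_CRITICAL_P tests.  */

static unsigned ATTRIBUTE_UNUSED
example_count_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;
  unsigned n = 0;

  FOR_EACH_BB_FN (bb, cfun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (EDGE_CRITICAL_P (e))
	n++;
  return n;
}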
8784 namespace {
8786 const pass_data pass_data_split_crit_edges =
8788 GIMPLE_PASS, /* type */
8789 "crited", /* name */
8790 OPTGROUP_NONE, /* optinfo_flags */
8791 TV_TREE_SPLIT_EDGES, /* tv_id */
8792 PROP_cfg, /* properties_required */
8793 PROP_no_crit_edges, /* properties_provided */
8794 0, /* properties_destroyed */
8795 0, /* todo_flags_start */
8796 0, /* todo_flags_finish */
8799 class pass_split_crit_edges : public gimple_opt_pass
8801 public:
8802 pass_split_crit_edges (gcc::context *ctxt)
8803 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8806 /* opt_pass methods: */
8807 virtual unsigned int execute (function *) { return split_critical_edges (); }
8809 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8810 }; // class pass_split_crit_edges
8812 } // anon namespace
8814 gimple_opt_pass *
8815 make_pass_split_crit_edges (gcc::context *ctxt)
8817 return new pass_split_crit_edges (ctxt);
8821 /* Insert COND, a GIMPLE_COND expression, after STMT
8822 in basic block BB, splitting the block as needed and
8823 creating a new conditionally executed basic block.
8824 Update the profile so the new bb is visited with probability PROB.
8825 Return the created basic block. */
8826 basic_block
8827 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8828 profile_probability prob)
8830 edge fall = split_block (bb, stmt);
8831 gimple_stmt_iterator iter = gsi_last_bb (bb);
8832 basic_block new_bb;
8834 /* Insert cond statement. */
8835 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8836 if (gsi_end_p (iter))
8837 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8838 else
8839 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8841 /* Create conditionally executed block. */
8842 new_bb = create_empty_bb (bb);
8843 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8844 e->probability = prob;
8845 new_bb->count = e->count ();
8846 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8848 /* Fix edge for split bb. */
8849 fall->flags = EDGE_FALSE_VALUE;
8850 fall->probability -= e->probability;
8852 /* Update dominance info. */
8853 if (dom_info_available_p (CDI_DOMINATORS))
8855 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8856 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8859 /* Update loop info. */
8860 if (current_loops)
8861 add_bb_to_loop (new_bb, bb->loop_father);
8863 return new_bb;
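/* Usage sketch (hypothetical helper): split BB after STMT, branch on
   GUARD (a GIMPLE_COND) and obtain an empty block executed only when
   GUARD is true, here assumed to be taken rarely.  */

static basic_block ATTRIBUTE_UNUSED
example_insert_unlikely_bb (basic_block bb, gimple *stmt, gimple *guard)
{
  return insert_cond_bb (bb, stmt, guard, profile_probability::unlikely ());
}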
8866 /* Build a ternary operation and gimplify it. Emit code before GSI.
8867 Return the gimple_val holding the result. */
8869 tree
8870 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8871 tree type, tree a, tree b, tree c)
8873 tree ret;
8874 location_t loc = gimple_location (gsi_stmt (*gsi));
8876 ret = fold_build3_loc (loc, code, type, a, b, c);
8877 STRIP_NOPS (ret);
8879 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8880 GSI_SAME_STMT);
8883 /* Build a binary operation and gimplify it. Emit code before GSI.
8884 Return the gimple_val holding the result. */
8886 tree
8887 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8888 tree type, tree a, tree b)
8890 tree ret;
8892 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8893 STRIP_NOPS (ret);
8895 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8896 GSI_SAME_STMT);
8899 /* Build a unary operation and gimplify it. Emit code before GSI.
8900 Return the gimple_val holding the result. */
8902 tree
8903 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8904 tree a)
8906 tree ret;
8908 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8909 STRIP_NOPS (ret);
8911 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8912 GSI_SAME_STMT);
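/* Usage sketch (hypothetical helper): the three builders above compose
   naturally; e.g. materialize (A + B) * C as gimple vals emitted before
   the statement at *GSI.  */

static tree ATTRIBUTE_UNUSED
example_gimplify_madd (gimple_stmt_iterator *gsi, tree type,
		       tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
}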
8917 /* Given a basic block B which ends with a conditional and has
8918 precisely two successors, determine which of the edges is taken if
8919 the conditional is true and which is taken if the conditional is
8920 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8922 void
8923 extract_true_false_edges_from_block (basic_block b,
8924 edge *true_edge,
8925 edge *false_edge)
8927 edge e = EDGE_SUCC (b, 0);
8929 if (e->flags & EDGE_TRUE_VALUE)
8931 *true_edge = e;
8932 *false_edge = EDGE_SUCC (b, 1);
8934 else
8936 *false_edge = e;
8937 *true_edge = EDGE_SUCC (b, 1);
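/* Illustrative sketch (hypothetical helper): under the precondition
   stated above -- B ends in a conditional with exactly two successors
   -- the two out parameters are distinguished by their edge flags.  */

static void ATTRIBUTE_UNUSED
example_check_cond_edges (basic_block cond_bb)
{
  edge true_e, false_e;
  extract_true_false_edges_from_block (cond_bb, &true_e, &false_e);
  gcc_checking_assert (true_e->flags & EDGE_TRUE_VALUE);
  gcc_checking_assert (false_e->flags & EDGE_FALSE_VALUE);
}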
8942 /* From a controlling predicate in the immediate dominator DOM of
8943 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8944 predicate evaluates to true and false and store them to
8945 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8946 they are non-NULL. Returns true if the edges can be determined,
8947 else returns false. */
8949 bool
8950 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8951 edge *true_controlled_edge,
8952 edge *false_controlled_edge)
8954 basic_block bb = phiblock;
8955 edge true_edge, false_edge, tem;
8956 edge e0 = NULL, e1 = NULL;
8958 /* We have to verify that one edge into the PHI node is dominated
8959 by the true edge of the predicate block and the other edge
8960 dominated by the false edge. This ensures that the PHI argument
8961 we are going to take is completely determined by the path we
8962 take from the predicate block.
8963 We can only use BB dominance checks below if the destination of
8964 the true/false edges are dominated by their edge, thus only
8965 have a single predecessor. */
8966 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8967 tem = EDGE_PRED (bb, 0);
8968 if (tem == true_edge
8969 || (single_pred_p (true_edge->dest)
8970 && (tem->src == true_edge->dest
8971 || dominated_by_p (CDI_DOMINATORS,
8972 tem->src, true_edge->dest))))
8973 e0 = tem;
8974 else if (tem == false_edge
8975 || (single_pred_p (false_edge->dest)
8976 && (tem->src == false_edge->dest
8977 || dominated_by_p (CDI_DOMINATORS,
8978 tem->src, false_edge->dest))))
8979 e1 = tem;
8980 else
8981 return false;
8982 tem = EDGE_PRED (bb, 1);
8983 if (tem == true_edge
8984 || (single_pred_p (true_edge->dest)
8985 && (tem->src == true_edge->dest
8986 || dominated_by_p (CDI_DOMINATORS,
8987 tem->src, true_edge->dest))))
8988 e0 = tem;
8989 else if (tem == false_edge
8990 || (single_pred_p (false_edge->dest)
8991 && (tem->src == false_edge->dest
8992 || dominated_by_p (CDI_DOMINATORS,
8993 tem->src, false_edge->dest))))
8994 e1 = tem;
8995 else
8996 return false;
8997 if (!e0 || !e1)
8998 return false;
9000 if (true_controlled_edge)
9001 *true_controlled_edge = e0;
9002 if (false_controlled_edge)
9003 *false_controlled_edge = e1;
9005 return true;
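/* Usage sketch (hypothetical helper): select the PHI argument flowing
   in on the true-controlled path, assuming PHIBLOCK has exactly two
   predecessors and its immediate dominator ends in a conditional.
   Passing NULL for the false edge is allowed, per the comment above.  */

static tree ATTRIBUTE_UNUSED
example_true_controlled_phi_arg (basic_block phiblock, gphi *phi)
{
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phiblock);
  edge true_e;
  if (!extract_true_false_controlled_edges (dom, phiblock, &true_e, NULL))
    return NULL_TREE;
  return PHI_ARG_DEF (phi, true_e->dest_idx);
}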
9008 /* Generate a range test: set *LHS and *RHS such that LHS <= RHS holds
9009 iff INDEX is in [LOW, HIGH]. Place associated stmts before BB's last stmt. */
9011 void
9012 generate_range_test (basic_block bb, tree index, tree low, tree high,
9013 tree *lhs, tree *rhs)
9015 tree type = TREE_TYPE (index);
9016 tree utype = unsigned_type_for (type);
9018 low = fold_convert (type, low);
9019 high = fold_convert (type, high);
9021 tree tmp = make_ssa_name (type);
9022 gassign *sub1
9023 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
9025 *lhs = make_ssa_name (utype);
9026 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
9028 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
9029 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9030 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
9031 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
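/* The emitted test is the classic unsigned-wraparound trick:
   LHS = (unsigned type) (INDEX - LOW) and RHS = HIGH - LOW, so
   LHS <= RHS holds exactly when LOW <= INDEX <= HIGH.  A usage sketch
   (hypothetical helper, assuming BB already ends in a GIMPLE_COND that
   should become the range check):  */

static void ATTRIBUTE_UNUSED
example_rewrite_cond_to_range_test (basic_block bb, tree index,
				    tree low, tree high)
{
  tree lhs, rhs;
  generate_range_test (bb, index, low, high, &lhs, &rhs);
  gcond *cond = as_a <gcond *> (last_stmt (bb));
  gimple_cond_set_code (cond, LE_EXPR);
  gimple_cond_set_lhs (cond, lhs);
  gimple_cond_set_rhs (cond, rhs);
  update_stmt (cond);
}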
9034 /* Emit return warnings. */
9036 namespace {
9038 const pass_data pass_data_warn_function_return =
9040 GIMPLE_PASS, /* type */
9041 "*warn_function_return", /* name */
9042 OPTGROUP_NONE, /* optinfo_flags */
9043 TV_NONE, /* tv_id */
9044 PROP_cfg, /* properties_required */
9045 0, /* properties_provided */
9046 0, /* properties_destroyed */
9047 0, /* todo_flags_start */
9048 0, /* todo_flags_finish */
9051 class pass_warn_function_return : public gimple_opt_pass
9053 public:
9054 pass_warn_function_return (gcc::context *ctxt)
9055 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9058 /* opt_pass methods: */
9059 virtual unsigned int execute (function *);
9061 }; // class pass_warn_function_return
9063 unsigned int
9064 pass_warn_function_return::execute (function *fun)
9066 source_location location;
9067 gimple *last;
9068 edge e;
9069 edge_iterator ei;
9071 if (!targetm.warn_func_return (fun->decl))
9072 return 0;
9074 /* If we have a path to EXIT, then we do return. */
9075 if (TREE_THIS_VOLATILE (fun->decl)
9076 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9078 location = UNKNOWN_LOCATION;
9079 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9080 (e = ei_safe_edge (ei)); )
9082 last = last_stmt (e->src);
9083 if ((gimple_code (last) == GIMPLE_RETURN
9084 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9085 && location == UNKNOWN_LOCATION
9086 && ((location = LOCATION_LOCUS (gimple_location (last)))
9087 != UNKNOWN_LOCATION)
9088 && !optimize)
9089 break;
9090 /* When optimizing, replace return stmts in noreturn functions
9091 with a __builtin_unreachable () call. */
9092 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9094 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9095 gimple *new_stmt = gimple_build_call (fndecl, 0);
9096 gimple_set_location (new_stmt, gimple_location (last));
9097 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9098 gsi_replace (&gsi, new_stmt, true);
9099 remove_edge (e);
9101 else
9102 ei_next (&ei);
9104 if (location == UNKNOWN_LOCATION)
9105 location = cfun->function_end_locus;
9106 warning_at (location, 0, "%<noreturn%> function does return");
9109 /* If we see "return;" in some basic block, then we do reach the end
9110 without returning a value. */
9111 else if (warn_return_type > 0
9112 && !TREE_NO_WARNING (fun->decl)
9113 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9115 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9117 gimple *last = last_stmt (e->src);
9118 greturn *return_stmt = dyn_cast <greturn *> (last);
9119 if (return_stmt
9120 && gimple_return_retval (return_stmt) == NULL
9121 && !gimple_no_warning_p (last))
9123 location = gimple_location (last);
9124 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9125 location = fun->function_end_locus;
9126 warning_at (location, OPT_Wreturn_type,
9127 "control reaches end of non-void function");
9128 TREE_NO_WARNING (fun->decl) = 1;
9129 break;
9132 /* The C++ FE turns fallthrough from the end of a non-void function
9133 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9134 Recognize those too. */
9135 basic_block bb;
9136 if (!TREE_NO_WARNING (fun->decl))
9137 FOR_EACH_BB_FN (bb, fun)
9138 if (EDGE_COUNT (bb->succs) == 0)
9140 gimple *last = last_stmt (bb);
9141 if (last
9142 && (LOCATION_LOCUS (gimple_location (last))
9143 == BUILTINS_LOCATION)
9144 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9146 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9147 gsi_prev_nondebug (&gsi);
9148 gimple *prev = gsi_stmt (gsi);
9149 if (prev == NULL)
9150 location = UNKNOWN_LOCATION;
9151 else
9152 location = gimple_location (prev);
9153 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9154 location = fun->function_end_locus;
9155 warning_at (location, OPT_Wreturn_type,
9156 "control reaches end of non-void function");
9157 TREE_NO_WARNING (fun->decl) = 1;
9158 break;
9162 return 0;
9165 } // anon namespace
9167 gimple_opt_pass *
9168 make_pass_warn_function_return (gcc::context *ctxt)
9170 return new pass_warn_function_return (ctxt);
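/* Illustrative user code (not part of GCC) for the two diagnostics the
   pass above can issue:

     __attribute__ ((noreturn)) void die (int c)
     {
       if (c)
	 __builtin_abort ();
     }			   // warning: 'noreturn' function does return

     int sign (int v)	   // with -Wreturn-type:
     {
       if (v > 0)
	 return 1;
     }			   // warning: control reaches end of non-void
			   // function
*/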
9173 /* Walk a gimplified function and warn for functions whose return value is
9174 ignored and attribute((warn_unused_result)) is set. This is done before
9175 inlining, so we don't have to worry about that. */
9177 static void
9178 do_warn_unused_result (gimple_seq seq)
9180 tree fdecl, ftype;
9181 gimple_stmt_iterator i;
9183 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9185 gimple *g = gsi_stmt (i);
9187 switch (gimple_code (g))
9189 case GIMPLE_BIND:
9190 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9191 break;
9192 case GIMPLE_TRY:
9193 do_warn_unused_result (gimple_try_eval (g));
9194 do_warn_unused_result (gimple_try_cleanup (g));
9195 break;
9196 case GIMPLE_CATCH:
9197 do_warn_unused_result (gimple_catch_handler (
9198 as_a <gcatch *> (g)));
9199 break;
9200 case GIMPLE_EH_FILTER:
9201 do_warn_unused_result (gimple_eh_filter_failure (g));
9202 break;
9204 case GIMPLE_CALL:
9205 if (gimple_call_lhs (g))
9206 break;
9207 if (gimple_call_internal_p (g))
9208 break;
9210 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9211 LHS. All calls whose value is ignored should be
9212 represented like this. Look for the attribute. */
9213 fdecl = gimple_call_fndecl (g);
9214 ftype = gimple_call_fntype (g);
9216 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9218 location_t loc = gimple_location (g);
9220 if (fdecl)
9221 warning_at (loc, OPT_Wunused_result,
9222 "ignoring return value of %qD, "
9223 "declared with attribute warn_unused_result",
9224 fdecl);
9225 else
9226 warning_at (loc, OPT_Wunused_result,
9227 "ignoring return value of function "
9228 "declared with attribute warn_unused_result");
9230 break;
9232 default:
9233 /* Not a container, not a call, or a call whose value is used. */
9234 break;
9239 namespace {
9241 const pass_data pass_data_warn_unused_result =
9243 GIMPLE_PASS, /* type */
9244 "*warn_unused_result", /* name */
9245 OPTGROUP_NONE, /* optinfo_flags */
9246 TV_NONE, /* tv_id */
9247 PROP_gimple_any, /* properties_required */
9248 0, /* properties_provided */
9249 0, /* properties_destroyed */
9250 0, /* todo_flags_start */
9251 0, /* todo_flags_finish */
9254 class pass_warn_unused_result : public gimple_opt_pass
9256 public:
9257 pass_warn_unused_result (gcc::context *ctxt)
9258 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9261 /* opt_pass methods: */
9262 virtual bool gate (function *) { return flag_warn_unused_result; }
9263 virtual unsigned int execute (function *)
9265 do_warn_unused_result (gimple_body (current_function_decl));
9266 return 0;
9269 }; // class pass_warn_unused_result
9271 } // anon namespace
9273 gimple_opt_pass *
9274 make_pass_warn_unused_result (gcc::context *ctxt)
9276 return new pass_warn_unused_result (ctxt);
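/* Illustrative user code (not part of GCC) diagnosed by the pass
   above; the call's value is ignored, so its GIMPLE_CALL has no LHS:

     __attribute__ ((warn_unused_result)) int try_reserve (int n);

     void f (void)
     {
       try_reserve (16);   // warning: ignoring return value of
			   // 'try_reserve', declared with attribute
			   // warn_unused_result
     }
*/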
9279 /* IPA passes, compilation of earlier functions or inlining
9280 might have changed some properties, such as marking functions nothrow,
9281 pure, const or noreturn.
9282 Remove redundant edges and basic blocks, and create new ones if necessary.
9284 This pass can't be executed as a standalone pass from the pass manager,
9285 because between inlining and this fixup verify_flow_info would fail. */
9287 unsigned int
9288 execute_fixup_cfg (void)
9290 basic_block bb;
9291 gimple_stmt_iterator gsi;
9292 int todo = 0;
9293 cgraph_node *node = cgraph_node::get (current_function_decl);
9294 profile_count num = node->count;
9295 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9296 bool scale = num.initialized_p () && !(num == den);
9298 if (scale)
9300 profile_count::adjust_for_ipa_scaling (&num, &den);
9301 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9302 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9303 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9306 FOR_EACH_BB_FN (bb, cfun)
9308 if (scale)
9309 bb->count = bb->count.apply_scale (num, den);
9310 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9312 gimple *stmt = gsi_stmt (gsi);
9313 tree decl = is_gimple_call (stmt)
9314 ? gimple_call_fndecl (stmt)
9315 : NULL;
9316 if (decl)
9318 int flags = gimple_call_flags (stmt);
9319 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9321 if (gimple_purge_dead_abnormal_call_edges (bb))
9322 todo |= TODO_cleanup_cfg;
9324 if (gimple_in_ssa_p (cfun))
9326 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9327 update_stmt (stmt);
9331 if (flags & ECF_NORETURN
9332 && fixup_noreturn_call (stmt))
9333 todo |= TODO_cleanup_cfg;
9336 /* Remove stores to variables we marked write-only.
9337 Keep the access when the store has a side effect, i.e. when the
9338 source is volatile. */
9339 if (gimple_store_p (stmt)
9340 && !gimple_has_side_effects (stmt))
9342 tree lhs = get_base_address (gimple_get_lhs (stmt));
9344 if (VAR_P (lhs)
9345 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9346 && varpool_node::get (lhs)->writeonly)
9348 unlink_stmt_vdef (stmt);
9349 gsi_remove (&gsi, true);
9350 release_defs (stmt);
9351 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9352 continue;
9355 /* For calls we can simply remove the LHS when it is known
9356 to be write-only. */
9357 if (is_gimple_call (stmt)
9358 && gimple_get_lhs (stmt))
9360 tree lhs = get_base_address (gimple_get_lhs (stmt));
9362 if (VAR_P (lhs)
9363 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9364 && varpool_node::get (lhs)->writeonly)
9366 gimple_call_set_lhs (stmt, NULL);
9367 update_stmt (stmt);
9368 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9372 if (maybe_clean_eh_stmt (stmt)
9373 && gimple_purge_dead_eh_edges (bb))
9374 todo |= TODO_cleanup_cfg;
9375 gsi_next (&gsi);
9378 /* If we have a basic block with no successors that does not
9379 end with a control statement or a noreturn call, end it with
9380 a call to __builtin_unreachable. This situation can occur
9381 when inlining a noreturn call that does in fact return. */
9382 if (EDGE_COUNT (bb->succs) == 0)
9384 gimple *stmt = last_stmt (bb);
9385 if (!stmt
9386 || (!is_ctrl_stmt (stmt)
9387 && (!is_gimple_call (stmt)
9388 || !gimple_call_noreturn_p (stmt))))
9390 if (stmt && is_gimple_call (stmt))
9391 gimple_call_set_ctrl_altering (stmt, false);
9392 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9393 stmt = gimple_build_call (fndecl, 0);
9394 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9395 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9396 if (!cfun->after_inlining)
9398 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9399 node->create_edge (cgraph_node::get_create (fndecl),
9400 call_stmt, bb->count);
9405 if (scale)
9406 compute_function_frequency ();
9408 if (current_loops
9409 && (todo & TODO_cleanup_cfg))
9410 loops_state_set (LOOPS_NEED_FIXUP);
9412 return todo;
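/* For example (illustrative user code, not from GCC's testsuite):
   suppose bar () is declared noreturn but can in fact return, and
   foo () inlines it.  The inlined body ends a block that has no
   successor edges and no control statement, so the loop above caps it
   with a __builtin_unreachable () call:

     __attribute__ ((noreturn)) void bar (int c)
     {
       if (c)
	 __builtin_exit (1);
     }					// may actually fall off the end

     int foo (void) { bar (0); return 1; }
*/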
9415 namespace {
9417 const pass_data pass_data_fixup_cfg =
9419 GIMPLE_PASS, /* type */
9420 "fixup_cfg", /* name */
9421 OPTGROUP_NONE, /* optinfo_flags */
9422 TV_NONE, /* tv_id */
9423 PROP_cfg, /* properties_required */
9424 0, /* properties_provided */
9425 0, /* properties_destroyed */
9426 0, /* todo_flags_start */
9427 0, /* todo_flags_finish */
9430 class pass_fixup_cfg : public gimple_opt_pass
9432 public:
9433 pass_fixup_cfg (gcc::context *ctxt)
9434 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9437 /* opt_pass methods: */
9438 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9439 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9441 }; // class pass_fixup_cfg
9443 } // anon namespace
9445 gimple_opt_pass *
9446 make_pass_fixup_cfg (gcc::context *ctxt)
9448 return new pass_fixup_cfg (ctxt);
9451 /* Garbage collection support for edge_def. */
9453 extern void gt_ggc_mx (tree&);
9454 extern void gt_ggc_mx (gimple *&);
9455 extern void gt_ggc_mx (rtx&);
9456 extern void gt_ggc_mx (basic_block&);
9458 static void
9459 gt_ggc_mx (rtx_insn *& x)
9461 if (x)
9462 gt_ggc_mx_rtx_def ((void *) x);
9465 void
9466 gt_ggc_mx (edge_def *e)
9468 tree block = LOCATION_BLOCK (e->goto_locus);
9469 gt_ggc_mx (e->src);
9470 gt_ggc_mx (e->dest);
9471 if (current_ir_type () == IR_GIMPLE)
9472 gt_ggc_mx (e->insns.g);
9473 else
9474 gt_ggc_mx (e->insns.r);
9475 gt_ggc_mx (block);
9478 /* PCH support for edge_def. */
9480 extern void gt_pch_nx (tree&);
9481 extern void gt_pch_nx (gimple *&);
9482 extern void gt_pch_nx (rtx&);
9483 extern void gt_pch_nx (basic_block&);
9485 static void
9486 gt_pch_nx (rtx_insn *& x)
9488 if (x)
9489 gt_pch_nx_rtx_def ((void *) x);
9492 void
9493 gt_pch_nx (edge_def *e)
9495 tree block = LOCATION_BLOCK (e->goto_locus);
9496 gt_pch_nx (e->src);
9497 gt_pch_nx (e->dest);
9498 if (current_ir_type () == IR_GIMPLE)
9499 gt_pch_nx (e->insns.g);
9500 else
9501 gt_pch_nx (e->insns.r);
9502 gt_pch_nx (block);
9505 void
9506 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9508 tree block = LOCATION_BLOCK (e->goto_locus);
9509 op (&(e->src), cookie);
9510 op (&(e->dest), cookie);
9511 if (current_ir_type () == IR_GIMPLE)
9512 op (&(e->insns.g), cookie);
9513 else
9514 op (&(e->insns.r), cookie);
9515 op (&(block), cookie);
9518 #if CHECKING_P
9520 namespace selftest {
9522 /* Helper function for CFG selftests: create a dummy function decl
9523 and push it as cfun. */
9525 static tree
9526 push_fndecl (const char *name)
9528 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9529 /* FIXME: this uses input_location: */
9530 tree fndecl = build_fn_decl (name, fn_type);
9531 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9532 NULL_TREE, integer_type_node);
9533 DECL_RESULT (fndecl) = retval;
9534 push_struct_function (fndecl);
9535 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9536 ASSERT_TRUE (fun != NULL);
9537 init_empty_tree_cfg_for_function (fun);
9538 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9539 ASSERT_EQ (0, n_edges_for_fn (fun));
9540 return fndecl;
9543 /* These tests directly create CFGs.
9544 Compare with the static fns within tree-cfg.c:
9545 - build_gimple_cfg
9546 - make_blocks: calls create_basic_block (seq, bb);
9547 - make_edges. */
9549 /* Verify a simple cfg of the form:
9550 ENTRY -> A -> B -> C -> EXIT. */
9552 static void
9553 test_linear_chain ()
9555 gimple_register_cfg_hooks ();
9557 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9558 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9560 /* Create some empty blocks. */
9561 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9562 basic_block bb_b = create_empty_bb (bb_a);
9563 basic_block bb_c = create_empty_bb (bb_b);
9565 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9566 ASSERT_EQ (0, n_edges_for_fn (fun));
9568 /* Create some edges: a simple linear chain of BBs. */
9569 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9570 make_edge (bb_a, bb_b, 0);
9571 make_edge (bb_b, bb_c, 0);
9572 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9574 /* Verify the edges. */
9575 ASSERT_EQ (4, n_edges_for_fn (fun));
9576 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9577 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9578 ASSERT_EQ (1, bb_a->preds->length ());
9579 ASSERT_EQ (1, bb_a->succs->length ());
9580 ASSERT_EQ (1, bb_b->preds->length ());
9581 ASSERT_EQ (1, bb_b->succs->length ());
9582 ASSERT_EQ (1, bb_c->preds->length ());
9583 ASSERT_EQ (1, bb_c->succs->length ());
9584 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9585 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9587 /* Verify the dominance information.
9588 Each BB in our simple chain should be dominated by the one before
9589 it. */
9590 calculate_dominance_info (CDI_DOMINATORS);
9591 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9592 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9593 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9594 ASSERT_EQ (1, dom_by_b.length ());
9595 ASSERT_EQ (bb_c, dom_by_b[0]);
9596 free_dominance_info (CDI_DOMINATORS);
9597 dom_by_b.release ();
9599 /* Similarly for post-dominance: each BB in our chain is post-dominated
9600 by the one after it. */
9601 calculate_dominance_info (CDI_POST_DOMINATORS);
9602 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9603 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9604 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9605 ASSERT_EQ (1, postdom_by_b.length ());
9606 ASSERT_EQ (bb_a, postdom_by_b[0]);
9607 free_dominance_info (CDI_POST_DOMINATORS);
9608 postdom_by_b.release ();
9610 pop_cfun ();
9613 /* Verify a simple CFG of the form:
9614 ENTRY
9615   |
9616   A
9617  / \
9618 /t   \f
9619 B     C
9620  \   /
9621   \ /
9622    D
9623    |
9624  EXIT. */
9626 static void
9627 test_diamond ()
9629 gimple_register_cfg_hooks ();
9631 tree fndecl = push_fndecl ("cfg_test_diamond");
9632 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9634 /* Create some empty blocks. */
9635 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9636 basic_block bb_b = create_empty_bb (bb_a);
9637 basic_block bb_c = create_empty_bb (bb_a);
9638 basic_block bb_d = create_empty_bb (bb_b);
9640 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9641 ASSERT_EQ (0, n_edges_for_fn (fun));
9643 /* Create the edges. */
9644 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9645 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9646 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9647 make_edge (bb_b, bb_d, 0);
9648 make_edge (bb_c, bb_d, 0);
9649 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9651 /* Verify the edges. */
9652 ASSERT_EQ (6, n_edges_for_fn (fun));
9653 ASSERT_EQ (1, bb_a->preds->length ());
9654 ASSERT_EQ (2, bb_a->succs->length ());
9655 ASSERT_EQ (1, bb_b->preds->length ());
9656 ASSERT_EQ (1, bb_b->succs->length ());
9657 ASSERT_EQ (1, bb_c->preds->length ());
9658 ASSERT_EQ (1, bb_c->succs->length ());
9659 ASSERT_EQ (2, bb_d->preds->length ());
9660 ASSERT_EQ (1, bb_d->succs->length ());
9662 /* Verify the dominance information. */
9663 calculate_dominance_info (CDI_DOMINATORS);
9664 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9665 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9666 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9667 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9668 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9669 dom_by_a.release ();
9670 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9671 ASSERT_EQ (0, dom_by_b.length ());
9672 dom_by_b.release ();
9673 free_dominance_info (CDI_DOMINATORS);
9675 /* Similarly for post-dominance. */
9676 calculate_dominance_info (CDI_POST_DOMINATORS);
9677 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9678 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9679 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9680 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9681 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9682 postdom_by_d.release ();
9683 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9684 ASSERT_EQ (0, postdom_by_b.length ());
9685 postdom_by_b.release ();
9686 free_dominance_info (CDI_POST_DOMINATORS);
9688 pop_cfun ();
9691 /* Verify that we can handle a CFG containing a "complete" aka
9692 fully-connected subgraph (where A B C D below all have edges
9693 pointing to every other node, and also to themselves).
9694 e.g.:
9695 ENTRY  EXIT
9696   |     ^
9697   |     |
9698   \     /
9699    \   /
9700     \ /
9701 A<--->B
9702 ^^ ^^
9703 | \ / |
9704 | X |
9705 | / \ |
9706 VV VV
9707 C<--->D
9710 static void
9711 test_fully_connected ()
9713 gimple_register_cfg_hooks ();
9715 tree fndecl = push_fndecl ("cfg_fully_connected");
9716 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9718 const int n = 4;
9720 /* Create some empty blocks. */
9721 auto_vec <basic_block> subgraph_nodes;
9722 for (int i = 0; i < n; i++)
9723 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9725 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9726 ASSERT_EQ (0, n_edges_for_fn (fun));
9728 /* Create the edges. */
9729 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9730 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9731 for (int i = 0; i < n; i++)
9732 for (int j = 0; j < n; j++)
9733 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9735 /* Verify the edges. */
9736 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9737 /* The first one is linked to ENTRY/EXIT as well as itself and
9738 everything else. */
9739 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9740 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9741 /* The other ones in the subgraph are linked to everything in
9742 the subgraph (including themselves). */
9743 for (int i = 1; i < n; i++)
9745 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9746 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9749 /* Verify the dominance information. */
9750 calculate_dominance_info (CDI_DOMINATORS);
9751 /* The initial block in the subgraph should be dominated by ENTRY. */
9752 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9753 get_immediate_dominator (CDI_DOMINATORS,
9754 subgraph_nodes[0]));
9755 /* Every other block in the subgraph should be dominated by the
9756 initial block. */
9757 for (int i = 1; i < n; i++)
9758 ASSERT_EQ (subgraph_nodes[0],
9759 get_immediate_dominator (CDI_DOMINATORS,
9760 subgraph_nodes[i]));
9761 free_dominance_info (CDI_DOMINATORS);
9763 /* Similarly for post-dominance. */
9764 calculate_dominance_info (CDI_POST_DOMINATORS);
9765 /* The initial block in the subgraph should be postdominated by EXIT. */
9766 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9767 get_immediate_dominator (CDI_POST_DOMINATORS,
9768 subgraph_nodes[0]));
9769 /* Every other block in the subgraph should be postdominated by the
9770 initial block, since that leads to EXIT. */
9771 for (int i = 1; i < n; i++)
9772 ASSERT_EQ (subgraph_nodes[0],
9773 get_immediate_dominator (CDI_POST_DOMINATORS,
9774 subgraph_nodes[i]));
9775 free_dominance_info (CDI_POST_DOMINATORS);
9777 pop_cfun ();
9780 /* Run all of the selftests within this file. */
9782 void
9783 tree_cfg_c_tests ()
9785 test_linear_chain ();
9786 test_diamond ();
9787 test_fully_connected ();
9790 } // namespace selftest
9792 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9793 - loop
9794 - nested loops
9795 - switch statement (a block with many out-edges)
9796 - something that jumps to itself
9797 - etc */
9799 #endif /* CHECKING_P */