gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
/* Execute the CFG construction pass: build the CFG for the current
   function's gimple body and perform the initial cleanups.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
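
/* For instance, in the C fragment

     void *p = &&lab;
     goto *p;    // destination is not a LABEL_DECL: computed goto
   lab:
     goto lab;   // destination is the LABEL_DECL "lab": simple goto

   only the first goto satisfies computed_goto_p.  */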
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
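
/* For example, a sequence of the form

   lab:
     var = {CLOBBER};
     __builtin_unreachable ();

   satisfies gimple_seq_unreachable_p: labels, debug stmts and clobbers
   preceding the trailing __builtin_unreachable () call are ignored.  */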
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      /* Grow the array by roughly a quarter of its current size.  */
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  omp_free_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
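
/* Note that the first value returned for a given locus is 1, never 0:
   a new entry starts at 0 and is incremented before being returned.
   A discriminator of 0 on a basic block therefore means "none assigned
   yet", which assign_discriminators below relies on when it compares
   bb->discriminator and e->dest->discriminator against 0.  */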
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
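
/* As an illustration, for a source line such as

     if (x) foo (); else bar ();

   the condition and both arms share one line, so the successor blocks
   holding the foo () and bar () calls receive distinct nonzero
   discriminators, letting sample-based profiles attribute counts to
   each arm separately.  */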
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
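
/* Callers that redirect many switch edges typically bracket the work
   with this pair, roughly:

     start_recording_case_labels ();
     ... redirect edges; get_cases_for_edge () stays cheap ...
     end_recording_case_labels ();

   so that case vectors are updated through the edge_to_cases map
   instead of being rescanned for every redirection.  */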
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1959 /* Merge block B into block A. */
1961 static void
1962 gimple_merge_blocks (basic_block a, basic_block b)
1964 gimple_stmt_iterator last, gsi;
1965 gphi_iterator psi;
1967 if (dump_file)
1968 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1970 /* Remove all single-valued PHI nodes from block B of the form
1971 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1972 gsi = gsi_last_bb (a);
1973 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1975 gimple *phi = gsi_stmt (psi);
1976 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1977 gimple *copy;
1978 bool may_replace_uses = (virtual_operand_p (def)
1979 || may_propagate_copy (def, use));
1981 /* In case we maintain loop closed ssa form, do not propagate arguments
1982 of loop exit phi nodes. */
1983 if (current_loops
1984 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1985 && !virtual_operand_p (def)
1986 && TREE_CODE (use) == SSA_NAME
1987 && a->loop_father != b->loop_father)
1988 may_replace_uses = false;
1990 if (!may_replace_uses)
1992 gcc_assert (!virtual_operand_p (def));
1994 /* Note that just emitting the copies is fine -- there is no problem
1995 with ordering of phi nodes. This is because A is the single
1996 predecessor of B, therefore results of the phi nodes cannot
1997 appear as arguments of the phi nodes. */
1998 copy = gimple_build_assign (def, use);
1999 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2000 remove_phi_node (&psi, false);
2002 else
2004 /* If we deal with a PHI for virtual operands, we can simply
2005 propagate these without fussing with folding or updating
2006 the stmt. */
2007 if (virtual_operand_p (def))
2009 imm_use_iterator iter;
2010 use_operand_p use_p;
2011 gimple *stmt;
2013 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2014 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2015 SET_USE (use_p, use);
2017 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2018 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2020 else
2021 replace_uses_by (def, use);
2023 remove_phi_node (&psi, true);
2027 /* Ensure that B follows A. */
2028 move_block_after (b, a);
2030 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2031 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2033 /* Remove labels from B and set gimple_bb to A for other statements. */
2034 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2036 gimple *stmt = gsi_stmt (gsi);
2037 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2039 tree label = gimple_label_label (label_stmt);
2040 int lp_nr;
2042 gsi_remove (&gsi, false);
2044 /* Now that we can thread computed gotos, we might have
2045 a situation where we have a forced label in block B.
2046 However, the label at the start of block B might still be
2047 used in other ways (think about the runtime checking for
2048 Fortran assigned gotos). So we cannot just delete the
2049 label. Instead we move the label to the start of block A. */
2050 if (FORCED_LABEL (label))
2052 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2053 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2055 /* Other user labels are kept around in the form of a debug stmt. */
2056 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2058 gimple *dbg = gimple_build_debug_bind (label,
2059 integer_zero_node,
2060 stmt);
2061 gimple_debug_bind_reset_value (dbg);
2062 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2065 lp_nr = EH_LANDING_PAD_NR (label);
2066 if (lp_nr)
2068 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2069 lp->post_landing_pad = NULL;
2072 else
2074 gimple_set_bb (stmt, a);
2075 gsi_next (&gsi);
2079 /* When merging two BBs, if their counts are different, the larger count
2080 is selected as the new bb count. This is to handle inconsistent
2081 profiles. */
2082 if (a->loop_father == b->loop_father)
2084 a->count = a->count.merge (b->count);
2087 /* Merge the sequences. */
2088 last = gsi_last_bb (a);
2089 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2090 set_bb_seq (b, NULL);
2092 if (cfgcleanup_altered_bbs)
2093 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2097 /* Return the one of two successors of BB that is not reachable by a
2098 complex edge, if there is one. Else, return BB. We use
2099 this in optimizations that use post-dominators for their heuristics,
2100 to catch the cases in C++ where function calls are involved. */
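/* E.g. a block ending in a call that may throw has one normal successor
   and one EH successor; the EH edge is part of EDGE_COMPLEX, so the
   normal destination is the one returned here. */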
2102 basic_block
2103 single_noncomplex_succ (basic_block bb)
2105 edge e0, e1;
2106 if (EDGE_COUNT (bb->succs) != 2)
2107 return bb;
2109 e0 = EDGE_SUCC (bb, 0);
2110 e1 = EDGE_SUCC (bb, 1);
2111 if (e0->flags & EDGE_COMPLEX)
2112 return e1->dest;
2113 if (e1->flags & EDGE_COMPLEX)
2114 return e0->dest;
2116 return bb;
2119 /* CALL is a GIMPLE_CALL. Set the current function's calls_* flags. */
2121 void
2122 notice_special_calls (gcall *call)
2124 int flags = gimple_call_flags (call);
2126 if (flags & ECF_MAY_BE_ALLOCA)
2127 cfun->calls_alloca = true;
2128 if (flags & ECF_RETURNS_TWICE)
2129 cfun->calls_setjmp = true;
2133 /* Clear flags set by notice_special_calls. Used by dead code removal
2134 to update the flags. */
2136 void
2137 clear_special_calls (void)
2139 cfun->calls_alloca = false;
2140 cfun->calls_setjmp = false;
2143 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2145 static void
2146 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2148 /* Since this block is no longer reachable, we can just delete all
2149 of its PHI nodes. */
2150 remove_phi_nodes (bb);
2152 /* Remove edges to BB's successors. */
2153 while (EDGE_COUNT (bb->succs) > 0)
2154 remove_edge (EDGE_SUCC (bb, 0));
2158 /* Remove statements of basic block BB. */
2160 static void
2161 remove_bb (basic_block bb)
2163 gimple_stmt_iterator i;
2165 if (dump_file)
2167 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2168 if (dump_flags & TDF_DETAILS)
2170 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2171 fprintf (dump_file, "\n");
2175 if (current_loops)
2177 struct loop *loop = bb->loop_father;
2179 /* If a loop gets removed, clean up the information associated
2180 with it. */
2181 if (loop->latch == bb
2182 || loop->header == bb)
2183 free_numbers_of_iterations_estimates (loop);
2186 /* Remove all the instructions in the block. */
2187 if (bb_seq (bb) != NULL)
2189 /* Walk backwards so as to get a chance to substitute all
2190 released DEFs into debug stmts. See
2191 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2192 details. */
2193 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2195 gimple *stmt = gsi_stmt (i);
2196 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2197 if (label_stmt
2198 && (FORCED_LABEL (gimple_label_label (label_stmt))
2199 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2201 basic_block new_bb;
2202 gimple_stmt_iterator new_gsi;
2204 /* A non-reachable non-local label may still be referenced.
2205 But it no longer needs to carry the extra semantics of
2206 non-locality. */
2207 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2209 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2210 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2213 new_bb = bb->prev_bb;
2214 new_gsi = gsi_start_bb (new_bb);
2215 gsi_remove (&i, false);
2216 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2218 else
2220 /* Release SSA definitions. */
2221 release_defs (stmt);
2222 gsi_remove (&i, true);
2225 if (gsi_end_p (i))
2226 i = gsi_last_bb (bb);
2227 else
2228 gsi_prev (&i);
2232 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2233 bb->il.gimple.seq = NULL;
2234 bb->il.gimple.phi_nodes = NULL;
2238 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2239 predicate VAL, return the edge that will be taken out of the block.
2240 If VAL does not match a unique edge, NULL is returned. */
2242 edge
2243 find_taken_edge (basic_block bb, tree val)
2245 gimple *stmt;
2247 stmt = last_stmt (bb);
2249 gcc_assert (is_ctrl_stmt (stmt));
2251 if (gimple_code (stmt) == GIMPLE_COND)
2252 return find_taken_edge_cond_expr (bb, val);
2254 if (gimple_code (stmt) == GIMPLE_SWITCH)
2255 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2257 if (computed_goto_p (stmt))
2259 /* Only optimize if the argument is a label; if the argument is
2260 not a label then we cannot construct a proper CFG.
2262 It may be the case that we only need to allow the LABEL_REF to
2263 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2264 appear inside a LABEL_EXPR just to be safe. */
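/* As a concrete example (names hypothetical): for the GNU C
   statement "goto *x;" where x was assigned &&lab, VAL is the
   ADDR_EXPR wrapping the LABEL_DECL for lab, and the edge to
   lab's block is the one returned. */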
2265 if (val
2266 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2267 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2268 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2269 return NULL;
2272 gcc_unreachable ();
2275 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2276 statement, determine which of the outgoing edges will be taken out of the
2277 block. Return NULL if any edge may be taken. */
2279 static edge
2280 find_taken_edge_computed_goto (basic_block bb, tree val)
2282 basic_block dest;
2283 edge e = NULL;
2285 dest = label_to_block (val);
2286 if (dest)
2288 e = find_edge (bb, dest);
2289 gcc_assert (e != NULL);
2292 return e;
2295 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2296 statement, determine which of the two edges will be taken out of the
2297 block. Return NULL if either edge may be taken. */
2299 static edge
2300 find_taken_edge_cond_expr (basic_block bb, tree val)
2302 edge true_edge, false_edge;
2304 if (val == NULL
2305 || TREE_CODE (val) != INTEGER_CST)
2306 return NULL;
2308 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2310 return (integer_zerop (val) ? false_edge : true_edge);
2313 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2314 statement, determine which edge will be taken out of the block. Return
2315 NULL if any edge may be taken. */
2317 static edge
2318 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2319 tree val)
2321 basic_block dest_bb;
2322 edge e;
2323 tree taken_case;
2325 if (gimple_switch_num_labels (switch_stmt) == 1)
2326 taken_case = gimple_switch_default_label (switch_stmt);
2327 else if (! val || TREE_CODE (val) != INTEGER_CST)
2328 return NULL;
2329 else
2330 taken_case = find_case_label_for_value (switch_stmt, val);
2331 dest_bb = label_to_block (CASE_LABEL (taken_case));
2333 e = find_edge (bb, dest_bb);
2334 gcc_assert (e);
2335 return e;
2339 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2340 We can make optimal use here of the fact that the case labels are
2341 sorted: We can do a binary search for a case matching VAL. */
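/* Illustrative example (labels hypothetical): with sorted case labels
   1, 3 and 5 ... 10, looking up VAL == 7 halves [low, high) until it
   lands on the 5 ... 10 range label, for which CASE_LOW compares <= 7
   and CASE_HIGH compares >= 7. */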
2343 static tree
2344 find_case_label_for_value (gswitch *switch_stmt, tree val)
2346 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2347 tree default_case = gimple_switch_default_label (switch_stmt);
2349 for (low = 0, high = n; high - low > 1; )
2351 size_t i = (high + low) / 2;
2352 tree t = gimple_switch_label (switch_stmt, i);
2353 int cmp;
2355 /* Cache the result of comparing CASE_LOW and val. */
2356 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2358 if (cmp > 0)
2359 high = i;
2360 else
2361 low = i;
2363 if (CASE_HIGH (t) == NULL)
2365 /* A single-valued case label. */
2366 if (cmp == 0)
2367 return t;
2369 else
2371 /* A case range. We can only handle integer ranges. */
2372 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2373 return t;
2377 return default_case;
2381 /* Dump a basic block on stderr. */
2383 void
2384 gimple_debug_bb (basic_block bb)
2386 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2390 /* Dump basic block with index N on stderr. */
2392 basic_block
2393 gimple_debug_bb_n (int n)
2395 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2396 return BASIC_BLOCK_FOR_FN (cfun, n);
2400 /* Dump the CFG on stderr.
2402 FLAGS are the same as those used by the tree dumping functions
2403 (see TDF_* in dumpfile.h). */
2405 void
2406 gimple_debug_cfg (dump_flags_t flags)
2408 gimple_dump_cfg (stderr, flags);
2412 /* Dump the program showing basic block boundaries on the given FILE.
2414 FLAGS are the same as those used by the tree dumping functions
2415 (see TDF_* in dumpfile.h). */
2417 void
2418 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2420 if (flags & TDF_DETAILS)
2422 dump_function_header (file, current_function_decl, flags);
2423 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2424 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2425 last_basic_block_for_fn (cfun));
2427 brief_dump_cfg (file, flags);
2428 fprintf (file, "\n");
2431 if (flags & TDF_STATS)
2432 dump_cfg_stats (file);
2434 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2438 /* Dump CFG statistics on FILE. */
2440 void
2441 dump_cfg_stats (FILE *file)
2443 static long max_num_merged_labels = 0;
2444 unsigned long size, total = 0;
2445 long num_edges;
2446 basic_block bb;
2447 const char * const fmt_str = "%-30s%-13s%12s\n";
2448 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2449 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2450 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2451 const char *funcname = current_function_name ();
2453 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2455 fprintf (file, "---------------------------------------------------------\n");
2456 fprintf (file, fmt_str, "", " Number of ", "Memory");
2457 fprintf (file, fmt_str, "", " instances ", "used ");
2458 fprintf (file, "---------------------------------------------------------\n");
2460 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2461 total += size;
2462 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2463 SCALE (size), LABEL (size));
2465 num_edges = 0;
2466 FOR_EACH_BB_FN (bb, cfun)
2467 num_edges += EDGE_COUNT (bb->succs);
2468 size = num_edges * sizeof (struct edge_def);
2469 total += size;
2470 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2472 fprintf (file, "---------------------------------------------------------\n");
2473 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2474 LABEL (total));
2475 fprintf (file, "---------------------------------------------------------\n");
2476 fprintf (file, "\n");
2478 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2479 max_num_merged_labels = cfg_stats.num_merged_labels;
2481 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2482 cfg_stats.num_merged_labels, max_num_merged_labels);
2484 fprintf (file, "\n");
2488 /* Dump CFG statistics on stderr. Keep extern so that it's always
2489 linked in the final executable. */
2491 DEBUG_FUNCTION void
2492 debug_cfg_stats (void)
2494 dump_cfg_stats (stderr);
2497 /*---------------------------------------------------------------------------
2498 Miscellaneous helpers
2499 ---------------------------------------------------------------------------*/
2501 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2502 flow. Transfers of control flow associated with EH are excluded. */
2504 static bool
2505 call_can_make_abnormal_goto (gimple *t)
2507 /* If the function has no non-local labels, then a call cannot make an
2508 abnormal transfer of control. */
2509 if (!cfun->has_nonlocal_label
2510 && !cfun->calls_setjmp)
2511 return false;
2513 /* Likewise if the call has no side effects. */
2514 if (!gimple_has_side_effects (t))
2515 return false;
2517 /* Likewise if the called function is leaf. */
2518 if (gimple_call_flags (t) & ECF_LEAF)
2519 return false;
2521 return true;
2525 /* Return true if T can make an abnormal transfer of control flow.
2526 Transfers of control flow associated with EH are excluded. */
2528 bool
2529 stmt_can_make_abnormal_goto (gimple *t)
2531 if (computed_goto_p (t))
2532 return true;
2533 if (is_gimple_call (t))
2534 return call_can_make_abnormal_goto (t);
2535 return false;
2539 /* Return true if T represents a stmt that always transfers control. */
2541 bool
2542 is_ctrl_stmt (gimple *t)
2544 switch (gimple_code (t))
2546 case GIMPLE_COND:
2547 case GIMPLE_SWITCH:
2548 case GIMPLE_GOTO:
2549 case GIMPLE_RETURN:
2550 case GIMPLE_RESX:
2551 return true;
2552 default:
2553 return false;
2558 /* Return true if T is a statement that may alter the flow of control
2559 (e.g., a call to a non-returning function). */
2561 bool
2562 is_ctrl_altering_stmt (gimple *t)
2564 gcc_assert (t);
2566 switch (gimple_code (t))
2568 case GIMPLE_CALL:
2569 /* The per-stmt call flag indicates whether the call could alter
2570 control flow. */
2571 if (gimple_call_ctrl_altering_p (t))
2572 return true;
2573 break;
2575 case GIMPLE_EH_DISPATCH:
2576 /* EH_DISPATCH branches to the individual catch handlers at
2577 this level of a try or allowed-exceptions region. It can
2578 fallthru to the next statement as well. */
2579 return true;
2581 case GIMPLE_ASM:
2582 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2583 return true;
2584 break;
2586 CASE_GIMPLE_OMP:
2587 /* OpenMP directives alter control flow. */
2588 return true;
2590 case GIMPLE_TRANSACTION:
2591 /* A transaction start alters control flow. */
2592 return true;
2594 default:
2595 break;
2598 /* If a statement can throw, it alters control flow. */
2599 return stmt_can_throw_internal (t);
2603 /* Return true if T is a simple local goto. */
2605 bool
2606 simple_goto_p (gimple *t)
2608 return (gimple_code (t) == GIMPLE_GOTO
2609 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2613 /* Return true if STMT should start a new basic block. PREV_STMT is
2614 the statement preceding STMT. It is used when STMT is a label or a
2615 case label. Labels should only start a new basic block if their
2616 previous statement wasn't a label. Otherwise, a sequence of labels
2617 would generate unnecessary basic blocks that only contain a single
2618 label. */
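/* For example (labels hypothetical), in

     L1:
     L2:
       x_1 = 0;

   only L1 opens a new basic block; L2 is merged into it and counted
   in cfg_stats.num_merged_labels below. */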
2620 static inline bool
2621 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2623 if (stmt == NULL)
2624 return false;
2626 /* Labels start a new basic block only if the preceding statement
2627 wasn't a label of the same type. This prevents the creation of
2628 consecutive blocks that have nothing but a single label. */
2629 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2631 /* Nonlocal and computed GOTO targets always start a new block. */
2632 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2633 || FORCED_LABEL (gimple_label_label (label_stmt)))
2634 return true;
2636 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2638 if (DECL_NONLOCAL (gimple_label_label (
2639 as_a <glabel *> (prev_stmt))))
2640 return true;
2642 cfg_stats.num_merged_labels++;
2643 return false;
2645 else
2646 return true;
2648 else if (gimple_code (stmt) == GIMPLE_CALL)
2650 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2651 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2652 start a new block. */
2653 return true;
2654 if (gimple_call_internal_p (stmt, IFN_PHI)
2655 && prev_stmt
2656 && gimple_code (prev_stmt) != GIMPLE_LABEL
2657 && (gimple_code (prev_stmt) != GIMPLE_CALL
2658 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2659 /* PHI nodes start a new block unless preceded by a label
2660 or another PHI. */
2661 return true;
2664 return false;
2668 /* Return true if T should end a basic block. */
2670 bool
2671 stmt_ends_bb_p (gimple *t)
2673 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2676 /* Remove block annotations and other data structures. */
2678 void
2679 delete_tree_cfg_annotations (struct function *fn)
2681 vec_free (label_to_block_map_for_fn (fn));
2684 /* Return the virtual PHI in BB, or NULL if there is none. */
2686 gphi *
2687 get_virtual_phi (basic_block bb)
2689 for (gphi_iterator gsi = gsi_start_phis (bb);
2690 !gsi_end_p (gsi);
2691 gsi_next (&gsi))
2693 gphi *phi = gsi.phi ();
2695 if (virtual_operand_p (PHI_RESULT (phi)))
2696 return phi;
2699 return NULL;
2702 /* Return the first statement in basic block BB. */
2704 gimple *
2705 first_stmt (basic_block bb)
2707 gimple_stmt_iterator i = gsi_start_bb (bb);
2708 gimple *stmt = NULL;
2710 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2712 gsi_next (&i);
2713 stmt = NULL;
2715 return stmt;
2718 /* Return the first non-label statement in basic block BB. */
2720 static gimple *
2721 first_non_label_stmt (basic_block bb)
2723 gimple_stmt_iterator i = gsi_start_bb (bb);
2724 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2725 gsi_next (&i);
2726 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2729 /* Return the last statement in basic block BB. */
2731 gimple *
2732 last_stmt (basic_block bb)
2734 gimple_stmt_iterator i = gsi_last_bb (bb);
2735 gimple *stmt = NULL;
2737 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2739 gsi_prev (&i);
2740 stmt = NULL;
2742 return stmt;
2745 /* Return the last statement of an otherwise empty block. Return NULL
2746 if the block is totally empty, or if it contains more than one
2747 statement. */
2749 gimple *
2750 last_and_only_stmt (basic_block bb)
2752 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2753 gimple *last, *prev;
2755 if (gsi_end_p (i))
2756 return NULL;
2758 last = gsi_stmt (i);
2759 gsi_prev_nondebug (&i);
2760 if (gsi_end_p (i))
2761 return last;
2763 /* Empty statements should no longer appear in the instruction stream.
2764 Everything that might have appeared before should be deleted by
2765 remove_useless_stmts, and the optimizers should just gsi_remove
2766 instead of smashing with build_empty_stmt.
2768 Thus the only thing that should appear here in a block containing
2769 one executable statement is a label. */
2770 prev = gsi_stmt (i);
2771 if (gimple_code (prev) == GIMPLE_LABEL)
2772 return last;
2773 else
2774 return NULL;
2777 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2779 static void
2780 reinstall_phi_args (edge new_edge, edge old_edge)
2782 edge_var_map *vm;
2783 int i;
2784 gphi_iterator phis;
2786 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2787 if (!v)
2788 return;
2790 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2791 v->iterate (i, &vm) && !gsi_end_p (phis);
2792 i++, gsi_next (&phis))
2794 gphi *phi = phis.phi ();
2795 tree result = redirect_edge_var_map_result (vm);
2796 tree arg = redirect_edge_var_map_def (vm);
2798 gcc_assert (result == gimple_phi_result (phi));
2800 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2803 redirect_edge_var_map_clear (old_edge);
2806 /* Returns the basic block after which the new basic block created
2807 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2808 near its "logical" location. This is of most help to humans looking
2809 at debugging dumps. */
2811 basic_block
2812 split_edge_bb_loc (edge edge_in)
2814 basic_block dest = edge_in->dest;
2815 basic_block dest_prev = dest->prev_bb;
2817 if (dest_prev)
2819 edge e = find_edge (dest_prev, dest);
2820 if (e && !(e->flags & EDGE_COMPLEX))
2821 return edge_in->src;
2823 return dest_prev;
2826 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2827 Abort on abnormal edges. */
2829 static basic_block
2830 gimple_split_edge (edge edge_in)
2832 basic_block new_bb, after_bb, dest;
2833 edge new_edge, e;
2835 /* Abnormal edges cannot be split. */
2836 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2838 dest = edge_in->dest;
2840 after_bb = split_edge_bb_loc (edge_in);
2842 new_bb = create_empty_bb (after_bb);
2843 new_bb->count = edge_in->count ();
2845 e = redirect_edge_and_branch (edge_in, new_bb);
2846 gcc_assert (e == edge_in);
2848 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2849 reinstall_phi_args (new_edge, e);
2851 return new_bb;
2855 /* Verify properties of the address expression T with base object BASE. */
2857 static tree
2858 verify_address (tree t, tree base)
2860 bool old_constant;
2861 bool old_side_effects;
2862 bool new_constant;
2863 bool new_side_effects;
2865 old_constant = TREE_CONSTANT (t);
2866 old_side_effects = TREE_SIDE_EFFECTS (t);
2868 recompute_tree_invariant_for_addr_expr (t);
2869 new_side_effects = TREE_SIDE_EFFECTS (t);
2870 new_constant = TREE_CONSTANT (t);
2872 if (old_constant != new_constant)
2874 error ("constant not recomputed when ADDR_EXPR changed");
2875 return t;
2877 if (old_side_effects != new_side_effects)
2879 error ("side effects not recomputed when ADDR_EXPR changed");
2880 return t;
2883 if (!(VAR_P (base)
2884 || TREE_CODE (base) == PARM_DECL
2885 || TREE_CODE (base) == RESULT_DECL))
2886 return NULL_TREE;
2888 if (DECL_GIMPLE_REG_P (base))
2890 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2891 return base;
2894 return NULL_TREE;
2897 /* Callback for walk_tree, check that all elements with address taken are
2898 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2899 inside a PHI node. */
2901 static tree
2902 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2904 tree t = *tp, x;
2906 if (TYPE_P (t))
2907 *walk_subtrees = 0;
2909 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2910 #define CHECK_OP(N, MSG) \
2911 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2912 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2914 switch (TREE_CODE (t))
2916 case SSA_NAME:
2917 if (SSA_NAME_IN_FREE_LIST (t))
2919 error ("SSA name in freelist but still referenced");
2920 return *tp;
2922 break;
2924 case PARM_DECL:
2925 case VAR_DECL:
2926 case RESULT_DECL:
2928 tree context = decl_function_context (t);
2929 if (context != cfun->decl
2930 && !SCOPE_FILE_SCOPE_P (context)
2931 && !TREE_STATIC (t)
2932 && !DECL_EXTERNAL (t))
2934 error ("Local declaration from a different function");
2935 return t;
2938 break;
2940 case INDIRECT_REF:
2941 error ("INDIRECT_REF in gimple IL");
2942 return t;
2944 case MEM_REF:
2945 x = TREE_OPERAND (t, 0);
2946 if (!POINTER_TYPE_P (TREE_TYPE (x))
2947 || !is_gimple_mem_ref_addr (x))
2949 error ("invalid first operand of MEM_REF");
2950 return x;
2952 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2953 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2955 error ("invalid offset operand of MEM_REF");
2956 return TREE_OPERAND (t, 1);
2958 if (TREE_CODE (x) == ADDR_EXPR)
2960 tree va = verify_address (x, TREE_OPERAND (x, 0));
2961 if (va)
2962 return va;
2963 x = TREE_OPERAND (x, 0);
2965 walk_tree (&x, verify_expr, data, NULL);
2966 *walk_subtrees = 0;
2967 break;
2969 case ASSERT_EXPR:
2970 x = fold (ASSERT_EXPR_COND (t));
2971 if (x == boolean_false_node)
2973 error ("ASSERT_EXPR with an always-false condition");
2974 return *tp;
2976 break;
2978 case MODIFY_EXPR:
2979 error ("MODIFY_EXPR not expected while having tuples");
2980 return *tp;
2982 case ADDR_EXPR:
2984 tree tem;
2986 gcc_assert (is_gimple_address (t));
2988 /* Skip any references (they will be checked when we recurse down the
2989 tree) and ensure that any variable used as a prefix is marked
2990 addressable. */
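/* E.g. for t == &a.b[i_1].c (a hypothetical nest), the loop below
   strips the COMPONENT_REFs and the ARRAY_REF so that X ends up as
   the base variable A, which must then have TREE_ADDRESSABLE set. */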
2991 for (x = TREE_OPERAND (t, 0);
2992 handled_component_p (x);
2993 x = TREE_OPERAND (x, 0))
2996 if ((tem = verify_address (t, x)))
2997 return tem;
2999 if (!(VAR_P (x)
3000 || TREE_CODE (x) == PARM_DECL
3001 || TREE_CODE (x) == RESULT_DECL))
3002 return NULL;
3004 if (!TREE_ADDRESSABLE (x))
3006 error ("address taken, but ADDRESSABLE bit not set");
3007 return x;
3010 break;
3013 case COND_EXPR:
3014 x = COND_EXPR_COND (t);
3015 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3017 error ("non-integral used in condition");
3018 return x;
3020 if (!is_gimple_condexpr (x))
3022 error ("invalid conditional operand");
3023 return x;
3025 break;
3027 case NON_LVALUE_EXPR:
3028 case TRUTH_NOT_EXPR:
3029 gcc_unreachable ();
3031 CASE_CONVERT:
3032 case FIX_TRUNC_EXPR:
3033 case FLOAT_EXPR:
3034 case NEGATE_EXPR:
3035 case ABS_EXPR:
3036 case BIT_NOT_EXPR:
3037 CHECK_OP (0, "invalid operand to unary operator");
3038 break;
3040 case REALPART_EXPR:
3041 case IMAGPART_EXPR:
3042 case BIT_FIELD_REF:
3043 if (!is_gimple_reg_type (TREE_TYPE (t)))
3045 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3046 return t;
3049 if (TREE_CODE (t) == BIT_FIELD_REF)
3051 tree t0 = TREE_OPERAND (t, 0);
3052 tree t1 = TREE_OPERAND (t, 1);
3053 tree t2 = TREE_OPERAND (t, 2);
3054 if (!tree_fits_uhwi_p (t1)
3055 || !tree_fits_uhwi_p (t2)
3056 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3057 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3059 error ("invalid position or size operand to BIT_FIELD_REF");
3060 return t;
3062 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3063 && (TYPE_PRECISION (TREE_TYPE (t))
3064 != tree_to_uhwi (t1)))
3066 error ("integral result type precision does not match "
3067 "field size of BIT_FIELD_REF");
3068 return t;
3070 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3071 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3072 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3073 != tree_to_uhwi (t1)))
3075 error ("mode size of non-integral result does not "
3076 "match field size of BIT_FIELD_REF");
3077 return t;
3079 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3080 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3081 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3083 error ("position plus size exceeds size of referenced object in "
3084 "BIT_FIELD_REF");
3085 return t;
3088 t = TREE_OPERAND (t, 0);
3090 /* Fall-through. */
3091 case COMPONENT_REF:
3092 case ARRAY_REF:
3093 case ARRAY_RANGE_REF:
3094 case VIEW_CONVERT_EXPR:
3095 /* We have a nest of references. Verify that each of the operands
3096 that determines where to reference is either a constant or a
3097 variable, verify that the base is valid, and then mark that
3098 we've already checked the subtrees. */
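/* E.g. a reference nest like a.b[i_1].c (hypothetical) is walked
   outermost-in: COMPONENT_REF, then ARRAY_REF (whose index i_1 must
   be a gimple value), then COMPONENT_REF, until only the base A
   remains for the prefix check below. */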
3099 while (handled_component_p (t))
3101 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3102 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3103 else if (TREE_CODE (t) == ARRAY_REF
3104 || TREE_CODE (t) == ARRAY_RANGE_REF)
3106 CHECK_OP (1, "invalid array index");
3107 if (TREE_OPERAND (t, 2))
3108 CHECK_OP (2, "invalid array lower bound");
3109 if (TREE_OPERAND (t, 3))
3110 CHECK_OP (3, "invalid array stride");
3112 else if (TREE_CODE (t) == BIT_FIELD_REF
3113 || TREE_CODE (t) == REALPART_EXPR
3114 || TREE_CODE (t) == IMAGPART_EXPR)
3116 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3117 "REALPART_EXPR");
3118 return t;
3121 t = TREE_OPERAND (t, 0);
3124 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3126 error ("invalid reference prefix");
3127 return t;
3129 walk_tree (&t, verify_expr, data, NULL);
3130 *walk_subtrees = 0;
3131 break;
3132 case PLUS_EXPR:
3133 case MINUS_EXPR:
3134 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3135 arithmetic should be done using POINTER_PLUS_EXPR. */
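/* For instance, the C expression p + 1 with char *p must be
   represented on GIMPLE as POINTER_PLUS_EXPR <p, 1> (the offset is
   in bytes), never as a PLUS_EXPR on the pointer type. */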
3136 if (POINTER_TYPE_P (TREE_TYPE (t)))
3138 error ("invalid operand to plus/minus, type is a pointer");
3139 return t;
3141 CHECK_OP (0, "invalid operand to binary operator");
3142 CHECK_OP (1, "invalid operand to binary operator");
3143 break;
3145 case POINTER_PLUS_EXPR:
3146 /* Check to make sure the first operand is a pointer or reference type. */
3147 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3149 error ("invalid operand to pointer plus, first operand is not a pointer");
3150 return t;
3152 /* Check to make sure the second operand is a ptrofftype. */
3153 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3155 error ("invalid operand to pointer plus, second operand is not an "
3156 "integer type of appropriate width");
3157 return t;
3159 /* FALLTHROUGH */
3160 case LT_EXPR:
3161 case LE_EXPR:
3162 case GT_EXPR:
3163 case GE_EXPR:
3164 case EQ_EXPR:
3165 case NE_EXPR:
3166 case UNORDERED_EXPR:
3167 case ORDERED_EXPR:
3168 case UNLT_EXPR:
3169 case UNLE_EXPR:
3170 case UNGT_EXPR:
3171 case UNGE_EXPR:
3172 case UNEQ_EXPR:
3173 case LTGT_EXPR:
3174 case MULT_EXPR:
3175 case TRUNC_DIV_EXPR:
3176 case CEIL_DIV_EXPR:
3177 case FLOOR_DIV_EXPR:
3178 case ROUND_DIV_EXPR:
3179 case TRUNC_MOD_EXPR:
3180 case CEIL_MOD_EXPR:
3181 case FLOOR_MOD_EXPR:
3182 case ROUND_MOD_EXPR:
3183 case RDIV_EXPR:
3184 case EXACT_DIV_EXPR:
3185 case MIN_EXPR:
3186 case MAX_EXPR:
3187 case LSHIFT_EXPR:
3188 case RSHIFT_EXPR:
3189 case LROTATE_EXPR:
3190 case RROTATE_EXPR:
3191 case BIT_IOR_EXPR:
3192 case BIT_XOR_EXPR:
3193 case BIT_AND_EXPR:
3194 CHECK_OP (0, "invalid operand to binary operator");
3195 CHECK_OP (1, "invalid operand to binary operator");
3196 break;
3198 case CONSTRUCTOR:
3199 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3200 *walk_subtrees = 0;
3201 break;
3203 case CASE_LABEL_EXPR:
3204 if (CASE_CHAIN (t))
3206 error ("invalid CASE_CHAIN");
3207 return t;
3209 break;
3211 default:
3212 break;
3214 return NULL;
3216 #undef CHECK_OP
3220 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3221 Returns true if there is an error, otherwise false. */
3223 static bool
3224 verify_types_in_gimple_min_lval (tree expr)
3226 tree op;
3228 if (is_gimple_id (expr))
3229 return false;
3231 if (TREE_CODE (expr) != TARGET_MEM_REF
3232 && TREE_CODE (expr) != MEM_REF)
3234 error ("invalid expression for min lvalue");
3235 return true;
3238 /* TARGET_MEM_REFs are strange beasts. */
3239 if (TREE_CODE (expr) == TARGET_MEM_REF)
3240 return false;
3242 op = TREE_OPERAND (expr, 0);
3243 if (!is_gimple_val (op))
3245 error ("invalid operand in indirect reference");
3246 debug_generic_stmt (op);
3247 return true;
3249 /* Memory references can now generally involve a value conversion. */
3251 return false;
3254 /* Verify if EXPR is a valid GIMPLE reference expression. If
3255 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3256 if there is an error, otherwise false. */
3258 static bool
3259 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3261 while (handled_component_p (expr))
3263 tree op = TREE_OPERAND (expr, 0);
3265 if (TREE_CODE (expr) == ARRAY_REF
3266 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3268 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3269 || (TREE_OPERAND (expr, 2)
3270 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3271 || (TREE_OPERAND (expr, 3)
3272 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3274 error ("invalid operands to array reference");
3275 debug_generic_stmt (expr);
3276 return true;
3280 /* Verify if the reference array element types are compatible. */
3281 if (TREE_CODE (expr) == ARRAY_REF
3282 && !useless_type_conversion_p (TREE_TYPE (expr),
3283 TREE_TYPE (TREE_TYPE (op))))
3285 error ("type mismatch in array reference");
3286 debug_generic_stmt (TREE_TYPE (expr));
3287 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3288 return true;
3290 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3291 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3292 TREE_TYPE (TREE_TYPE (op))))
3294 error ("type mismatch in array range reference");
3295 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3296 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3297 return true;
3300 if ((TREE_CODE (expr) == REALPART_EXPR
3301 || TREE_CODE (expr) == IMAGPART_EXPR)
3302 && !useless_type_conversion_p (TREE_TYPE (expr),
3303 TREE_TYPE (TREE_TYPE (op))))
3305 error ("type mismatch in real/imagpart reference");
3306 debug_generic_stmt (TREE_TYPE (expr));
3307 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3308 return true;
3311 if (TREE_CODE (expr) == COMPONENT_REF
3312 && !useless_type_conversion_p (TREE_TYPE (expr),
3313 TREE_TYPE (TREE_OPERAND (expr, 1))))
3315 error ("type mismatch in component reference");
3316 debug_generic_stmt (TREE_TYPE (expr));
3317 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3318 return true;
3321 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3323 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
3324 that their operand is not an SSA name or an invariant when
3325 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3326 bug). Otherwise there is nothing to verify; gross mismatches at
3327 most invoke undefined behavior. */
3328 if (require_lvalue
3329 && (TREE_CODE (op) == SSA_NAME
3330 || is_gimple_min_invariant (op)))
3332 error ("conversion of an SSA_NAME on the left hand side");
3333 debug_generic_stmt (expr);
3334 return true;
3336 else if (TREE_CODE (op) == SSA_NAME
3337 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3339 error ("conversion of register to a different size");
3340 debug_generic_stmt (expr);
3341 return true;
3343 else if (!handled_component_p (op))
3344 return false;
3347 expr = op;
3350 if (TREE_CODE (expr) == MEM_REF)
3352 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3354 error ("invalid address operand in MEM_REF");
3355 debug_generic_stmt (expr);
3356 return true;
3358 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3359 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3361 error ("invalid offset operand in MEM_REF");
3362 debug_generic_stmt (expr);
3363 return true;
3366 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3368 if (!TMR_BASE (expr)
3369 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3371 error ("invalid address operand in TARGET_MEM_REF");
3372 return true;
3374 if (!TMR_OFFSET (expr)
3375 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3376 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3378 error ("invalid offset operand in TARGET_MEM_REF");
3379 debug_generic_stmt (expr);
3380 return true;
3384 return ((require_lvalue || !is_gimple_min_invariant (expr))
3385 && verify_types_in_gimple_min_lval (expr));
3388 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3389 list of pointer-to types that is trivially convertible to DEST. */
3391 static bool
3392 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3394 tree src;
3396 if (!TYPE_POINTER_TO (src_obj))
3397 return true;
3399 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3400 if (useless_type_conversion_p (dest, src))
3401 return true;
3403 return false;
3406 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3407 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3409 static bool
3410 valid_fixed_convert_types_p (tree type1, tree type2)
3412 return (FIXED_POINT_TYPE_P (type1)
3413 && (INTEGRAL_TYPE_P (type2)
3414 || SCALAR_FLOAT_TYPE_P (type2)
3415 || FIXED_POINT_TYPE_P (type2)));
3418 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3419 is a problem, otherwise false. */
3421 static bool
3422 verify_gimple_call (gcall *stmt)
3424 tree fn = gimple_call_fn (stmt);
3425 tree fntype, fndecl;
3426 unsigned i;
3428 if (gimple_call_internal_p (stmt))
3430 if (fn)
3432 error ("gimple call has two targets");
3433 debug_generic_stmt (fn);
3434 return true;
3436 /* FIXME: for passing a label as an arg in the internal fn PHI from the GIMPLE FE. */
3437 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3439 return false;
3442 else
3444 if (!fn)
3446 error ("gimple call has no target");
3447 return true;
3451 if (fn && !is_gimple_call_addr (fn))
3453 error ("invalid function in gimple call");
3454 debug_generic_stmt (fn);
3455 return true;
3458 if (fn
3459 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3460 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3461 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3463 error ("non-function in gimple call");
3464 return true;
3467 fndecl = gimple_call_fndecl (stmt);
3468 if (fndecl
3469 && TREE_CODE (fndecl) == FUNCTION_DECL
3470 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3471 && !DECL_PURE_P (fndecl)
3472 && !TREE_READONLY (fndecl))
3474 error ("invalid pure const state for function");
3475 return true;
3478 tree lhs = gimple_call_lhs (stmt);
3479 if (lhs
3480 && (!is_gimple_lvalue (lhs)
3481 || verify_types_in_gimple_reference (lhs, true)))
3483 error ("invalid LHS in gimple call");
3484 return true;
3487 if (gimple_call_ctrl_altering_p (stmt)
3488 && gimple_call_noreturn_p (stmt)
3489 && should_remove_lhs_p (lhs))
3491 error ("LHS in noreturn call");
3492 return true;
3495 fntype = gimple_call_fntype (stmt);
3496 if (fntype
3497 && lhs
3498 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3499 /* ??? At least C++ misses conversions at assignments from
3500 void * call results.
3501 For now simply allow arbitrary pointer type conversions. */
3502 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3503 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3505 error ("invalid conversion in gimple call");
3506 debug_generic_stmt (TREE_TYPE (lhs));
3507 debug_generic_stmt (TREE_TYPE (fntype));
3508 return true;
3511 if (gimple_call_chain (stmt)
3512 && !is_gimple_val (gimple_call_chain (stmt)))
3514 error ("invalid static chain in gimple call");
3515 debug_generic_stmt (gimple_call_chain (stmt));
3516 return true;
3519 /* If there is a static chain argument, the call should either be
3520 indirect, or the decl should have DECL_STATIC_CHAIN set. */
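/* E.g. (hypothetical) a GNU C nested function that reads a local of
   its enclosing function is called with a static chain pointing at
   the enclosing frame, and its FUNCTION_DECL has DECL_STATIC_CHAIN
   set. */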
3521 if (gimple_call_chain (stmt)
3522 && fndecl
3523 && !DECL_STATIC_CHAIN (fndecl))
3525 error ("static chain with function that doesn%'t use one");
3526 return true;
3529 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3531 switch (DECL_FUNCTION_CODE (fndecl))
3533 case BUILT_IN_UNREACHABLE:
3534 case BUILT_IN_TRAP:
3535 if (gimple_call_num_args (stmt) > 0)
3537 /* A built-in unreachable with parameters might not be caught by
3538 the undefined behavior sanitizer. Front ends do check that users
3539 do not call them that way, but we also produce calls to
3540 __builtin_unreachable internally, for example when IPA figures
3541 out a call cannot happen in a legal program. In such cases,
3542 we must make sure arguments are stripped off. */
3543 error ("__builtin_unreachable or __builtin_trap call with "
3544 "arguments");
3545 return true;
3547 break;
3548 default:
3549 break;
3553 /* ??? The C frontend passes unpromoted arguments in case it
3554 didn't see a function declaration before the call. So for now
3555 leave the call arguments mostly unverified. Once we gimplify
3556 unit-at-a-time we have a chance to fix this. */
3558 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3560 tree arg = gimple_call_arg (stmt, i);
3561 if ((is_gimple_reg_type (TREE_TYPE (arg))
3562 && !is_gimple_val (arg))
3563 || (!is_gimple_reg_type (TREE_TYPE (arg))
3564 && !is_gimple_lvalue (arg)))
3566 error ("invalid argument to gimple call");
3567 debug_generic_expr (arg);
3568 return true;
3572 return false;
3575 /* Verifies the gimple comparison with the result type TYPE and
3576 the operands OP0 and OP1; the comparison code is CODE. */
3578 static bool
3579 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3581 tree op0_type = TREE_TYPE (op0);
3582 tree op1_type = TREE_TYPE (op1);
3584 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3586 error ("invalid operands in gimple comparison");
3587 return true;
3590 /* For comparisons we do not have the operation's type as the
3591 effective type the comparison is carried out in. Instead
3592 we require that either the first operand is trivially
3593 convertible into the second, or the other way around.
3594 Because we special-case pointers to void we allow
3595 comparisons of pointers with the same mode as well. */
3596 if (!useless_type_conversion_p (op0_type, op1_type)
3597 && !useless_type_conversion_p (op1_type, op0_type)
3598 && (!POINTER_TYPE_P (op0_type)
3599 || !POINTER_TYPE_P (op1_type)
3600 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3602 error ("mismatching comparison operand types");
3603 debug_generic_expr (op0_type);
3604 debug_generic_expr (op1_type);
3605 return true;
3608 /* The resulting type of a comparison may be an effective boolean type. */
3609 if (INTEGRAL_TYPE_P (type)
3610 && (TREE_CODE (type) == BOOLEAN_TYPE
3611 || TYPE_PRECISION (type) == 1))
3613 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3614 || TREE_CODE (op1_type) == VECTOR_TYPE)
3615 && code != EQ_EXPR && code != NE_EXPR
3616 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3617 && !VECTOR_INTEGER_TYPE_P (op0_type))
3619 error ("unsupported operation or type for vector comparison"
3620 " returning a boolean");
3621 debug_generic_expr (op0_type);
3622 debug_generic_expr (op1_type);
3623 return true;
3626 /* Or a boolean vector type with the same element count
3627 as the comparison operand types. */
3628 else if (TREE_CODE (type) == VECTOR_TYPE
3629 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3631 if (TREE_CODE (op0_type) != VECTOR_TYPE
3632 || TREE_CODE (op1_type) != VECTOR_TYPE)
3634 error ("non-vector operands in vector comparison");
3635 debug_generic_expr (op0_type);
3636 debug_generic_expr (op1_type);
3637 return true;
3640 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3642 error ("invalid vector comparison resulting type");
3643 debug_generic_expr (type);
3644 return true;
3647 else
3649 error ("bogus comparison result type");
3650 debug_generic_expr (type);
3651 return true;
3654 return false;
3657 /* Verify a gimple assignment statement STMT with a unary rhs.
3658 Returns true if anything is wrong. */
3660 static bool
3661 verify_gimple_assign_unary (gassign *stmt)
3663 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3664 tree lhs = gimple_assign_lhs (stmt);
3665 tree lhs_type = TREE_TYPE (lhs);
3666 tree rhs1 = gimple_assign_rhs1 (stmt);
3667 tree rhs1_type = TREE_TYPE (rhs1);
3669 if (!is_gimple_reg (lhs))
3671 error ("non-register as LHS of unary operation");
3672 return true;
3675 if (!is_gimple_val (rhs1))
3677 error ("invalid operand in unary operation");
3678 return true;
3681 /* First handle conversions. */
3682 switch (rhs_code)
3684 CASE_CONVERT:
3686 /* Allow conversions from pointer type to integral type only if
3687 there is no sign or zero extension involved.
3688 For targets where the precision of ptrofftype doesn't match that
3689 of pointers, we need to allow arbitrary conversions to ptrofftype. */
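/* E.g. (hypothetical) i_2 = (uintptr_t) p_1 is accepted since no
   extension is involved, whereas converting p_1 to an integer type
   wider than the pointer would require an extension and is rejected
   here. */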
3690 if ((POINTER_TYPE_P (lhs_type)
3691 && INTEGRAL_TYPE_P (rhs1_type))
3692 || (POINTER_TYPE_P (rhs1_type)
3693 && INTEGRAL_TYPE_P (lhs_type)
3694 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3695 || ptrofftype_p (sizetype))))
3696 return false;
3698 /* Allow conversion from integral to offset type and vice versa. */
3699 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3700 && INTEGRAL_TYPE_P (rhs1_type))
3701 || (INTEGRAL_TYPE_P (lhs_type)
3702 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3703 return false;
3705 /* Otherwise assert we are converting between types of the
3706 same kind. */
3707 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3709 error ("invalid types in nop conversion");
3710 debug_generic_expr (lhs_type);
3711 debug_generic_expr (rhs1_type);
3712 return true;
3715 return false;
3718 case ADDR_SPACE_CONVERT_EXPR:
3720 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3721 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3722 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3724 error ("invalid types in address space conversion");
3725 debug_generic_expr (lhs_type);
3726 debug_generic_expr (rhs1_type);
3727 return true;
3730 return false;
3733 case FIXED_CONVERT_EXPR:
3735 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3736 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3738 error ("invalid types in fixed-point conversion");
3739 debug_generic_expr (lhs_type);
3740 debug_generic_expr (rhs1_type);
3741 return true;
3744 return false;
3747 case FLOAT_EXPR:
3749 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3750 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3751 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3753 error ("invalid types in conversion to floating point");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 return true;
3759 return false;
3762 case FIX_TRUNC_EXPR:
3764 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3765 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3766 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3768 error ("invalid types in conversion to integer");
3769 debug_generic_expr (lhs_type);
3770 debug_generic_expr (rhs1_type);
3771 return true;
3774 return false;
3776 case REDUC_MAX_EXPR:
3777 case REDUC_MIN_EXPR:
3778 case REDUC_PLUS_EXPR:
3779 if (!VECTOR_TYPE_P (rhs1_type)
3780 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3782 error ("reduction should convert from vector to element type");
3783 debug_generic_expr (lhs_type);
3784 debug_generic_expr (rhs1_type);
3785 return true;
3787 return false;
3789 case VEC_UNPACK_HI_EXPR:
3790 case VEC_UNPACK_LO_EXPR:
3791 case VEC_UNPACK_FLOAT_HI_EXPR:
3792 case VEC_UNPACK_FLOAT_LO_EXPR:
3793 /* FIXME. */
3794 return false;
3796 case NEGATE_EXPR:
3797 case ABS_EXPR:
3798 case BIT_NOT_EXPR:
3799 case PAREN_EXPR:
3800 case CONJ_EXPR:
3801 break;
3803 default:
3804 gcc_unreachable ();
3807 /* For the remaining codes assert there is no conversion involved. */
3808 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3810 error ("non-trivial conversion in unary operation");
3811 debug_generic_expr (lhs_type);
3812 debug_generic_expr (rhs1_type);
3813 return true;
3816 return false;
3819 /* Verify a gimple assignment statement STMT with a binary rhs.
3820 Returns true if anything is wrong. */
3822 static bool
3823 verify_gimple_assign_binary (gassign *stmt)
3825 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3826 tree lhs = gimple_assign_lhs (stmt);
3827 tree lhs_type = TREE_TYPE (lhs);
3828 tree rhs1 = gimple_assign_rhs1 (stmt);
3829 tree rhs1_type = TREE_TYPE (rhs1);
3830 tree rhs2 = gimple_assign_rhs2 (stmt);
3831 tree rhs2_type = TREE_TYPE (rhs2);
3833 if (!is_gimple_reg (lhs))
3835 error ("non-register as LHS of binary operation");
3836 return true;
3839 if (!is_gimple_val (rhs1)
3840 || !is_gimple_val (rhs2))
3842 error ("invalid operands in binary operation");
3843 return true;
3846 /* First handle operations that involve different types. */
3847 switch (rhs_code)
3849 case COMPLEX_EXPR:
3851 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3852 || !(INTEGRAL_TYPE_P (rhs1_type)
3853 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3854 || !(INTEGRAL_TYPE_P (rhs2_type)
3855 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3857 error ("type mismatch in complex expression");
3858 debug_generic_expr (lhs_type);
3859 debug_generic_expr (rhs1_type);
3860 debug_generic_expr (rhs2_type);
3861 return true;
3864 return false;
3867 case LSHIFT_EXPR:
3868 case RSHIFT_EXPR:
3869 case LROTATE_EXPR:
3870 case RROTATE_EXPR:
3872 /* Shifts and rotates are ok on integral types, fixed point
3873 types and integer vector types. */
3874 if ((!INTEGRAL_TYPE_P (rhs1_type)
3875 && !FIXED_POINT_TYPE_P (rhs1_type)
3876 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3877 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3878 || (!INTEGRAL_TYPE_P (rhs2_type)
3879 /* Vector shifts of vectors are also ok. */
3880 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3881 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3882 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3883 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3884 || !useless_type_conversion_p (lhs_type, rhs1_type))
3886 error ("type mismatch in shift expression");
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 debug_generic_expr (rhs2_type);
3890 return true;
3893 return false;
3896 case WIDEN_LSHIFT_EXPR:
3898 if (!INTEGRAL_TYPE_P (lhs_type)
3899 || !INTEGRAL_TYPE_P (rhs1_type)
3900 || TREE_CODE (rhs2) != INTEGER_CST
3901 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3903 error ("type mismatch in widening vector shift expression");
3904 debug_generic_expr (lhs_type);
3905 debug_generic_expr (rhs1_type);
3906 debug_generic_expr (rhs2_type);
3907 return true;
3910 return false;
3913 case VEC_WIDEN_LSHIFT_HI_EXPR:
3914 case VEC_WIDEN_LSHIFT_LO_EXPR:
3916 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3917 || TREE_CODE (lhs_type) != VECTOR_TYPE
3918 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3919 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3920 || TREE_CODE (rhs2) != INTEGER_CST
3921 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3922 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3924 error ("type mismatch in widening vector shift expression");
3925 debug_generic_expr (lhs_type);
3926 debug_generic_expr (rhs1_type);
3927 debug_generic_expr (rhs2_type);
3928 return true;
3931 return false;
3934 case PLUS_EXPR:
3935 case MINUS_EXPR:
3937 tree lhs_etype = lhs_type;
3938 tree rhs1_etype = rhs1_type;
3939 tree rhs2_etype = rhs2_type;
3940 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3942 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3943 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3945 error ("invalid non-vector operands to vector valued plus");
3946 return true;
3948 lhs_etype = TREE_TYPE (lhs_type);
3949 rhs1_etype = TREE_TYPE (rhs1_type);
3950 rhs2_etype = TREE_TYPE (rhs2_type);
3952 if (POINTER_TYPE_P (lhs_etype)
3953 || POINTER_TYPE_P (rhs1_etype)
3954 || POINTER_TYPE_P (rhs2_etype))
3956 error ("invalid (pointer) operands to plus/minus");
3957 return true;
3960 /* Continue with generic binary expression handling. */
3961 break;
3964 case POINTER_PLUS_EXPR:
3966 if (!POINTER_TYPE_P (rhs1_type)
3967 || !useless_type_conversion_p (lhs_type, rhs1_type)
3968 || !ptrofftype_p (rhs2_type))
3970 error ("type mismatch in pointer plus expression");
3971 debug_generic_stmt (lhs_type);
3972 debug_generic_stmt (rhs1_type);
3973 debug_generic_stmt (rhs2_type);
3974 return true;
3977 return false;
3980 case TRUTH_ANDIF_EXPR:
3981 case TRUTH_ORIF_EXPR:
3982 case TRUTH_AND_EXPR:
3983 case TRUTH_OR_EXPR:
3984 case TRUTH_XOR_EXPR:
3986 gcc_unreachable ();
3988 case LT_EXPR:
3989 case LE_EXPR:
3990 case GT_EXPR:
3991 case GE_EXPR:
3992 case EQ_EXPR:
3993 case NE_EXPR:
3994 case UNORDERED_EXPR:
3995 case ORDERED_EXPR:
3996 case UNLT_EXPR:
3997 case UNLE_EXPR:
3998 case UNGT_EXPR:
3999 case UNGE_EXPR:
4000 case UNEQ_EXPR:
4001 case LTGT_EXPR:
4002 /* Comparisons are also binary, but the result type is not
4003 connected to the operand types. */
4004 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4006 case WIDEN_MULT_EXPR:
4007 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4008 return true;
4009 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4010 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4012 case WIDEN_SUM_EXPR:
4014 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4015 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4016 && ((!INTEGRAL_TYPE_P (rhs1_type)
4017 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4018 || (!INTEGRAL_TYPE_P (lhs_type)
4019 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4020 || !useless_type_conversion_p (lhs_type, rhs2_type)
4021 || (GET_MODE_SIZE (element_mode (rhs2_type))
4022 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4024 error ("type mismatch in widening sum reduction");
4025 debug_generic_expr (lhs_type);
4026 debug_generic_expr (rhs1_type);
4027 debug_generic_expr (rhs2_type);
4028 return true;
4030 return false;
4033 case VEC_WIDEN_MULT_HI_EXPR:
4034 case VEC_WIDEN_MULT_LO_EXPR:
4035 case VEC_WIDEN_MULT_EVEN_EXPR:
4036 case VEC_WIDEN_MULT_ODD_EXPR:
4038 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4039 || TREE_CODE (lhs_type) != VECTOR_TYPE
4040 || !types_compatible_p (rhs1_type, rhs2_type)
4041 || (GET_MODE_SIZE (element_mode (lhs_type))
4042 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4044 error ("type mismatch in vector widening multiplication");
4045 debug_generic_expr (lhs_type);
4046 debug_generic_expr (rhs1_type);
4047 debug_generic_expr (rhs2_type);
4048 return true;
4050 return false;
4053 case VEC_PACK_TRUNC_EXPR:
4054 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4055 vector boolean types. */
4056 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4057 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4058 && types_compatible_p (rhs1_type, rhs2_type)
4059 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4060 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4061 return false;
4063 /* Fallthru. */
4064 case VEC_PACK_SAT_EXPR:
4065 case VEC_PACK_FIX_TRUNC_EXPR:
4067 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4068 || TREE_CODE (lhs_type) != VECTOR_TYPE
4069 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4070 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4071 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4072 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4073 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4074 || !types_compatible_p (rhs1_type, rhs2_type)
4075 || (GET_MODE_SIZE (element_mode (rhs1_type))
4076 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4078 error ("type mismatch in vector pack expression");
4079 debug_generic_expr (lhs_type);
4080 debug_generic_expr (rhs1_type);
4081 debug_generic_expr (rhs2_type);
4082 return true;
4085 return false;
4088 case MULT_EXPR:
4089 case MULT_HIGHPART_EXPR:
4090 case TRUNC_DIV_EXPR:
4091 case CEIL_DIV_EXPR:
4092 case FLOOR_DIV_EXPR:
4093 case ROUND_DIV_EXPR:
4094 case TRUNC_MOD_EXPR:
4095 case CEIL_MOD_EXPR:
4096 case FLOOR_MOD_EXPR:
4097 case ROUND_MOD_EXPR:
4098 case RDIV_EXPR:
4099 case EXACT_DIV_EXPR:
4100 case MIN_EXPR:
4101 case MAX_EXPR:
4102 case BIT_IOR_EXPR:
4103 case BIT_XOR_EXPR:
4104 case BIT_AND_EXPR:
4105 /* Continue with generic binary expression handling. */
4106 break;
4108 default:
4109 gcc_unreachable ();
4112 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4113 || !useless_type_conversion_p (lhs_type, rhs2_type))
4115 error ("type mismatch in binary expression");
4116 debug_generic_stmt (lhs_type);
4117 debug_generic_stmt (rhs1_type);
4118 debug_generic_stmt (rhs2_type);
4119 return true;
4122 return false;
4125 /* Verify a gimple assignment statement STMT with a ternary rhs.
4126 Returns true if anything is wrong. */
4128 static bool
4129 verify_gimple_assign_ternary (gassign *stmt)
4131 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4132 tree lhs = gimple_assign_lhs (stmt);
4133 tree lhs_type = TREE_TYPE (lhs);
4134 tree rhs1 = gimple_assign_rhs1 (stmt);
4135 tree rhs1_type = TREE_TYPE (rhs1);
4136 tree rhs2 = gimple_assign_rhs2 (stmt);
4137 tree rhs2_type = TREE_TYPE (rhs2);
4138 tree rhs3 = gimple_assign_rhs3 (stmt);
4139 tree rhs3_type = TREE_TYPE (rhs3);
4141 if (!is_gimple_reg (lhs))
4143 error ("non-register as LHS of ternary operation");
4144 return true;
4147 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4148 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4149 || !is_gimple_val (rhs2)
4150 || !is_gimple_val (rhs3))
4152 error ("invalid operands in ternary operation");
4153 return true;
4156 /* First handle operations that involve different types. */
4157 switch (rhs_code)
4159 case WIDEN_MULT_PLUS_EXPR:
4160 case WIDEN_MULT_MINUS_EXPR:
4161 if ((!INTEGRAL_TYPE_P (rhs1_type)
4162 && !FIXED_POINT_TYPE_P (rhs1_type))
4163 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4164 || !useless_type_conversion_p (lhs_type, rhs3_type)
4165 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4166 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4168 error ("type mismatch in widening multiply-accumulate expression");
4169 debug_generic_expr (lhs_type);
4170 debug_generic_expr (rhs1_type);
4171 debug_generic_expr (rhs2_type);
4172 debug_generic_expr (rhs3_type);
4173 return true;
4175 break;
4177 case FMA_EXPR:
4178 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4179 || !useless_type_conversion_p (lhs_type, rhs2_type)
4180 || !useless_type_conversion_p (lhs_type, rhs3_type))
4182 error ("type mismatch in fused multiply-add expression");
4183 debug_generic_expr (lhs_type);
4184 debug_generic_expr (rhs1_type);
4185 debug_generic_expr (rhs2_type);
4186 debug_generic_expr (rhs3_type);
4187 return true;
4189 break;
4191 case VEC_COND_EXPR:
4192 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4193 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4194 != TYPE_VECTOR_SUBPARTS (lhs_type))
4196 error ("the first argument of a VEC_COND_EXPR must be of a "
4197 "boolean vector type of the same number of elements "
4198 "as the result");
4199 debug_generic_expr (lhs_type);
4200 debug_generic_expr (rhs1_type);
4201 return true;
4203 /* Fallthrough. */
4204 case COND_EXPR:
4205 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4206 || !useless_type_conversion_p (lhs_type, rhs3_type))
4208 error ("type mismatch in conditional expression");
4209 debug_generic_expr (lhs_type);
4210 debug_generic_expr (rhs2_type);
4211 debug_generic_expr (rhs3_type);
4212 return true;
4214 break;
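/* An illustrative form accepted by the two checks above:

     vector(4) <signed-boolean:32> m_1;   (mask, 4 elements)
     vector(4) int a_2, b_3, r_4;
     r_4 = VEC_COND_EXPR <m_1, a_2, b_3>;

   The mask must be a vector-boolean type with as many elements as
   the result, and both value operands must convert trivially to
   the result type.  */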
4216 case VEC_PERM_EXPR:
4217 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4218 || !useless_type_conversion_p (lhs_type, rhs2_type))
4220 error ("type mismatch in vector permute expression");
4221 debug_generic_expr (lhs_type);
4222 debug_generic_expr (rhs1_type);
4223 debug_generic_expr (rhs2_type);
4224 debug_generic_expr (rhs3_type);
4225 return true;
4228 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4229 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4230 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4232 error ("vector types expected in vector permute expression");
4233 debug_generic_expr (lhs_type);
4234 debug_generic_expr (rhs1_type);
4235 debug_generic_expr (rhs2_type);
4236 debug_generic_expr (rhs3_type);
4237 return true;
4240 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4241 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4242 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4243 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4244 != TYPE_VECTOR_SUBPARTS (lhs_type))
4246 error ("vectors with different element number found "
4247 "in vector permute expression");
4248 debug_generic_expr (lhs_type);
4249 debug_generic_expr (rhs1_type);
4250 debug_generic_expr (rhs2_type);
4251 debug_generic_expr (rhs3_type);
4252 return true;
4255 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4256 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4257 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4259 error ("invalid mask type in vector permute expression");
4260 debug_generic_expr (lhs_type);
4261 debug_generic_expr (rhs1_type);
4262 debug_generic_expr (rhs2_type);
4263 debug_generic_expr (rhs3_type);
4264 return true;
4267 return false;
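/* A well-formed permute, sketched with invented types:

     vector(4) int a_1, b_2, r_3;
     vector(4) unsigned int sel_4;
     r_3 = VEC_PERM_EXPR <a_1, b_2, sel_4>;

   All four vectors have the same element count, and the mask
   elements are integers with the same bit size (here 32) as the
   data elements.  */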
4269 case SAD_EXPR:
4270 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4271 || !useless_type_conversion_p (lhs_type, rhs3_type)
4272 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4273 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4275 error ("type mismatch in sad expression");
4276 debug_generic_expr (lhs_type);
4277 debug_generic_expr (rhs1_type);
4278 debug_generic_expr (rhs2_type);
4279 debug_generic_expr (rhs3_type);
4280 return true;
4283 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4284 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4285 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4287 error ("vector types expected in sad expression");
4288 debug_generic_expr (lhs_type);
4289 debug_generic_expr (rhs1_type);
4290 debug_generic_expr (rhs2_type);
4291 debug_generic_expr (rhs3_type);
4292 return true;
4295 return false;
4297 case BIT_INSERT_EXPR:
4298 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4300 error ("type mismatch in BIT_INSERT_EXPR");
4301 debug_generic_expr (lhs_type);
4302 debug_generic_expr (rhs1_type);
4303 return true;
4305 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4306 && INTEGRAL_TYPE_P (rhs2_type))
4307 || (VECTOR_TYPE_P (rhs1_type)
4308 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4310 error ("not allowed type combination in BIT_INSERT_EXPR");
4311 debug_generic_expr (rhs1_type);
4312 debug_generic_expr (rhs2_type);
4313 return true;
4315 if (! tree_fits_uhwi_p (rhs3)
4316 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4317 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4319 error ("invalid position or size in BIT_INSERT_EXPR");
4320 return true;
4322 if (INTEGRAL_TYPE_P (rhs1_type))
4324 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4325 if (bitpos >= TYPE_PRECISION (rhs1_type)
4326 || (bitpos + TYPE_PRECISION (rhs2_type)
4327 > TYPE_PRECISION (rhs1_type)))
4329 error ("insertion out of range in BIT_INSERT_EXPR");
4330 return true;
4333 else if (VECTOR_TYPE_P (rhs1_type))
4335 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4336 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4337 if (bitpos % bitsize != 0)
4339 error ("vector insertion not at element boundary");
4340 return true;
4343 return false;
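/* The range rules just enforced, on invented values: inserting an
   8-bit rhs2 into a 32-bit integer rhs1 accepts any constant
   position with bitpos + 8 <= 32 (so bitpos 24 is the maximum) and
   rejects e.g. bitpos 25.  For a vector(4) int destination the
   inserted element is 32 bits wide and bitpos must be one of 0,
   32, 64 or 96, i.e. a multiple of the element size.  */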
4345 case DOT_PROD_EXPR:
4347 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4348 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4349 && ((!INTEGRAL_TYPE_P (rhs1_type)
4350 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4351 || (!INTEGRAL_TYPE_P (lhs_type)
4352 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4353 || !types_compatible_p (rhs1_type, rhs2_type)
4354 || !useless_type_conversion_p (lhs_type, rhs3_type)
4355 || (GET_MODE_SIZE (element_mode (rhs3_type))
4356 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4358 error ("type mismatch in dot product reduction");
4359 debug_generic_expr (lhs_type);
4360 debug_generic_expr (rhs1_type);
4361 debug_generic_expr (rhs2_type);
4362 return true;
4364 return false;
4367 case REALIGN_LOAD_EXPR:
4368 /* FIXME. */
4369 return false;
4371 default:
4372 gcc_unreachable ();
4374 return false;
4377 /* Verify a gimple assignment statement STMT with a single rhs.
4378 Returns true if anything is wrong. */
4380 static bool
4381 verify_gimple_assign_single (gassign *stmt)
4383 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4384 tree lhs = gimple_assign_lhs (stmt);
4385 tree lhs_type = TREE_TYPE (lhs);
4386 tree rhs1 = gimple_assign_rhs1 (stmt);
4387 tree rhs1_type = TREE_TYPE (rhs1);
4388 bool res = false;
4390 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4392 error ("non-trivial conversion at assignment");
4393 debug_generic_expr (lhs_type);
4394 debug_generic_expr (rhs1_type);
4395 return true;
4398 if (gimple_clobber_p (stmt)
4399 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4401 error ("non-decl/MEM_REF LHS in clobber statement");
4402 debug_generic_expr (lhs);
4403 return true;
4406 if (handled_component_p (lhs)
4407 || TREE_CODE (lhs) == MEM_REF
4408 || TREE_CODE (lhs) == TARGET_MEM_REF)
4409 res |= verify_types_in_gimple_reference (lhs, true);
4411 /* Special codes we cannot handle via their class. */
4412 switch (rhs_code)
4414 case ADDR_EXPR:
4416 tree op = TREE_OPERAND (rhs1, 0);
4417 if (!is_gimple_addressable (op))
4419 error ("invalid operand in unary expression");
4420 return true;
4423 /* Technically there is no longer a need for matching types, but
4424 gimple hygiene asks for this check. In LTO we can end up
4425 combining incompatible units and thus with addresses of globals
4426 that change their type to a common one. */
4427 if (!in_lto_p
4428 && !types_compatible_p (TREE_TYPE (op),
4429 TREE_TYPE (TREE_TYPE (rhs1)))
4430 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4431 TREE_TYPE (op)))
4433 error ("type mismatch in address expression");
4434 debug_generic_stmt (TREE_TYPE (rhs1));
4435 debug_generic_stmt (TREE_TYPE (op));
4436 return true;
4439 return verify_types_in_gimple_reference (op, true);
4442 /* tcc_reference */
4443 case INDIRECT_REF:
4444 error ("INDIRECT_REF in gimple IL");
4445 return true;
4447 case COMPONENT_REF:
4448 case BIT_FIELD_REF:
4449 case ARRAY_REF:
4450 case ARRAY_RANGE_REF:
4451 case VIEW_CONVERT_EXPR:
4452 case REALPART_EXPR:
4453 case IMAGPART_EXPR:
4454 case TARGET_MEM_REF:
4455 case MEM_REF:
4456 if (!is_gimple_reg (lhs)
4457 && is_gimple_reg_type (TREE_TYPE (lhs)))
4459 error ("invalid rhs for gimple memory store");
4460 debug_generic_stmt (lhs);
4461 debug_generic_stmt (rhs1);
4462 return true;
4464 return res || verify_types_in_gimple_reference (rhs1, false);
4466 /* tcc_constant */
4467 case SSA_NAME:
4468 case INTEGER_CST:
4469 case REAL_CST:
4470 case FIXED_CST:
4471 case COMPLEX_CST:
4472 case VECTOR_CST:
4473 case STRING_CST:
4474 return res;
4476 /* tcc_declaration */
4477 case CONST_DECL:
4478 return res;
4479 case VAR_DECL:
4480 case PARM_DECL:
4481 if (!is_gimple_reg (lhs)
4482 && !is_gimple_reg (rhs1)
4483 && is_gimple_reg_type (TREE_TYPE (lhs)))
4485 error ("invalid rhs for gimple memory store");
4486 debug_generic_stmt (lhs);
4487 debug_generic_stmt (rhs1);
4488 return true;
4490 return res;
4492 case CONSTRUCTOR:
4493 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4495 unsigned int i;
4496 tree elt_i, elt_v, elt_t = NULL_TREE;
4498 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4499 return res;
4500 /* For vector CONSTRUCTORs we require that either it is an empty
4501 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4502 (then the element count must be correct to cover the whole
4503 outer vector and the index must be NULL on all elements), or it
4504 is a CONSTRUCTOR of scalar elements, where we as an exception
4505 allow a smaller number of elements (assuming zero filling) and
4506 consecutive indexes in addition to NULL indexes (such
4507 CONSTRUCTORs can appear in the IL from FEs). */
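/* Illustratively (names invented), for vector(4) int both

     r_1 = { v2si_a, v2si_b };   (two 2-element vectors, NULL indexes)
     r_2 = { s_1, s_2 };         (two scalars; elements 2 and 3
                                  implicitly zero)

   pass the loop below, while { v2si_a } alone (covering only half
   the result) or a scalar element with a non-consecutive index
   would be rejected.  */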
4508 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4510 if (elt_t == NULL_TREE)
4512 elt_t = TREE_TYPE (elt_v);
4513 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4515 tree elt_t = TREE_TYPE (elt_v);
4516 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4517 TREE_TYPE (elt_t)))
4519 error ("incorrect type of vector CONSTRUCTOR"
4520 " elements");
4521 debug_generic_stmt (rhs1);
4522 return true;
4524 else if (CONSTRUCTOR_NELTS (rhs1)
4525 * TYPE_VECTOR_SUBPARTS (elt_t)
4526 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4528 error ("incorrect number of vector CONSTRUCTOR"
4529 " elements");
4530 debug_generic_stmt (rhs1);
4531 return true;
4534 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4535 elt_t))
4537 error ("incorrect type of vector CONSTRUCTOR elements");
4538 debug_generic_stmt (rhs1);
4539 return true;
4541 else if (CONSTRUCTOR_NELTS (rhs1)
4542 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4544 error ("incorrect number of vector CONSTRUCTOR elements");
4545 debug_generic_stmt (rhs1);
4546 return true;
4549 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4551 error ("incorrect type of vector CONSTRUCTOR elements");
4552 debug_generic_stmt (rhs1);
4553 return true;
4555 if (elt_i != NULL_TREE
4556 && (TREE_CODE (elt_t) == VECTOR_TYPE
4557 || TREE_CODE (elt_i) != INTEGER_CST
4558 || compare_tree_int (elt_i, i) != 0))
4560 error ("vector CONSTRUCTOR with non-NULL element index");
4561 debug_generic_stmt (rhs1);
4562 return true;
4564 if (!is_gimple_val (elt_v))
4566 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4567 debug_generic_stmt (rhs1);
4568 return true;
4572 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4574 error ("non-vector CONSTRUCTOR with elements");
4575 debug_generic_stmt (rhs1);
4576 return true;
4578 return res;
4579 case OBJ_TYPE_REF:
4580 case ASSERT_EXPR:
4581 case WITH_SIZE_EXPR:
4582 /* FIXME. */
4583 return res;
4585 default:;
4588 return res;
4591 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4592 is a problem, otherwise false. */
4594 static bool
4595 verify_gimple_assign (gassign *stmt)
4597 switch (gimple_assign_rhs_class (stmt))
4599 case GIMPLE_SINGLE_RHS:
4600 return verify_gimple_assign_single (stmt);
4602 case GIMPLE_UNARY_RHS:
4603 return verify_gimple_assign_unary (stmt);
4605 case GIMPLE_BINARY_RHS:
4606 return verify_gimple_assign_binary (stmt);
4608 case GIMPLE_TERNARY_RHS:
4609 return verify_gimple_assign_ternary (stmt);
4611 default:
4612 gcc_unreachable ();
4616 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4617 is a problem, otherwise false. */
4619 static bool
4620 verify_gimple_return (greturn *stmt)
4622 tree op = gimple_return_retval (stmt);
4623 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4625 /* We cannot test for present return values as we do not fix up missing
4626 return values from the original source. */
4627 if (op == NULL)
4628 return false;
4630 if (!is_gimple_val (op)
4631 && TREE_CODE (op) != RESULT_DECL)
4633 error ("invalid operand in return statement");
4634 debug_generic_stmt (op);
4635 return true;
4638 if ((TREE_CODE (op) == RESULT_DECL
4639 && DECL_BY_REFERENCE (op))
4640 || (TREE_CODE (op) == SSA_NAME
4641 && SSA_NAME_VAR (op)
4642 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4643 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4644 op = TREE_TYPE (op);
4646 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4648 error ("invalid conversion in return statement");
4649 debug_generic_stmt (restype);
4650 debug_generic_stmt (TREE_TYPE (op));
4651 return true;
4654 return false;
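/* A sketch of the DECL_BY_REFERENCE case handled above: when a
   function returns a large aggregate by invisible reference, the
   RESULT_DECL has pointer type, e.g. struct big * for a function
   declared to return struct big.  Stripping one TREE_TYPE level
   makes useless_type_conversion_p compare struct big against
   struct big rather than against the pointer.  */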
4658 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4659 is a problem, otherwise false. */
4661 static bool
4662 verify_gimple_goto (ggoto *stmt)
4664 tree dest = gimple_goto_dest (stmt);
4666 /* ??? We have two canonical forms of direct goto destinations, a
4667 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4668 if (TREE_CODE (dest) != LABEL_DECL
4669 && (!is_gimple_val (dest)
4670 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4672 error ("goto destination is neither a label nor a pointer");
4673 return true;
4676 return false;
4679 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4680 is a problem, otherwise false. */
4682 static bool
4683 verify_gimple_switch (gswitch *stmt)
4685 unsigned int i, n;
4686 tree elt, prev_upper_bound = NULL_TREE;
4687 tree index_type, elt_type = NULL_TREE;
4689 if (!is_gimple_val (gimple_switch_index (stmt)))
4691 error ("invalid operand to switch statement");
4692 debug_generic_stmt (gimple_switch_index (stmt));
4693 return true;
4696 index_type = TREE_TYPE (gimple_switch_index (stmt));
4697 if (! INTEGRAL_TYPE_P (index_type))
4699 error ("non-integral type switch statement");
4700 debug_generic_expr (index_type);
4701 return true;
4704 elt = gimple_switch_label (stmt, 0);
4705 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4707 error ("invalid default case label in switch statement");
4708 debug_generic_expr (elt);
4709 return true;
4712 n = gimple_switch_num_labels (stmt);
4713 for (i = 1; i < n; i++)
4715 elt = gimple_switch_label (stmt, i);
4717 if (! CASE_LOW (elt))
4719 error ("invalid case label in switch statement");
4720 debug_generic_expr (elt);
4721 return true;
4723 if (CASE_HIGH (elt)
4724 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4726 error ("invalid case range in switch statement");
4727 debug_generic_expr (elt);
4728 return true;
4731 if (elt_type)
4733 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4734 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4736 error ("type mismatch for case label in switch statement");
4737 debug_generic_expr (elt);
4738 return true;
4741 else
4743 elt_type = TREE_TYPE (CASE_LOW (elt));
4744 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4746 error ("type precision mismatch in switch statement");
4747 return true;
4751 if (prev_upper_bound)
4753 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4755 error ("case labels not sorted in switch statement");
4756 return true;
4760 prev_upper_bound = CASE_HIGH (elt);
4761 if (! prev_upper_bound)
4762 prev_upper_bound = CASE_LOW (elt);
4765 return false;
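/* Taken together, the checks above accept a case vector like the
   following sketch (labels hypothetical):

     switch (i_1) <default: L0, case 1: L1,
                   case 3 ... 7: L2, case 9: L1>

   The default label comes first and has neither CASE_LOW nor
   CASE_HIGH, every other label has a CASE_LOW, ranges satisfy
   low < high, all case constants share one type that is no more
   precise than the index type, and labels are sorted without
   overlap.  */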
4768 /* Verify a gimple debug statement STMT.
4769 Returns true if anything is wrong. */
4771 static bool
4772 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4774 /* There isn't much that could be wrong in a gimple debug stmt. A
4775 gimple debug bind stmt, for example, maps a tree (usually a
4776 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4777 member of an aggregate type) to another tree, which can be an
4778 arbitrary expression. These stmts expand into debug insns, and
4779 are converted to debug notes by var-tracking.c. */
4780 return false;
4783 /* Verify a gimple label statement STMT.
4784 Returns true if anything is wrong. */
4786 static bool
4787 verify_gimple_label (glabel *stmt)
4789 tree decl = gimple_label_label (stmt);
4790 int uid;
4791 bool err = false;
4793 if (TREE_CODE (decl) != LABEL_DECL)
4794 return true;
4795 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4796 && DECL_CONTEXT (decl) != current_function_decl)
4798 error ("label's context is not the current function decl");
4799 err |= true;
4802 uid = LABEL_DECL_UID (decl);
4803 if (cfun->cfg
4804 && (uid == -1
4805 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4807 error ("incorrect entry in label_to_block_map");
4808 err |= true;
4811 uid = EH_LANDING_PAD_NR (decl);
4812 if (uid)
4814 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4815 if (decl != lp->post_landing_pad)
4817 error ("incorrect setting of landing pad number");
4818 err |= true;
4822 return err;
4825 /* Verify a gimple cond statement STMT.
4826 Returns true if anything is wrong. */
4828 static bool
4829 verify_gimple_cond (gcond *stmt)
4831 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4833 error ("invalid comparison code in gimple cond");
4834 return true;
4836 if (!(!gimple_cond_true_label (stmt)
4837 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4838 || !(!gimple_cond_false_label (stmt)
4839 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4841 error ("invalid labels in gimple cond");
4842 return true;
4845 return verify_gimple_comparison (boolean_type_node,
4846 gimple_cond_lhs (stmt),
4847 gimple_cond_rhs (stmt),
4848 gimple_cond_code (stmt));
4851 /* Verify the GIMPLE statement STMT. Returns true if there is an
4852 error, otherwise false. */
4854 static bool
4855 verify_gimple_stmt (gimple *stmt)
4857 switch (gimple_code (stmt))
4859 case GIMPLE_ASSIGN:
4860 return verify_gimple_assign (as_a <gassign *> (stmt));
4862 case GIMPLE_LABEL:
4863 return verify_gimple_label (as_a <glabel *> (stmt));
4865 case GIMPLE_CALL:
4866 return verify_gimple_call (as_a <gcall *> (stmt));
4868 case GIMPLE_COND:
4869 return verify_gimple_cond (as_a <gcond *> (stmt));
4871 case GIMPLE_GOTO:
4872 return verify_gimple_goto (as_a <ggoto *> (stmt));
4874 case GIMPLE_SWITCH:
4875 return verify_gimple_switch (as_a <gswitch *> (stmt));
4877 case GIMPLE_RETURN:
4878 return verify_gimple_return (as_a <greturn *> (stmt));
4880 case GIMPLE_ASM:
4881 return false;
4883 case GIMPLE_TRANSACTION:
4884 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4886 /* Tuples that do not have tree operands. */
4887 case GIMPLE_NOP:
4888 case GIMPLE_PREDICT:
4889 case GIMPLE_RESX:
4890 case GIMPLE_EH_DISPATCH:
4891 case GIMPLE_EH_MUST_NOT_THROW:
4892 return false;
4894 CASE_GIMPLE_OMP:
4895 /* OpenMP directives are validated by the FE and never operated
4896 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4897 non-gimple expressions when the main index variable has had
4898 its address taken. This does not affect the loop itself
4899 because the header of a GIMPLE_OMP_FOR is merely used to determine
4900 how to set up the parallel iteration. */
4901 return false;
4903 case GIMPLE_DEBUG:
4904 return verify_gimple_debug (stmt);
4906 default:
4907 gcc_unreachable ();
4911 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4912 and false otherwise. */
4914 static bool
4915 verify_gimple_phi (gimple *phi)
4917 bool err = false;
4918 unsigned i;
4919 tree phi_result = gimple_phi_result (phi);
4920 bool virtual_p;
4922 if (!phi_result)
4924 error ("invalid PHI result");
4925 return true;
4928 virtual_p = virtual_operand_p (phi_result);
4929 if (TREE_CODE (phi_result) != SSA_NAME
4930 || (virtual_p
4931 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4933 error ("invalid PHI result");
4934 err = true;
4937 for (i = 0; i < gimple_phi_num_args (phi); i++)
4939 tree t = gimple_phi_arg_def (phi, i);
4941 if (!t)
4943 error ("missing PHI def");
4944 err |= true;
4945 continue;
4947 /* Addressable variables do have SSA_NAMEs but they
4948 are not considered gimple values. */
4949 else if ((TREE_CODE (t) == SSA_NAME
4950 && virtual_p != virtual_operand_p (t))
4951 || (virtual_p
4952 && (TREE_CODE (t) != SSA_NAME
4953 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4954 || (!virtual_p
4955 && !is_gimple_val (t)))
4957 error ("invalid PHI argument");
4958 debug_generic_expr (t);
4959 err |= true;
4961 #ifdef ENABLE_TYPES_CHECKING
4962 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4964 error ("incompatible types in PHI argument %u", i);
4965 debug_generic_stmt (TREE_TYPE (phi_result));
4966 debug_generic_stmt (TREE_TYPE (t));
4967 err |= true;
4969 #endif
4972 return err;
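/* Illustrative PHIs that pass the checks above:

     x_3 = PHI <x_1(2), 5(3)>                (scalar result, each
                                              argument a GIMPLE value)
     .MEM_4 = PHI <.MEM_2(2), .MEM_3(3)>     (virtual result, each
                                              argument based on the
                                              single virtual operand)

   Mixing a virtual argument into a scalar PHI, or vice versa, is
   exactly what the virtual_p consistency test rejects.  */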
4975 /* Verify the GIMPLE statements inside the sequence STMTS. */
4977 static bool
4978 verify_gimple_in_seq_2 (gimple_seq stmts)
4980 gimple_stmt_iterator ittr;
4981 bool err = false;
4983 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4985 gimple *stmt = gsi_stmt (ittr);
4987 switch (gimple_code (stmt))
4989 case GIMPLE_BIND:
4990 err |= verify_gimple_in_seq_2 (
4991 gimple_bind_body (as_a <gbind *> (stmt)));
4992 break;
4994 case GIMPLE_TRY:
4995 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4996 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4997 break;
4999 case GIMPLE_EH_FILTER:
5000 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5001 break;
5003 case GIMPLE_EH_ELSE:
5005 geh_else *eh_else = as_a <geh_else *> (stmt);
5006 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5007 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5009 break;
5011 case GIMPLE_CATCH:
5012 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5013 as_a <gcatch *> (stmt)));
5014 break;
5016 case GIMPLE_TRANSACTION:
5017 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5018 break;
5020 default:
5022 bool err2 = verify_gimple_stmt (stmt);
5023 if (err2)
5024 debug_gimple_stmt (stmt);
5025 err |= err2;
5030 return err;
5033 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5034 is a problem, otherwise false. */
5036 static bool
5037 verify_gimple_transaction (gtransaction *stmt)
5039 tree lab;
5041 lab = gimple_transaction_label_norm (stmt);
5042 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5043 return true;
5044 lab = gimple_transaction_label_uninst (stmt);
5045 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5046 return true;
5047 lab = gimple_transaction_label_over (stmt);
5048 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5049 return true;
5051 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5055 /* Verify the GIMPLE statements inside the statement list STMTS. */
5057 DEBUG_FUNCTION void
5058 verify_gimple_in_seq (gimple_seq stmts)
5060 timevar_push (TV_TREE_STMT_VERIFY);
5061 if (verify_gimple_in_seq_2 (stmts))
5062 internal_error ("verify_gimple failed");
5063 timevar_pop (TV_TREE_STMT_VERIFY);
5066 /* Return true when T can be shared. */
5068 static bool
5069 tree_node_can_be_shared (tree t)
5071 if (IS_TYPE_OR_DECL_P (t)
5072 || is_gimple_min_invariant (t)
5073 || TREE_CODE (t) == SSA_NAME
5074 || t == error_mark_node
5075 || TREE_CODE (t) == IDENTIFIER_NODE)
5076 return true;
5078 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5079 return true;
5081 if (DECL_P (t))
5082 return true;
5084 return false;
5087 /* Called via walk_tree. Verify tree sharing. */
5089 static tree
5090 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5092 hash_set<void *> *visited = (hash_set<void *> *) data;
5094 if (tree_node_can_be_shared (*tp))
5096 *walk_subtrees = false;
5097 return NULL;
5100 if (visited->add (*tp))
5101 return *tp;
5103 return NULL;
5106 /* Called via walk_gimple_stmt. Verify tree sharing. */
5108 static tree
5109 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5111 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5112 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5115 static bool eh_error_found;
5116 bool
5117 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5118 hash_set<gimple *> *visited)
5120 if (!visited->contains (stmt))
5122 error ("dead STMT in EH table");
5123 debug_gimple_stmt (stmt);
5124 eh_error_found = true;
5126 return true;
5129 /* Verify that the block of location LOC is in BLOCKS. */
5131 static bool
5132 verify_location (hash_set<tree> *blocks, location_t loc)
5134 tree block = LOCATION_BLOCK (loc);
5135 if (block != NULL_TREE
5136 && !blocks->contains (block))
5138 error ("location references block not in block tree");
5139 return true;
5141 if (block != NULL_TREE)
5142 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5143 return false;
5146 /* Called via walk_tree. Verify that expressions have no blocks. */
5148 static tree
5149 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5151 if (!EXPR_P (*tp))
5153 *walk_subtrees = false;
5154 return NULL;
5157 location_t loc = EXPR_LOCATION (*tp);
5158 if (LOCATION_BLOCK (loc) != NULL)
5159 return *tp;
5161 return NULL;
5164 /* Called via walk_tree. Verify locations of expressions. */
5166 static tree
5167 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5169 hash_set<tree> *blocks = (hash_set<tree> *) data;
5171 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5173 tree t = DECL_DEBUG_EXPR (*tp);
5174 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5175 if (addr)
5176 return addr;
5178 if ((VAR_P (*tp)
5179 || TREE_CODE (*tp) == PARM_DECL
5180 || TREE_CODE (*tp) == RESULT_DECL)
5181 && DECL_HAS_VALUE_EXPR_P (*tp))
5183 tree t = DECL_VALUE_EXPR (*tp);
5184 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5185 if (addr)
5186 return addr;
5189 if (!EXPR_P (*tp))
5191 *walk_subtrees = false;
5192 return NULL;
5195 location_t loc = EXPR_LOCATION (*tp);
5196 if (verify_location (blocks, loc))
5197 return *tp;
5199 return NULL;
5202 /* Called via walk_gimple_op. Verify locations of expressions. */
5204 static tree
5205 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5207 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5208 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5211 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5213 static void
5214 collect_subblocks (hash_set<tree> *blocks, tree block)
5216 tree t;
5217 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5219 blocks->add (t);
5220 collect_subblocks (blocks, t);
5224 /* Verify the GIMPLE statements in the CFG of FN. */
5226 DEBUG_FUNCTION void
5227 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5229 basic_block bb;
5230 bool err = false;
5232 timevar_push (TV_TREE_STMT_VERIFY);
5233 hash_set<void *> visited;
5234 hash_set<gimple *> visited_stmts;
5236 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5237 hash_set<tree> blocks;
5238 if (DECL_INITIAL (fn->decl))
5240 blocks.add (DECL_INITIAL (fn->decl));
5241 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5244 FOR_EACH_BB_FN (bb, fn)
5246 gimple_stmt_iterator gsi;
5248 for (gphi_iterator gpi = gsi_start_phis (bb);
5249 !gsi_end_p (gpi);
5250 gsi_next (&gpi))
5252 gphi *phi = gpi.phi ();
5253 bool err2 = false;
5254 unsigned i;
5256 visited_stmts.add (phi);
5258 if (gimple_bb (phi) != bb)
5260 error ("gimple_bb (phi) is set to a wrong basic block");
5261 err2 = true;
5264 err2 |= verify_gimple_phi (phi);
5266 /* Only PHI arguments have locations. */
5267 if (gimple_location (phi) != UNKNOWN_LOCATION)
5269 error ("PHI node with location");
5270 err2 = true;
5273 for (i = 0; i < gimple_phi_num_args (phi); i++)
5275 tree arg = gimple_phi_arg_def (phi, i);
5276 tree addr = walk_tree (&arg, verify_node_sharing_1,
5277 &visited, NULL);
5278 if (addr)
5280 error ("incorrect sharing of tree nodes");
5281 debug_generic_expr (addr);
5282 err2 |= true;
5284 location_t loc = gimple_phi_arg_location (phi, i);
5285 if (virtual_operand_p (gimple_phi_result (phi))
5286 && loc != UNKNOWN_LOCATION)
5288 error ("virtual PHI with argument locations");
5289 err2 = true;
5291 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5292 if (addr)
5294 debug_generic_expr (addr);
5295 err2 = true;
5297 err2 |= verify_location (&blocks, loc);
5300 if (err2)
5301 debug_gimple_stmt (phi);
5302 err |= err2;
5305 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5307 gimple *stmt = gsi_stmt (gsi);
5308 bool err2 = false;
5309 struct walk_stmt_info wi;
5310 tree addr;
5311 int lp_nr;
5313 visited_stmts.add (stmt);
5315 if (gimple_bb (stmt) != bb)
5317 error ("gimple_bb (stmt) is set to a wrong basic block");
5318 err2 = true;
5321 err2 |= verify_gimple_stmt (stmt);
5322 err2 |= verify_location (&blocks, gimple_location (stmt));
5324 memset (&wi, 0, sizeof (wi));
5325 wi.info = (void *) &visited;
5326 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5327 if (addr)
5329 error ("incorrect sharing of tree nodes");
5330 debug_generic_expr (addr);
5331 err2 |= true;
5334 memset (&wi, 0, sizeof (wi));
5335 wi.info = (void *) &blocks;
5336 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5337 if (addr)
5339 debug_generic_expr (addr);
5340 err2 |= true;
5343 /* ??? Instead of not checking these stmts at all the walker
5344 should know its context via wi. */
5345 if (!is_gimple_debug (stmt)
5346 && !is_gimple_omp (stmt))
5348 memset (&wi, 0, sizeof (wi));
5349 addr = walk_gimple_op (stmt, verify_expr, &wi);
5350 if (addr)
5352 debug_generic_expr (addr);
5353 inform (gimple_location (stmt), "in statement");
5354 err2 |= true;
5358 /* If the statement is marked as part of an EH region, then it is
5359 expected that the statement could throw. Verify that when we
5360 have optimizations that simplify statements such that we prove
5361 that they cannot throw, that we update other data structures
5362 to match. */
5363 lp_nr = lookup_stmt_eh_lp (stmt);
5364 if (lp_nr > 0)
5366 if (!stmt_could_throw_p (stmt))
5368 if (verify_nothrow)
5370 error ("statement marked for throw, but doesn%'t");
5371 err2 |= true;
5374 else if (!gsi_one_before_end_p (gsi))
5376 error ("statement marked for throw in middle of block");
5377 err2 |= true;
5381 if (err2)
5382 debug_gimple_stmt (stmt);
5383 err |= err2;
5387 eh_error_found = false;
5388 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5389 if (eh_table)
5390 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5391 (&visited_stmts);
5393 if (err || eh_error_found)
5394 internal_error ("verify_gimple failed");
5396 verify_histograms ();
5397 timevar_pop (TV_TREE_STMT_VERIFY);
5401 /* Verifies that the flow information is OK. */
5403 static int
5404 gimple_verify_flow_info (void)
5406 int err = 0;
5407 basic_block bb;
5408 gimple_stmt_iterator gsi;
5409 gimple *stmt;
5410 edge e;
5411 edge_iterator ei;
5413 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5414 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5416 error ("ENTRY_BLOCK has IL associated with it");
5417 err = 1;
5420 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5421 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5423 error ("EXIT_BLOCK has IL associated with it");
5424 err = 1;
5427 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5428 if (e->flags & EDGE_FALLTHRU)
5430 error ("fallthru to exit from bb %d", e->src->index);
5431 err = 1;
5434 FOR_EACH_BB_FN (bb, cfun)
5436 bool found_ctrl_stmt = false;
5438 stmt = NULL;
5440 /* Skip labels on the start of basic block. */
5441 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5443 tree label;
5444 gimple *prev_stmt = stmt;
5446 stmt = gsi_stmt (gsi);
5448 if (gimple_code (stmt) != GIMPLE_LABEL)
5449 break;
5451 label = gimple_label_label (as_a <glabel *> (stmt));
5452 if (prev_stmt && DECL_NONLOCAL (label))
5454 error ("nonlocal label ");
5455 print_generic_expr (stderr, label);
5456 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5457 bb->index);
5458 err = 1;
5461 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5463 error ("EH landing pad label ");
5464 print_generic_expr (stderr, label);
5465 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5466 bb->index);
5467 err = 1;
5470 if (label_to_block (label) != bb)
5472 error ("label ");
5473 print_generic_expr (stderr, label);
5474 fprintf (stderr, " to block does not match in bb %d",
5475 bb->index);
5476 err = 1;
5479 if (decl_function_context (label) != current_function_decl)
5481 error ("label ");
5482 print_generic_expr (stderr, label);
5483 fprintf (stderr, " has incorrect context in bb %d",
5484 bb->index);
5485 err = 1;
5489 /* Verify that body of basic block BB is free of control flow. */
5490 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5492 gimple *stmt = gsi_stmt (gsi);
5494 if (found_ctrl_stmt)
5496 error ("control flow in the middle of basic block %d",
5497 bb->index);
5498 err = 1;
5501 if (stmt_ends_bb_p (stmt))
5502 found_ctrl_stmt = true;
5504 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5506 error ("label ");
5507 print_generic_expr (stderr, gimple_label_label (label_stmt));
5508 fprintf (stderr, " in the middle of basic block %d", bb->index);
5509 err = 1;
5513 gsi = gsi_last_bb (bb);
5514 if (gsi_end_p (gsi))
5515 continue;
5517 stmt = gsi_stmt (gsi);
5519 if (gimple_code (stmt) == GIMPLE_LABEL)
5520 continue;
5522 err |= verify_eh_edges (stmt);
5524 if (is_ctrl_stmt (stmt))
5526 FOR_EACH_EDGE (e, ei, bb->succs)
5527 if (e->flags & EDGE_FALLTHRU)
5529 error ("fallthru edge after a control statement in bb %d",
5530 bb->index);
5531 err = 1;
5535 if (gimple_code (stmt) != GIMPLE_COND)
5537 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
5538 after anything but a GIMPLE_COND. */
5539 FOR_EACH_EDGE (e, ei, bb->succs)
5540 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5542 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5543 bb->index);
5544 err = 1;
5548 switch (gimple_code (stmt))
5550 case GIMPLE_COND:
5552 edge true_edge;
5553 edge false_edge;
5555 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5557 if (!true_edge
5558 || !false_edge
5559 || !(true_edge->flags & EDGE_TRUE_VALUE)
5560 || !(false_edge->flags & EDGE_FALSE_VALUE)
5561 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5562 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5563 || EDGE_COUNT (bb->succs) >= 3)
5565 error ("wrong outgoing edge flags at end of bb %d",
5566 bb->index);
5567 err = 1;
5570 break;
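/* In other words a GIMPLE_COND block must end in exactly two
   outgoing edges, sketched as

     <bb 2>: if (a_1 != 0) ...
       2 -> 3  EDGE_TRUE_VALUE
       2 -> 4  EDGE_FALSE_VALUE

   (block numbers hypothetical), with neither edge also marked
   fallthru or abnormal.  */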
5572 case GIMPLE_GOTO:
5573 if (simple_goto_p (stmt))
5575 error ("explicit goto at end of bb %d", bb->index);
5576 err = 1;
5578 else
5580 /* FIXME. We should double check that the labels in the
5581 destination blocks have their address taken. */
5582 FOR_EACH_EDGE (e, ei, bb->succs)
5583 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5584 | EDGE_FALSE_VALUE))
5585 || !(e->flags & EDGE_ABNORMAL))
5587 error ("wrong outgoing edge flags at end of bb %d",
5588 bb->index);
5589 err = 1;
5592 break;
5594 case GIMPLE_CALL:
5595 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5596 break;
5597 /* fallthru */
5598 case GIMPLE_RETURN:
5599 if (!single_succ_p (bb)
5600 || (single_succ_edge (bb)->flags
5601 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5602 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5604 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5605 err = 1;
5607 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5609 error ("return edge does not point to exit in bb %d",
5610 bb->index);
5611 err = 1;
5613 break;
5615 case GIMPLE_SWITCH:
5617 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5618 tree prev;
5619 edge e;
5620 size_t i, n;
5622 n = gimple_switch_num_labels (switch_stmt);
5624 /* Mark all the destination basic blocks. */
5625 for (i = 0; i < n; ++i)
5627 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5628 basic_block label_bb = label_to_block (lab);
5629 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5630 label_bb->aux = (void *)1;
5633 /* Verify that the case labels are sorted. */
5634 prev = gimple_switch_label (switch_stmt, 0);
5635 for (i = 1; i < n; ++i)
5637 tree c = gimple_switch_label (switch_stmt, i);
5638 if (!CASE_LOW (c))
5640 error ("found default case not at the start of "
5641 "case vector");
5642 err = 1;
5643 continue;
5645 if (CASE_LOW (prev)
5646 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5648 error ("case labels not sorted: ");
5649 print_generic_expr (stderr, prev);
5650 fprintf (stderr," is greater than ");
5651 print_generic_expr (stderr, c);
5652 fprintf (stderr," but comes before it.\n");
5653 err = 1;
5655 prev = c;
5657 /* VRP will remove the default case if it can prove it will
5658 never be executed. So do not verify there always exists
5659 a default case here. */
5661 FOR_EACH_EDGE (e, ei, bb->succs)
5663 if (!e->dest->aux)
5665 error ("extra outgoing edge %d->%d",
5666 bb->index, e->dest->index);
5667 err = 1;
5670 e->dest->aux = (void *)2;
5671 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5672 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5674 error ("wrong outgoing edge flags at end of bb %d",
5675 bb->index);
5676 err = 1;
5680 /* Check that we have all of them. */
5681 for (i = 0; i < n; ++i)
5683 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5684 basic_block label_bb = label_to_block (lab);
5686 if (label_bb->aux != (void *)2)
5688 error ("missing edge %i->%i", bb->index, label_bb->index);
5689 err = 1;
5693 FOR_EACH_EDGE (e, ei, bb->succs)
5694 e->dest->aux = (void *)0;
5696 break;
5698 case GIMPLE_EH_DISPATCH:
5699 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5700 break;
5702 default:
5703 break;
5707 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5708 verify_dominators (CDI_DOMINATORS);
5710 return err;
5714 /* Updates phi nodes after creating a forwarder block joined
5715 by edge FALLTHRU. */
5717 static void
5718 gimple_make_forwarder_block (edge fallthru)
5720 edge e;
5721 edge_iterator ei;
5722 basic_block dummy, bb;
5723 tree var;
5724 gphi_iterator gsi;
5726 dummy = fallthru->src;
5727 bb = fallthru->dest;
5729 if (single_pred_p (bb))
5730 return;
5732 /* If we redirected a branch we must create new PHI nodes at the
5733 start of BB. */
5734 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5736 gphi *phi, *new_phi;
5738 phi = gsi.phi ();
5739 var = gimple_phi_result (phi);
5740 new_phi = create_phi_node (var, bb);
5741 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5742 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5743 UNKNOWN_LOCATION);
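/* Sketch (names invented): if DUMMY carries x_3 = PHI <x_1(5), x_2(6)>,
   the loop above renames it to x_4 = PHI <x_1(5), x_2(6)> and gives
   BB the new PHI x_3 = PHI <x_4(fallthru), ...>, whose remaining
   arguments are filled in from the redirected edges below.  */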
5746 /* Add the arguments we have stored on edges. */
5747 FOR_EACH_EDGE (e, ei, bb->preds)
5749 if (e == fallthru)
5750 continue;
5752 flush_pending_stmts (e);
5757 /* Return a non-special label in the head of basic block BB.
5758 Create one if it doesn't exist. */
5760 tree
5761 gimple_block_label (basic_block bb)
5763 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5764 bool first = true;
5765 tree label;
5766 glabel *stmt;
5768 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5770 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5771 if (!stmt)
5772 break;
5773 label = gimple_label_label (stmt);
5774 if (!DECL_NONLOCAL (label))
5776 if (!first)
5777 gsi_move_before (&i, &s);
5778 return label;
5782 label = create_artificial_label (UNKNOWN_LOCATION);
5783 stmt = gimple_build_label (label);
5784 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5785 return label;
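/* Typical use (sketch): a caller that needs an explicit jump target
   in DEST does

     tree label = gimple_block_label (dest);

   and stores LABEL into e.g. CASE_LABEL of a switch, as
   gimple_redirect_edge_and_branch does below; an existing suitable
   label is reused and moved to the head of the block.  */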
5789 /* Attempt to perform edge redirection by replacing a possibly complex
5790 jump instruction by a goto or by removing the jump completely.
5791 This can apply only if all edges now point to the same block. The
5792 parameters and return values are equivalent to
5793 redirect_edge_and_branch. */
5795 static edge
5796 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5798 basic_block src = e->src;
5799 gimple_stmt_iterator i;
5800 gimple *stmt;
5802 /* We can replace or remove a complex jump only when we have exactly
5803 two edges. */
5804 if (EDGE_COUNT (src->succs) != 2
5805 /* Verify that all targets will be TARGET. Specifically, the
5806 edge that is not E must also go to TARGET. */
5807 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5808 return NULL;
5810 i = gsi_last_bb (src);
5811 if (gsi_end_p (i))
5812 return NULL;
5814 stmt = gsi_stmt (i);
5816 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5818 gsi_remove (&i, true);
5819 e = ssa_redirect_edge (e, target);
5820 e->flags = EDGE_FALLTHRU;
5821 return e;
5824 return NULL;
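/* For instance (hypothetical CFG): if SRC ends in

     if (x_1 > 0) goto <bb 5>; else goto <bb 6>;

   and the edge to <bb 6> is redirected to <bb 5>, both successors
   coincide, so the GIMPLE_COND is removed and ssa_redirect_edge
   leaves a single EDGE_FALLTHRU edge to <bb 5>.  */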
5828 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5829 edge representing the redirected branch. */
5831 static edge
5832 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5834 basic_block bb = e->src;
5835 gimple_stmt_iterator gsi;
5836 edge ret;
5837 gimple *stmt;
5839 if (e->flags & EDGE_ABNORMAL)
5840 return NULL;
5842 if (e->dest == dest)
5843 return NULL;
5845 if (e->flags & EDGE_EH)
5846 return redirect_eh_edge (e, dest);
5848 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5850 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5851 if (ret)
5852 return ret;
5855 gsi = gsi_last_bb (bb);
5856 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5858 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5860 case GIMPLE_COND:
5861 /* For COND_EXPR, we only need to redirect the edge. */
5862 break;
5864 case GIMPLE_GOTO:
5865 /* No non-abnormal edges should lead from a non-simple goto, and
5866 simple ones should be represented implicitly. */
5867 gcc_unreachable ();
5869 case GIMPLE_SWITCH:
5871 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5872 tree label = gimple_block_label (dest);
5873 tree cases = get_cases_for_edge (e, switch_stmt);
5875 /* If we have a list of cases associated with E, then use it
5876 as it's a lot faster than walking the entire case vector. */
5877 if (cases)
5879 edge e2 = find_edge (e->src, dest);
5880 tree last, first;
5882 first = cases;
5883 while (cases)
5885 last = cases;
5886 CASE_LABEL (cases) = label;
5887 cases = CASE_CHAIN (cases);
5890 /* If there was already an edge in the CFG, then we need
5891 to move all the cases associated with E to E2. */
5892 if (e2)
5894 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5896 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5897 CASE_CHAIN (cases2) = first;
5899 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5901 else
5903 size_t i, n = gimple_switch_num_labels (switch_stmt);
5905 for (i = 0; i < n; i++)
5907 tree elt = gimple_switch_label (switch_stmt, i);
5908 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5909 CASE_LABEL (elt) = label;
5913 break;
5915 case GIMPLE_ASM:
5917 gasm *asm_stmt = as_a <gasm *> (stmt);
5918 int i, n = gimple_asm_nlabels (asm_stmt);
5919 tree label = NULL;
5921 for (i = 0; i < n; ++i)
5923 tree cons = gimple_asm_label_op (asm_stmt, i);
5924 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5926 if (!label)
5927 label = gimple_block_label (dest);
5928 TREE_VALUE (cons) = label;
5932 /* If we didn't find any label matching the former edge in the
5933 asm labels, we must be redirecting the fallthrough
5934 edge. */
5935 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5937 break;
5939 case GIMPLE_RETURN:
5940 gsi_remove (&gsi, true);
5941 e->flags |= EDGE_FALLTHRU;
5942 break;
5944 case GIMPLE_OMP_RETURN:
5945 case GIMPLE_OMP_CONTINUE:
5946 case GIMPLE_OMP_SECTIONS_SWITCH:
5947 case GIMPLE_OMP_FOR:
5948 /* The edges from OMP constructs can be simply redirected. */
5949 break;
5951 case GIMPLE_EH_DISPATCH:
5952 if (!(e->flags & EDGE_FALLTHRU))
5953 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5954 break;
5956 case GIMPLE_TRANSACTION:
5957 if (e->flags & EDGE_TM_ABORT)
5958 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5959 gimple_block_label (dest));
5960 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5961 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5962 gimple_block_label (dest));
5963 else
5964 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5965 gimple_block_label (dest));
5966 break;
5968 default:
5969 /* Otherwise it must be a fallthru edge, and we don't need to
5970 do anything besides redirecting it. */
5971 gcc_assert (e->flags & EDGE_FALLTHRU);
5972 break;
5975 /* Update/insert PHI nodes as necessary. */
5977 /* Now update the edges in the CFG. */
5978 e = ssa_redirect_edge (e, dest);
5980 return e;
5983 /* Returns true if it is possible to remove edge E by redirecting
5984 it to the destination of the other edge from E->src. */
5986 static bool
5987 gimple_can_remove_branch_p (const_edge e)
5989 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5990 return false;
5992 return true;
5995 /* Simple wrapper, as we can always redirect fallthru edges. */
5997 static basic_block
5998 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6000 e = gimple_redirect_edge_and_branch (e, dest);
6001 gcc_assert (e);
6003 return NULL;
6007 /* Splits basic block BB after statement STMT (but at least after the
6008 labels). If STMT is NULL, BB is split just after the labels. */
6010 static basic_block
6011 gimple_split_block (basic_block bb, void *stmt)
6013 gimple_stmt_iterator gsi;
6014 gimple_stmt_iterator gsi_tgt;
6015 gimple_seq list;
6016 basic_block new_bb;
6017 edge e;
6018 edge_iterator ei;
6020 new_bb = create_empty_bb (bb);
6022 /* Redirect the outgoing edges. */
6023 new_bb->succs = bb->succs;
6024 bb->succs = NULL;
6025 FOR_EACH_EDGE (e, ei, new_bb->succs)
6026 e->src = new_bb;
6028 /* Get a stmt iterator pointing to the first stmt to move. */
6029 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6030 gsi = gsi_after_labels (bb);
6031 else
6033 gsi = gsi_for_stmt ((gimple *) stmt);
6034 gsi_next (&gsi);
6037 /* Move everything from GSI to the new basic block. */
6038 if (gsi_end_p (gsi))
6039 return new_bb;
6041 /* Split the statement list - avoid creating new containers as this
6042 brings ugly quadratic memory consumption in the inliner.
6043 (We are still quadratic since we need to update stmt BB pointers,
6044 sadly.) */
6045 gsi_split_seq_before (&gsi, &list);
6046 set_bb_seq (new_bb, list);
6047 for (gsi_tgt = gsi_start (list);
6048 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6049 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6051 return new_bb;
6055 /* Moves basic block BB after block AFTER. */
6057 static bool
6058 gimple_move_block_after (basic_block bb, basic_block after)
6060 if (bb->prev_bb == after)
6061 return true;
6063 unlink_block (bb);
6064 link_block (bb, after);
6066 return true;
6070 /* Return TRUE if block BB has no executable statements, otherwise return
6071 FALSE. */
6073 static bool
6074 gimple_empty_block_p (basic_block bb)
6076 /* BB must have no executable statements. */
6077 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6078 if (phi_nodes (bb))
6079 return false;
6080 if (gsi_end_p (gsi))
6081 return true;
6082 if (is_gimple_debug (gsi_stmt (gsi)))
6083 gsi_next_nondebug (&gsi);
6084 return gsi_end_p (gsi);
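/* So a block containing only labels and debug stmts, e.g.

     <bb 7>:
     L2:
     # DEBUG x => x_1

   (numbers hypothetical) counts as empty, while a single PHI node
   already makes the block non-empty.  */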
6088 /* Split a basic block if it ends with a conditional branch and if the
6089 other part of the block is not empty. */
6091 static basic_block
6092 gimple_split_block_before_cond_jump (basic_block bb)
6094 gimple *last, *split_point;
6095 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6096 if (gsi_end_p (gsi))
6097 return NULL;
6098 last = gsi_stmt (gsi);
6099 if (gimple_code (last) != GIMPLE_COND
6100 && gimple_code (last) != GIMPLE_SWITCH)
6101 return NULL;
6102 gsi_prev (&gsi);
6103 split_point = gsi_stmt (gsi);
6104 return split_block (bb, split_point)->dest;
6108 /* Return true if basic_block can be duplicated. */
6110 static bool
6111 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6113 return true;
6116 /* Create a duplicate of the basic block BB. NOTE: This does not
6117 preserve SSA form. */
6119 static basic_block
6120 gimple_duplicate_bb (basic_block bb)
6122 basic_block new_bb;
6123 gimple_stmt_iterator gsi_tgt;
6125 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6127 /* Copy the PHI nodes. We ignore PHI node arguments here because
6128 the incoming edges have not been setup yet. */
6129 for (gphi_iterator gpi = gsi_start_phis (bb);
6130 !gsi_end_p (gpi);
6131 gsi_next (&gpi))
6133 gphi *phi, *copy;
6134 phi = gpi.phi ();
6135 copy = create_phi_node (NULL_TREE, new_bb);
6136 create_new_def_for (gimple_phi_result (phi), copy,
6137 gimple_phi_result_ptr (copy));
6138 gimple_set_uid (copy, gimple_uid (phi));
6141 gsi_tgt = gsi_start_bb (new_bb);
6142 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6143 !gsi_end_p (gsi);
6144 gsi_next (&gsi))
6146 def_operand_p def_p;
6147 ssa_op_iter op_iter;
6148 tree lhs;
6149 gimple *stmt, *copy;
6151 stmt = gsi_stmt (gsi);
6152 if (gimple_code (stmt) == GIMPLE_LABEL)
6153 continue;
6155 /* Don't duplicate label debug stmts. */
6156 if (gimple_debug_bind_p (stmt)
6157 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6158 == LABEL_DECL)
6159 continue;
6161 /* Create a new copy of STMT and duplicate STMT's virtual
6162 operands. */
6163 copy = gimple_copy (stmt);
6164 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6166 maybe_duplicate_eh_stmt (copy, stmt);
6167 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6169 /* When copying around a stmt writing into a local non-user
6170 aggregate, make sure it won't share stack slot with other
6171 vars. */
6172 lhs = gimple_get_lhs (stmt);
6173 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6175 tree base = get_base_address (lhs);
6176 if (base
6177 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6178 && DECL_IGNORED_P (base)
6179 && !TREE_STATIC (base)
6180 && !DECL_EXTERNAL (base)
6181 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6182 DECL_NONSHAREABLE (base) = 1;
6185 /* Create new names for all the definitions created by COPY and
6186 add replacement mappings for each new name. */
6187 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6188 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6191 return new_bb;
6194 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6196 static void
6197 add_phi_args_after_copy_edge (edge e_copy)
6199 basic_block bb, bb_copy = e_copy->src, dest;
6200 edge e;
6201 edge_iterator ei;
6202 gphi *phi, *phi_copy;
6203 tree def;
6204 gphi_iterator psi, psi_copy;
6206 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6207 return;
6209 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6211 if (e_copy->dest->flags & BB_DUPLICATED)
6212 dest = get_bb_original (e_copy->dest);
6213 else
6214 dest = e_copy->dest;
6216 e = find_edge (bb, dest);
6217 if (!e)
6219 /* During loop unrolling the target of the latch edge is copied.
6220 In this case we are not looking for the edge to DEST, but for
6221 the edge to the duplicated block whose original was DEST. */
6222 FOR_EACH_EDGE (e, ei, bb->succs)
6224 if ((e->dest->flags & BB_DUPLICATED)
6225 && get_bb_original (e->dest) == dest)
6226 break;
6229 gcc_assert (e != NULL);
6232 for (psi = gsi_start_phis (e->dest),
6233 psi_copy = gsi_start_phis (e_copy->dest);
6234 !gsi_end_p (psi);
6235 gsi_next (&psi), gsi_next (&psi_copy))
6237 phi = psi.phi ();
6238 phi_copy = psi_copy.phi ();
6239 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6240 add_phi_arg (phi_copy, def, e_copy,
6241 gimple_phi_arg_location_from_edge (phi, e));
6246 /* Basic block BB_COPY was created by code duplication. Add phi node
6247 arguments for edges going out of BB_COPY. The blocks that were
6248 duplicated have BB_DUPLICATED set. */
6250 void
6251 add_phi_args_after_copy_bb (basic_block bb_copy)
6253 edge e_copy;
6254 edge_iterator ei;
6256 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6258 add_phi_args_after_copy_edge (e_copy);
6262 /* Blocks in REGION_COPY array of length N_REGION were created by
6263 duplication of basic blocks. Add phi node arguments for edges
6264 going from these blocks. If E_COPY is not NULL, also add
6265 phi node arguments for its destination. */
6267 void
6268 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6269 edge e_copy)
6271 unsigned i;
6273 for (i = 0; i < n_region; i++)
6274 region_copy[i]->flags |= BB_DUPLICATED;
6276 for (i = 0; i < n_region; i++)
6277 add_phi_args_after_copy_bb (region_copy[i]);
6278 if (e_copy)
6279 add_phi_args_after_copy_edge (e_copy);
6281 for (i = 0; i < n_region; i++)
6282 region_copy[i]->flags &= ~BB_DUPLICATED;
6285 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6286 important exit edge EXIT. By important we mean that no SSA name defined
6287 inside the region is live over the other exit edges of the region. All entry
6288 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6289 to the duplicate of the region. Dominance and loop information is
6290 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6291 UPDATE_DOMINANCE is false then we assume that the caller will update the
6292 dominance information after calling this function. The new basic
6293 blocks are stored to REGION_COPY in the same order as they had in REGION,
6294 provided that REGION_COPY is not NULL.
6295 The function returns false if it is unable to copy the region,
6296 true otherwise. */
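/* A sketch of the primary use, loop header copying (block numbers
   hypothetical): with

     <bb 3> (header): if (i_1 < n_2) goto <bb 4>; else goto <bb 6>;
     <bb 4> (body):   ...; goto <bb 3>;

   duplicating the header region over the entry edge places a copy
   of <bb 3> in front of the loop as a guard, and EXIT's source
   becomes the new latch, leaving the loop in do-while form.  */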
6298 bool
6299 gimple_duplicate_sese_region (edge entry, edge exit,
6300 basic_block *region, unsigned n_region,
6301 basic_block *region_copy,
6302 bool update_dominance)
6304 unsigned i;
6305 bool free_region_copy = false, copying_header = false;
6306 struct loop *loop = entry->dest->loop_father;
6307 edge exit_copy;
6308 vec<basic_block> doms = vNULL;
6309 edge redirected;
6310 profile_count total_count = profile_count::uninitialized ();
6311 profile_count entry_count = profile_count::uninitialized ();
6313 if (!can_copy_bbs_p (region, n_region))
6314 return false;
6316 /* Some sanity checking. Note that we do not check for all possible
6317 misuses of the functions. I.e. if you ask to copy something weird,
6318 it will work, but the state of structures probably will not be
6319 correct. */
6320 for (i = 0; i < n_region; i++)
6322 /* We do not handle subloops, i.e. all the blocks must belong to the
6323 same loop. */
6324 if (region[i]->loop_father != loop)
6325 return false;
6327 if (region[i] != entry->dest
6328 && region[i] == loop->header)
6329 return false;
6332 /* In case the function is used for loop header copying (which is the primary
6333 use), ensure that EXIT and its copy will be new latch and entry edges. */
6334 if (loop->header == entry->dest)
6336 copying_header = true;
6338 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6339 return false;
6341 for (i = 0; i < n_region; i++)
6342 if (region[i] != exit->src
6343 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6344 return false;
6347 initialize_original_copy_tables ();
6349 if (copying_header)
6350 set_loop_copy (loop, loop_outer (loop));
6351 else
6352 set_loop_copy (loop, loop);
6354 if (!region_copy)
6356 region_copy = XNEWVEC (basic_block, n_region);
6357 free_region_copy = true;
6360 /* Record blocks outside the region that are dominated by something
6361 inside. */
6362 if (update_dominance)
6364 doms.create (0);
6365 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6368 if (entry->dest->count.initialized_p ())
6370 total_count = entry->dest->count;
6371 entry_count = entry->count ();
6372 /* Fix up corner cases, to avoid division by zero or creation of negative
6373 frequencies. */
6374 if (entry_count > total_count)
6375 entry_count = total_count;
6378 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6379 split_edge_bb_loc (entry), update_dominance);
6380 if (total_count.initialized_p () && entry_count.initialized_p ())
6382 scale_bbs_frequencies_profile_count (region, n_region,
6383 total_count - entry_count,
6384 total_count);
6385 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6386 total_count);
6389 if (copying_header)
6391 loop->header = exit->dest;
6392 loop->latch = exit->src;
6395 /* Redirect the entry and add the phi node arguments. */
6396 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6397 gcc_assert (redirected != NULL);
6398 flush_pending_stmts (entry);
6400 /* Concerning updating of dominators: We must recount dominators
6401 for entry block and its copy. Anything that is outside of the
6402 region, but was dominated by something inside needs recounting as
6403 well. */
6404 if (update_dominance)
6406 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6407 doms.safe_push (get_bb_original (entry->dest));
6408 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6409 doms.release ();
6412 /* Add the other PHI node arguments. */
6413 add_phi_args_after_copy (region_copy, n_region, NULL);
6415 if (free_region_copy)
6416 free (region_copy);
6418 free_original_copy_tables ();
6419 return true;
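/* A minimal sketch (not part of this file) of the loop-header-copying
   use described above, assuming the header ends in a two-way
   GIMPLE_COND and the region is just the header block; the real
   driver lives in tree-ssa-loop-ch.c.  */
#if 0
static bool
example_copy_loop_header (struct loop *loop)
{
  edge entry = loop_preheader_edge (loop);
  basic_block header = loop->header;
  /* EXIT is the header successor that stays inside the loop; it and
     its copy become the new latch and entry edges.  */
  edge exit = (flow_bb_inside_loop_p (loop, EDGE_SUCC (header, 0)->dest)
	       ? EDGE_SUCC (header, 0) : EDGE_SUCC (header, 1));
  basic_block region[1] = { header };
  basic_block region_copy[1];

  if (!gimple_duplicate_sese_region (entry, exit, region, 1,
				     region_copy, true))
    return false;
  /* The call above does not update the SSA web.  */
  update_ssa (TODO_update_ssa);
  return true;
}
#endif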
6422 /* Checks if BB is part of the region defined by N_REGION BBS. */
6423 static bool
6424 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6426 unsigned int n;
6428 for (n = 0; n < n_region; n++)
6430 if (bb == bbs[n])
6431 return true;
6433 return false;
6436 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6437 are stored to REGION_COPY in the same order in which they appear
6438 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6439 the region, EXIT an exit from it. The condition guarding EXIT
6440 is moved to ENTRY. Returns true if duplication succeeds, false
6441 otherwise.
6443 For example,
6445 some_code;
6446 if (cond)
6447   A;
6448 else
6449   B;
6451 is transformed to
6453 if (cond)
6454   {
6455     some_code;
6456     A;
6457   }
6458 else
6459   {
6460     some_code;
6461     B;
6462   }
6463 */
6465 bool
6466 gimple_duplicate_sese_tail (edge entry, edge exit,
6467 basic_block *region, unsigned n_region,
6468 basic_block *region_copy)
6470 unsigned i;
6471 bool free_region_copy = false;
6472 struct loop *loop = exit->dest->loop_father;
6473 struct loop *orig_loop = entry->dest->loop_father;
6474 basic_block switch_bb, entry_bb, nentry_bb;
6475 vec<basic_block> doms;
6476 profile_count total_count = profile_count::uninitialized (),
6477 exit_count = profile_count::uninitialized ();
6478 edge exits[2], nexits[2], e;
6479 gimple_stmt_iterator gsi;
6480 gimple *cond_stmt;
6481 edge sorig, snew;
6482 basic_block exit_bb;
6483 gphi_iterator psi;
6484 gphi *phi;
6485 tree def;
6486 struct loop *target, *aloop, *cloop;
6488 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6489 exits[0] = exit;
6490 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6492 if (!can_copy_bbs_p (region, n_region))
6493 return false;
6495 initialize_original_copy_tables ();
6496 set_loop_copy (orig_loop, loop);
6498 target = loop;
6499 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6501 if (bb_part_of_region_p (aloop->header, region, n_region))
6503 cloop = duplicate_loop (aloop, target);
6504 duplicate_subloops (aloop, cloop);
6508 if (!region_copy)
6510 region_copy = XNEWVEC (basic_block, n_region);
6511 free_region_copy = true;
6514 gcc_assert (!need_ssa_update_p (cfun));
6516 /* Record blocks outside the region that are dominated by something
6517 inside. */
6518 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6520 total_count = exit->src->count;
6521 exit_count = exit->count ();
6522 /* Fix up corner cases, to avoid division by zero or creation of negative
6523 frequencies. */
6524 if (exit_count > total_count)
6525 exit_count = total_count;
6527 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6528 split_edge_bb_loc (exit), true);
6529 if (total_count.initialized_p () && exit_count.initialized_p ())
6531 scale_bbs_frequencies_profile_count (region, n_region,
6532 total_count - exit_count,
6533 total_count);
6534 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6535 total_count);
6538 /* Create the switch block, and move the exit condition into it. */
6539 entry_bb = entry->dest;
6540 nentry_bb = get_bb_copy (entry_bb);
6541 if (!last_stmt (entry->src)
6542 || !stmt_ends_bb_p (last_stmt (entry->src)))
6543 switch_bb = entry->src;
6544 else
6545 switch_bb = split_edge (entry);
6546 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6548 gsi = gsi_last_bb (switch_bb);
6549 cond_stmt = last_stmt (exit->src);
6550 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6551 cond_stmt = gimple_copy (cond_stmt);
6553 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6555 sorig = single_succ_edge (switch_bb);
6556 sorig->flags = exits[1]->flags;
6557 sorig->probability = exits[1]->probability;
6558 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6559 snew->probability = exits[0]->probability;
6562 /* Register the new edge from SWITCH_BB in loop exit lists. */
6563 rescan_loop_exit (snew, true, false);
6565 /* Add the PHI node arguments. */
6566 add_phi_args_after_copy (region_copy, n_region, snew);
6568 /* Get rid of now superfluous conditions and associated edges (and phi node
6569 arguments). */
6570 exit_bb = exit->dest;
6572 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6573 PENDING_STMT (e) = NULL;
6575 /* The latch of ORIG_LOOP was copied, and so was the backedge
6576 to the original header. We redirect this backedge to EXIT_BB. */
6577 for (i = 0; i < n_region; i++)
6578 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6580 gcc_assert (single_succ_edge (region_copy[i]));
6581 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6582 PENDING_STMT (e) = NULL;
6583 for (psi = gsi_start_phis (exit_bb);
6584 !gsi_end_p (psi);
6585 gsi_next (&psi))
6587 phi = psi.phi ();
6588 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6589 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6592 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6593 PENDING_STMT (e) = NULL;
6595 /* Anything that is outside of the region, but was dominated by something
6596 inside needs to update dominance info. */
6597 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6598 doms.release ();
6599 /* Update the SSA web. */
6600 update_ssa (TODO_update_ssa);
6602 if (free_region_copy)
6603 free (region_copy);
6605 free_original_copy_tables ();
6606 return true;
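/* Sketch of a caller (illustrative; compare the loop-parallelization
   code in tree-parloops.c, which uses this to move a loop's exit test
   from the top to the bottom).  ENTRY, EXIT, BBS and N_BBS are assumed
   to come from the caller's region discovery.  */
#if 0
static bool
example_hoist_exit_test (edge entry, edge exit,
			 basic_block *bbs, unsigned n_bbs)
{
  /* gimple_duplicate_sese_tail asserts this, and updates the SSA web
     itself before returning.  */
  if (need_ssa_update_p (cfun))
    return false;
  return gimple_duplicate_sese_tail (entry, exit, bbs, n_bbs, NULL);
}
#endif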
6609 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6610 adding blocks when the dominator traversal reaches EXIT. This
6611 function silently assumes that ENTRY strictly dominates EXIT. */
6613 void
6614 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6615 vec<basic_block> *bbs_p)
6617 basic_block son;
6619 for (son = first_dom_son (CDI_DOMINATORS, entry);
6620 son;
6621 son = next_dom_son (CDI_DOMINATORS, son))
6623 bbs_p->safe_push (son);
6624 if (son != exit)
6625 gather_blocks_in_sese_region (son, exit, bbs_p);
6629 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6630 The duplicates are recorded in VARS_MAP. */
6632 static void
6633 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6634 tree to_context)
6636 tree t = *tp, new_t;
6637 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6639 if (DECL_CONTEXT (t) == to_context)
6640 return;
6642 bool existed;
6643 tree &loc = vars_map->get_or_insert (t, &existed);
6645 if (!existed)
6647 if (SSA_VAR_P (t))
6649 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6650 add_local_decl (f, new_t);
6652 else
6654 gcc_assert (TREE_CODE (t) == CONST_DECL);
6655 new_t = copy_node (t);
6657 DECL_CONTEXT (new_t) = to_context;
6659 loc = new_t;
6661 else
6662 new_t = loc;
6664 *tp = new_t;
6668 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6669 VARS_MAP maps old ssa names and var_decls to the new ones. */
6671 static tree
6672 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6673 tree to_context)
6675 tree new_name;
6677 gcc_assert (!virtual_operand_p (name));
6679 tree *loc = vars_map->get (name);
6681 if (!loc)
6683 tree decl = SSA_NAME_VAR (name);
6684 if (decl)
6686 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6687 replace_by_duplicate_decl (&decl, vars_map, to_context);
6688 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6689 decl, SSA_NAME_DEF_STMT (name));
6691 else
6692 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6693 name, SSA_NAME_DEF_STMT (name));
6695 /* Now that we've used the def stmt to define new_name, make sure it
6696 doesn't define name anymore. */
6697 SSA_NAME_DEF_STMT (name) = NULL;
6699 vars_map->put (name, new_name);
6701 else
6702 new_name = *loc;
6704 return new_name;
6707 struct move_stmt_d
6709 tree orig_block;
6710 tree new_block;
6711 tree from_context;
6712 tree to_context;
6713 hash_map<tree, tree> *vars_map;
6714 htab_t new_label_map;
6715 hash_map<void *, void *> *eh_map;
6716 bool remap_decls_p;
6719 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6720 contained in *TP whose block was ORIG_BLOCK previously, and change the
6721 DECL_CONTEXT of every local variable referenced in *TP. */
6723 static tree
6724 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6726 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6727 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6728 tree t = *tp;
6730 if (EXPR_P (t))
6732 tree block = TREE_BLOCK (t);
6733 if (block == NULL_TREE)
6735 else if (block == p->orig_block
6736 || p->orig_block == NULL_TREE)
6737 TREE_SET_BLOCK (t, p->new_block);
6738 else if (flag_checking)
6740 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6741 block = BLOCK_SUPERCONTEXT (block);
6742 gcc_assert (block == p->orig_block);
6745 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6747 if (TREE_CODE (t) == SSA_NAME)
6748 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6749 else if (TREE_CODE (t) == PARM_DECL
6750 && gimple_in_ssa_p (cfun))
6751 *tp = *(p->vars_map->get (t));
6752 else if (TREE_CODE (t) == LABEL_DECL)
6754 if (p->new_label_map)
6756 struct tree_map in, *out;
6757 in.base.from = t;
6758 out = (struct tree_map *)
6759 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6760 if (out)
6761 *tp = t = out->to;
6764 /* For FORCED_LABELs we can end up with references from other
6765 functions if some SESE regions are outlined. It is UB to
6766 jump in between them, but they could be used just for printing
6767 addresses etc. In that case, DECL_CONTEXT on the label should
6768 be the function containing the glabel stmt with that LABEL_DECL,
6769 rather than whichever function a reference to the label was last
6770 seen in. */
6771 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6772 DECL_CONTEXT (t) = p->to_context;
6774 else if (p->remap_decls_p)
6776 /* Replace T with its duplicate. T should no longer appear in the
6777 parent function, so this looks wasteful; however, it may appear
6778 in referenced_vars, and more importantly, as virtual operands of
6779 statements, and in alias lists of other variables. It would be
6780 quite difficult to expunge it from all those places. ??? It might
6781 suffice to do this for addressable variables. */
6782 if ((VAR_P (t) && !is_global_var (t))
6783 || TREE_CODE (t) == CONST_DECL)
6784 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6786 *walk_subtrees = 0;
6788 else if (TYPE_P (t))
6789 *walk_subtrees = 0;
6791 return NULL_TREE;
6794 /* Helper for move_stmt_r. Given an EH region number for the source
6795 function, map that to the duplicate EH region number in the dest. */
6797 static int
6798 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6800 eh_region old_r, new_r;
6802 old_r = get_eh_region_from_number (old_nr);
6803 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6805 return new_r->index;
6808 /* Similar, but operate on INTEGER_CSTs. */
6810 static tree
6811 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6813 int old_nr, new_nr;
6815 old_nr = tree_to_shwi (old_t_nr);
6816 new_nr = move_stmt_eh_region_nr (old_nr, p);
6818 return build_int_cst (integer_type_node, new_nr);
6821 /* Like move_stmt_op, but for gimple statements.
6823 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6824 contained in the current statement in *GSI_P and change the
6825 DECL_CONTEXT of every local variable referenced in the current
6826 statement. */
6828 static tree
6829 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6830 struct walk_stmt_info *wi)
6832 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6833 gimple *stmt = gsi_stmt (*gsi_p);
6834 tree block = gimple_block (stmt);
6836 if (block == p->orig_block
6837 || (p->orig_block == NULL_TREE
6838 && block != NULL_TREE))
6839 gimple_set_block (stmt, p->new_block);
6841 switch (gimple_code (stmt))
6843 case GIMPLE_CALL:
6844 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6846 tree r, fndecl = gimple_call_fndecl (stmt);
6847 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6848 switch (DECL_FUNCTION_CODE (fndecl))
6850 case BUILT_IN_EH_COPY_VALUES:
6851 r = gimple_call_arg (stmt, 1);
6852 r = move_stmt_eh_region_tree_nr (r, p);
6853 gimple_call_set_arg (stmt, 1, r);
6854 /* FALLTHRU */
6856 case BUILT_IN_EH_POINTER:
6857 case BUILT_IN_EH_FILTER:
6858 r = gimple_call_arg (stmt, 0);
6859 r = move_stmt_eh_region_tree_nr (r, p);
6860 gimple_call_set_arg (stmt, 0, r);
6861 break;
6863 default:
6864 break;
6867 break;
6869 case GIMPLE_RESX:
6871 gresx *resx_stmt = as_a <gresx *> (stmt);
6872 int r = gimple_resx_region (resx_stmt);
6873 r = move_stmt_eh_region_nr (r, p);
6874 gimple_resx_set_region (resx_stmt, r);
6876 break;
6878 case GIMPLE_EH_DISPATCH:
6880 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6881 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6882 r = move_stmt_eh_region_nr (r, p);
6883 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6885 break;
6887 case GIMPLE_OMP_RETURN:
6888 case GIMPLE_OMP_CONTINUE:
6889 break;
6891 case GIMPLE_LABEL:
6893 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6894 so that such labels can be referenced from other regions.
6895 Make sure to update it when seeing a GIMPLE_LABEL though,
6896 that is the owner of the label. */
6897 walk_gimple_op (stmt, move_stmt_op, wi);
6898 *handled_ops_p = true;
6899 tree label = gimple_label_label (as_a <glabel *> (stmt));
6900 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6901 DECL_CONTEXT (label) = p->to_context;
6903 break;
6905 default:
6906 if (is_gimple_omp (stmt))
6908 /* Do not remap variables inside OMP directives. Variables
6909 referenced in clauses and directive header belong to the
6910 parent function and should not be moved into the child
6911 function. */
6912 bool save_remap_decls_p = p->remap_decls_p;
6913 p->remap_decls_p = false;
6914 *handled_ops_p = true;
6916 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6917 move_stmt_op, wi);
6919 p->remap_decls_p = save_remap_decls_p;
6921 break;
6924 return NULL_TREE;
6927 /* Move basic block BB from function CFUN to function DEST_FN. The
6928 block is moved out of the original linked list and placed after
6929 block AFTER in the new list. Also, the block is removed from the
6930 original array of blocks and placed in DEST_FN's array of blocks.
6931 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6932 updated to reflect the moved edges.
6934 The local variables are remapped to new instances; VARS_MAP is used
6935 to record the mapping. */
6937 static void
6938 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6939 basic_block after, bool update_edge_count_p,
6940 struct move_stmt_d *d)
6942 struct control_flow_graph *cfg;
6943 edge_iterator ei;
6944 edge e;
6945 gimple_stmt_iterator si;
6946 unsigned old_len, new_len;
6948 /* Remove BB from dominance structures. */
6949 delete_from_dominance_info (CDI_DOMINATORS, bb);
6951 /* Move BB from its current loop to the copy in the new function. */
6952 if (current_loops)
6954 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6955 if (new_loop)
6956 bb->loop_father = new_loop;
6959 /* Link BB to the new linked list. */
6960 move_block_after (bb, after);
6962 /* Update the edge count in the corresponding flowgraphs. */
6963 if (update_edge_count_p)
6964 FOR_EACH_EDGE (e, ei, bb->succs)
6966 cfun->cfg->x_n_edges--;
6967 dest_cfun->cfg->x_n_edges++;
6970 /* Remove BB from the original basic block array. */
6971 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6972 cfun->cfg->x_n_basic_blocks--;
6974 /* Grow DEST_CFUN's basic block array if needed. */
6975 cfg = dest_cfun->cfg;
6976 cfg->x_n_basic_blocks++;
6977 if (bb->index >= cfg->x_last_basic_block)
6978 cfg->x_last_basic_block = bb->index + 1;
6980 old_len = vec_safe_length (cfg->x_basic_block_info);
6981 if ((unsigned) cfg->x_last_basic_block >= old_len)
6983 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6984 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6987 (*cfg->x_basic_block_info)[bb->index] = bb;
6989 /* Remap the variables in phi nodes. */
6990 for (gphi_iterator psi = gsi_start_phis (bb);
6991 !gsi_end_p (psi); )
6993 gphi *phi = psi.phi ();
6994 use_operand_p use;
6995 tree op = PHI_RESULT (phi);
6996 ssa_op_iter oi;
6997 unsigned i;
6999 if (virtual_operand_p (op))
7001 /* Remove the phi nodes for virtual operands (alias analysis will be
7002 run for the new function, anyway). */
7003 remove_phi_node (&psi, true);
7004 continue;
7007 SET_PHI_RESULT (phi,
7008 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7009 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7011 op = USE_FROM_PTR (use);
7012 if (TREE_CODE (op) == SSA_NAME)
7013 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7016 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7018 location_t locus = gimple_phi_arg_location (phi, i);
7019 tree block = LOCATION_BLOCK (locus);
7021 if (locus == UNKNOWN_LOCATION)
7022 continue;
7023 if (d->orig_block == NULL_TREE || block == d->orig_block)
7025 locus = set_block (locus, d->new_block);
7026 gimple_phi_arg_set_location (phi, i, locus);
7030 gsi_next (&psi);
7033 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7035 gimple *stmt = gsi_stmt (si);
7036 struct walk_stmt_info wi;
7038 memset (&wi, 0, sizeof (wi));
7039 wi.info = d;
7040 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7042 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7044 tree label = gimple_label_label (label_stmt);
7045 int uid = LABEL_DECL_UID (label);
7047 gcc_assert (uid > -1);
7049 old_len = vec_safe_length (cfg->x_label_to_block_map);
7050 if (old_len <= (unsigned) uid)
7052 new_len = 3 * uid / 2 + 1;
7053 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7056 (*cfg->x_label_to_block_map)[uid] = bb;
7057 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7059 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7061 if (uid >= dest_cfun->cfg->last_label_uid)
7062 dest_cfun->cfg->last_label_uid = uid + 1;
7065 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7066 remove_stmt_from_eh_lp_fn (cfun, stmt);
7068 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7069 gimple_remove_stmt_histograms (cfun, stmt);
7071 /* We cannot leave any operands allocated from the operand caches of
7072 the current function. */
7073 free_stmt_operands (cfun, stmt);
7074 push_cfun (dest_cfun);
7075 update_stmt (stmt);
7076 pop_cfun ();
7079 FOR_EACH_EDGE (e, ei, bb->succs)
7080 if (e->goto_locus != UNKNOWN_LOCATION)
7082 tree block = LOCATION_BLOCK (e->goto_locus);
7083 if (d->orig_block == NULL_TREE
7084 || block == d->orig_block)
7085 e->goto_locus = set_block (e->goto_locus, d->new_block);
7089 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7090 the outermost EH region. Use REGION as the incoming base EH region. */
7092 static eh_region
7093 find_outermost_region_in_block (struct function *src_cfun,
7094 basic_block bb, eh_region region)
7096 gimple_stmt_iterator si;
7098 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7100 gimple *stmt = gsi_stmt (si);
7101 eh_region stmt_region;
7102 int lp_nr;
7104 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7105 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7106 if (stmt_region)
7108 if (region == NULL)
7109 region = stmt_region;
7110 else if (stmt_region != region)
7112 region = eh_region_outermost (src_cfun, stmt_region, region);
7113 gcc_assert (region != NULL);
7118 return region;
7121 static tree
7122 new_label_mapper (tree decl, void *data)
7124 htab_t hash = (htab_t) data;
7125 struct tree_map *m;
7126 void **slot;
7128 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7130 m = XNEW (struct tree_map);
7131 m->hash = DECL_UID (decl);
7132 m->base.from = decl;
7133 m->to = create_artificial_label (UNKNOWN_LOCATION);
7134 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7135 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7136 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7138 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7139 gcc_assert (*slot == NULL);
7141 *slot = m;
7143 return m->to;
7146 /* Tree walker to replace the decls used inside value expressions by
7147 duplicates. */
7149 static tree
7150 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7152 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7154 switch (TREE_CODE (*tp))
7156 case VAR_DECL:
7157 case PARM_DECL:
7158 case RESULT_DECL:
7159 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7160 break;
7161 default:
7162 break;
7165 if (IS_TYPE_OR_DECL_P (*tp))
7166 *walk_subtrees = false;
7168 return NULL;
7171 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7172 subblocks. */
7174 static void
7175 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7176 tree to_context)
7178 tree *tp, t;
7180 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7182 t = *tp;
7183 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7184 continue;
7185 replace_by_duplicate_decl (&t, vars_map, to_context);
7186 if (t != *tp)
7188 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7190 tree x = DECL_VALUE_EXPR (*tp);
7191 struct replace_decls_d rd = { vars_map, to_context };
6692 x = unshare_expr (x);
7193 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7194 SET_DECL_VALUE_EXPR (t, x);
7195 DECL_HAS_VALUE_EXPR_P (t) = 1;
7197 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7198 *tp = t;
7202 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7203 replace_block_vars_by_duplicates (block, vars_map, to_context);
7206 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7207 from FN1 to FN2. */
7209 static void
7210 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7211 struct loop *loop)
7213 /* Discard it from the old loop array. */
7214 (*get_loops (fn1))[loop->num] = NULL;
7216 /* Place it in the new loop array, assigning it a new number. */
7217 loop->num = number_of_loops (fn2);
7218 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7220 /* Recurse to children. */
7221 for (loop = loop->inner; loop; loop = loop->next)
7222 fixup_loop_arrays_after_move (fn1, fn2, loop);
7225 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7226 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7228 DEBUG_FUNCTION void
7229 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7231 basic_block bb;
7232 edge_iterator ei;
7233 edge e;
7234 bitmap bbs = BITMAP_ALLOC (NULL);
7235 int i;
7237 gcc_assert (entry != NULL);
7238 gcc_assert (entry != exit);
7239 gcc_assert (bbs_p != NULL);
7241 gcc_assert (bbs_p->length () > 0);
7243 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7244 bitmap_set_bit (bbs, bb->index);
7246 gcc_assert (bitmap_bit_p (bbs, entry->index));
7247 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7249 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7251 if (bb == entry)
7253 gcc_assert (single_pred_p (entry));
7254 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7256 else
7257 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7259 e = ei_edge (ei);
7260 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7263 if (bb == exit)
7265 gcc_assert (single_succ_p (exit));
7266 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7268 else
7269 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7271 e = ei_edge (ei);
7272 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7276 BITMAP_FREE (bbs);
7279 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7281 bool
7282 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7284 bitmap release_names = (bitmap)data;
7286 if (TREE_CODE (from) != SSA_NAME)
7287 return true;
7289 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7290 return true;
7293 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7294 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7295 single basic block in the original CFG and the new basic block is
7296 returned. DEST_CFUN must not have a CFG yet.
7298 Note that the region need not be a pure SESE region. Blocks inside
7299 the region may contain calls to abort/exit. The only restriction
7300 is that ENTRY_BB should be the only entry point and it must
7301 dominate EXIT_BB.
7303 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7304 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7305 to the new function.
7307 All local variables referenced in the region are assumed to be in
7308 the corresponding BLOCK_VARS and unexpanded variable lists
7309 associated with DEST_CFUN.
7311 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7312 reimplement move_sese_region_to_fn by duplicating the region rather than
7313 moving it. */
7315 basic_block
7316 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7317 basic_block exit_bb, tree orig_block)
7319 vec<basic_block> bbs, dom_bbs;
7320 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7321 basic_block after, bb, *entry_pred, *exit_succ, abb;
7322 struct function *saved_cfun = cfun;
7323 int *entry_flag, *exit_flag;
7324 profile_probability *entry_prob, *exit_prob;
7325 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7326 edge e;
7327 edge_iterator ei;
7328 htab_t new_label_map;
7329 hash_map<void *, void *> *eh_map;
7330 struct loop *loop = entry_bb->loop_father;
7331 struct loop *loop0 = get_loop (saved_cfun, 0);
7332 struct move_stmt_d d;
7334 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7335 region. */
7336 gcc_assert (entry_bb != exit_bb
7337 && (!exit_bb
7338 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7340 /* Collect all the blocks in the region. Manually add ENTRY_BB
7341 because it won't be added by dfs_enumerate_from. */
7342 bbs.create (0);
7343 bbs.safe_push (entry_bb);
7344 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7346 if (flag_checking)
7347 verify_sese (entry_bb, exit_bb, &bbs);
7349 /* The blocks that used to be dominated by something in BBS will now be
7350 dominated by the new block. */
7351 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7352 bbs.address (),
7353 bbs.length ());
7355 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7356 the predecessor edges to ENTRY_BB and the successor edges to
7357 EXIT_BB so that we can re-attach them to the new basic block that
7358 will replace the region. */
7359 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7360 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7361 entry_flag = XNEWVEC (int, num_entry_edges);
7362 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7363 i = 0;
7364 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7366 entry_prob[i] = e->probability;
7367 entry_flag[i] = e->flags;
7368 entry_pred[i++] = e->src;
7369 remove_edge (e);
7372 if (exit_bb)
7374 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7375 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7376 exit_flag = XNEWVEC (int, num_exit_edges);
7377 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7378 i = 0;
7379 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7381 exit_prob[i] = e->probability;
7382 exit_flag[i] = e->flags;
7383 exit_succ[i++] = e->dest;
7384 remove_edge (e);
7387 else
7389 num_exit_edges = 0;
7390 exit_succ = NULL;
7391 exit_flag = NULL;
7392 exit_prob = NULL;
7395 /* Switch context to the child function to initialize DEST_FN's CFG. */
7396 gcc_assert (dest_cfun->cfg == NULL);
7397 push_cfun (dest_cfun);
7399 init_empty_tree_cfg ();
7401 /* Initialize EH information for the new function. */
7402 eh_map = NULL;
7403 new_label_map = NULL;
7404 if (saved_cfun->eh)
7406 eh_region region = NULL;
7408 FOR_EACH_VEC_ELT (bbs, i, bb)
7409 region = find_outermost_region_in_block (saved_cfun, bb, region);
7411 init_eh_for_function ();
7412 if (region != NULL)
7414 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7415 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7416 new_label_mapper, new_label_map);
7420 /* Initialize an empty loop tree. */
7421 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7422 init_loops_structure (dest_cfun, loops, 1);
7423 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7424 set_loops_for_fn (dest_cfun, loops);
7426 /* Move the outlined loop tree part. */
7427 num_nodes = bbs.length ();
7428 FOR_EACH_VEC_ELT (bbs, i, bb)
7430 if (bb->loop_father->header == bb)
7432 struct loop *this_loop = bb->loop_father;
7433 struct loop *outer = loop_outer (this_loop);
7434 if (outer == loop
7435 /* If the SESE region contains some bbs ending with
7436 a noreturn call, those are considered to belong
7437 to the outermost loop in saved_cfun, rather than
7438 the entry_bb's loop_father. */
7439 || outer == loop0)
7441 if (outer != loop)
7442 num_nodes -= this_loop->num_nodes;
7443 flow_loop_tree_node_remove (bb->loop_father);
7444 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7445 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7448 else if (bb->loop_father == loop0 && loop0 != loop)
7449 num_nodes--;
7451 /* Remove loop exits from the outlined region. */
7452 if (loops_for_fn (saved_cfun)->exits)
7453 FOR_EACH_EDGE (e, ei, bb->succs)
7455 struct loops *l = loops_for_fn (saved_cfun);
7456 loop_exit **slot
7457 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7458 NO_INSERT);
7459 if (slot)
7460 l->exits->clear_slot (slot);
7465 /* Adjust the number of blocks in the tree root of the outlined part. */
7466 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7468 /* Setup a mapping to be used by move_block_to_fn. */
7469 loop->aux = current_loops->tree_root;
7470 loop0->aux = current_loops->tree_root;
7472 pop_cfun ();
7474 /* Move blocks from BBS into DEST_CFUN. */
7475 gcc_assert (bbs.length () >= 2);
7476 after = dest_cfun->cfg->x_entry_block_ptr;
7477 hash_map<tree, tree> vars_map;
7479 memset (&d, 0, sizeof (d));
7480 d.orig_block = orig_block;
7481 d.new_block = DECL_INITIAL (dest_cfun->decl);
7482 d.from_context = cfun->decl;
7483 d.to_context = dest_cfun->decl;
7484 d.vars_map = &vars_map;
7485 d.new_label_map = new_label_map;
7486 d.eh_map = eh_map;
7487 d.remap_decls_p = true;
7489 if (gimple_in_ssa_p (cfun))
7490 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7492 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7493 set_ssa_default_def (dest_cfun, arg, narg);
7494 vars_map.put (arg, narg);
7497 FOR_EACH_VEC_ELT (bbs, i, bb)
7499 /* No need to update edge counts on the last block. They have
7500 already been updated earlier when we detached the region from
7501 the original CFG. */
7502 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7503 after = bb;
7506 loop->aux = NULL;
7507 loop0->aux = NULL;
7508 /* Loop sizes are no longer correct, fix them up. */
7509 loop->num_nodes -= num_nodes;
7510 for (struct loop *outer = loop_outer (loop);
7511 outer; outer = loop_outer (outer))
7512 outer->num_nodes -= num_nodes;
7513 loop0->num_nodes -= bbs.length () - num_nodes;
7515 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7517 struct loop *aloop;
7518 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7519 if (aloop != NULL)
7521 if (aloop->simduid)
7523 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7524 d.to_context);
7525 dest_cfun->has_simduid_loops = true;
7527 if (aloop->force_vectorize)
7528 dest_cfun->has_force_vectorize_loops = true;
7532 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7533 if (orig_block)
7535 tree block;
7536 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7537 == NULL_TREE);
7538 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7539 = BLOCK_SUBBLOCKS (orig_block);
7540 for (block = BLOCK_SUBBLOCKS (orig_block);
7541 block; block = BLOCK_CHAIN (block))
7542 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7543 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7546 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7547 &vars_map, dest_cfun->decl);
7549 if (new_label_map)
7550 htab_delete (new_label_map);
7551 if (eh_map)
7552 delete eh_map;
7554 if (gimple_in_ssa_p (cfun))
7556 /* We need to release ssa-names in a defined order, so first find them,
7557 and then iterate in ascending version order. */
7558 bitmap release_names = BITMAP_ALLOC (NULL);
7559 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7560 bitmap_iterator bi;
7561 unsigned i;
7562 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7563 release_ssa_name (ssa_name (i));
7564 BITMAP_FREE (release_names);
7567 /* Rewire the entry and exit blocks. ENTRY_BB turns into the
7568 successor of DEST_FN's ENTRY_BLOCK_PTR in the child function.
7569 Similarly, EXIT_BB turns into the predecessor of DEST_FN's
7570 EXIT_BLOCK_PTR. We
7571 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7572 various CFG manipulation functions get to the right CFG.
7574 FIXME, this is silly. The CFG ought to become a parameter to
7575 these helpers. */
7576 push_cfun (dest_cfun);
7577 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7578 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7579 if (exit_bb)
7581 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7582 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7584 else
7585 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7586 pop_cfun ();
7588 /* Back in the original function, the SESE region has disappeared,
7589 create a new basic block in its place. */
7590 bb = create_empty_bb (entry_pred[0]);
7591 if (current_loops)
7592 add_bb_to_loop (bb, loop);
7593 for (i = 0; i < num_entry_edges; i++)
7595 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7596 e->probability = entry_prob[i];
7599 for (i = 0; i < num_exit_edges; i++)
7601 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7602 e->probability = exit_prob[i];
7605 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7606 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7607 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7608 dom_bbs.release ();
7610 if (exit_bb)
7612 free (exit_prob);
7613 free (exit_flag);
7614 free (exit_succ);
7616 free (entry_prob);
7617 free (entry_flag);
7618 free (entry_pred);
7619 bbs.release ();
7621 return bb;
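/* Illustrative sketch (caller-side setup assumed): this is roughly how
   the OMP expander outlines a parallel region into a child function,
   as expand_omp_taskreg in omp-expand.c does.  CHILD_FN, ENTRY_BB,
   EXIT_BB and BLOCK come from the caller.  */
#if 0
struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
basic_block new_bb
  = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
/* Back in the parent function, NEW_BB now stands where the region used
   to be, and the call to CHILD_FN is emitted into it.  */
#endif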
7624 /* Dump default def DEF to file FILE using FLAGS and indentation
7625 SPC. */
7627 static void
7628 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7630 for (int i = 0; i < spc; ++i)
7631 fprintf (file, " ");
7632 dump_ssaname_info_to_file (file, def, spc);
7634 print_generic_expr (file, TREE_TYPE (def), flags);
7635 fprintf (file, " ");
7636 print_generic_expr (file, def, flags);
7637 fprintf (file, " = ");
7638 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7639 fprintf (file, ";\n");
7642 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7644 static void
7645 print_no_sanitize_attr_value (FILE *file, tree value)
7647 unsigned int flags = tree_to_uhwi (value);
7648 bool first = true;
7649 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7651 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7653 if (!first)
7654 fprintf (file, " | ");
7655 fprintf (file, "%s", sanitizer_opts[i].name);
7656 first = false;
7661 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7664 void
7665 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7667 tree arg, var, old_current_fndecl = current_function_decl;
7668 struct function *dsf;
7669 bool ignore_topmost_bind = false, any_var = false;
7670 basic_block bb;
7671 tree chain;
7672 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7673 && decl_is_tm_clone (fndecl));
7674 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7676 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7678 fprintf (file, "__attribute__((");
7680 bool first = true;
7681 tree chain;
7682 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7683 first = false, chain = TREE_CHAIN (chain))
7685 if (!first)
7686 fprintf (file, ", ");
7688 tree name = get_attribute_name (chain);
7689 print_generic_expr (file, name, dump_flags);
7690 if (TREE_VALUE (chain) != NULL_TREE)
7692 fprintf (file, " (");
7694 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7695 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7696 else
7697 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7698 fprintf (file, ")");
7702 fprintf (file, "))\n");
7705 current_function_decl = fndecl;
7706 if (flags & TDF_GIMPLE)
7708 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7709 dump_flags | TDF_SLIM);
7710 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7712 else
7713 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7715 arg = DECL_ARGUMENTS (fndecl);
7716 while (arg)
7718 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7719 fprintf (file, " ");
7720 print_generic_expr (file, arg, dump_flags);
7721 if (DECL_CHAIN (arg))
7722 fprintf (file, ", ");
7723 arg = DECL_CHAIN (arg);
7725 fprintf (file, ")\n");
7727 dsf = DECL_STRUCT_FUNCTION (fndecl);
7728 if (dsf && (flags & TDF_EH))
7729 dump_eh_tree (file, dsf);
7731 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7733 dump_node (fndecl, TDF_SLIM | flags, file);
7734 current_function_decl = old_current_fndecl;
7735 return;
7738 /* When GIMPLE is lowered, the variables are no longer available in
7739 BIND_EXPRs, so display them separately. */
7740 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7742 unsigned ix;
7743 ignore_topmost_bind = true;
7745 fprintf (file, "{\n");
7746 if (gimple_in_ssa_p (fun)
7747 && (flags & TDF_ALIAS))
7749 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7750 arg = DECL_CHAIN (arg))
7752 tree def = ssa_default_def (fun, arg);
7753 if (def)
7754 dump_default_def (file, def, 2, flags);
7757 tree res = DECL_RESULT (fun->decl);
7758 if (res != NULL_TREE
7759 && DECL_BY_REFERENCE (res))
7761 tree def = ssa_default_def (fun, res);
7762 if (def)
7763 dump_default_def (file, def, 2, flags);
7766 tree static_chain = fun->static_chain_decl;
7767 if (static_chain != NULL_TREE)
7769 tree def = ssa_default_def (fun, static_chain);
7770 if (def)
7771 dump_default_def (file, def, 2, flags);
7775 if (!vec_safe_is_empty (fun->local_decls))
7776 FOR_EACH_LOCAL_DECL (fun, ix, var)
7778 print_generic_decl (file, var, flags);
7779 fprintf (file, "\n");
7781 any_var = true;
7784 tree name;
7786 if (gimple_in_ssa_p (cfun))
7787 FOR_EACH_SSA_NAME (ix, name, cfun)
7789 if (!SSA_NAME_VAR (name))
7791 fprintf (file, " ");
7792 print_generic_expr (file, TREE_TYPE (name), flags);
7793 fprintf (file, " ");
7794 print_generic_expr (file, name, flags);
7795 fprintf (file, ";\n");
7797 any_var = true;
7802 if (fun && fun->decl == fndecl
7803 && fun->cfg
7804 && basic_block_info_for_fn (fun))
7806 /* If the CFG has been built, emit a CFG-based dump. */
7807 if (!ignore_topmost_bind)
7808 fprintf (file, "{\n");
7810 if (any_var && n_basic_blocks_for_fn (fun))
7811 fprintf (file, "\n");
7813 FOR_EACH_BB_FN (bb, fun)
7814 dump_bb (file, bb, 2, flags);
7816 fprintf (file, "}\n");
7818 else if (fun->curr_properties & PROP_gimple_any)
7820 /* The function is now in GIMPLE form but the CFG has not been
7821 built yet. Emit the single sequence of GIMPLE statements
7822 that make up its body. */
7823 gimple_seq body = gimple_body (fndecl);
7825 if (gimple_seq_first_stmt (body)
7826 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7827 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7828 print_gimple_seq (file, body, 0, flags);
7829 else
7831 if (!ignore_topmost_bind)
7832 fprintf (file, "{\n");
7834 if (any_var)
7835 fprintf (file, "\n");
7837 print_gimple_seq (file, body, 2, flags);
7838 fprintf (file, "}\n");
7841 else
7843 int indent;
7845 /* Make a tree based dump. */
7846 chain = DECL_SAVED_TREE (fndecl);
7847 if (chain && TREE_CODE (chain) == BIND_EXPR)
7849 if (ignore_topmost_bind)
7851 chain = BIND_EXPR_BODY (chain);
7852 indent = 2;
7854 else
7855 indent = 0;
7857 else
7859 if (!ignore_topmost_bind)
7861 fprintf (file, "{\n");
7862 /* No topmost bind, pretend it's ignored for later. */
7863 ignore_topmost_bind = true;
7865 indent = 2;
7868 if (any_var)
7869 fprintf (file, "\n");
7871 print_generic_stmt_indented (file, chain, flags, indent);
7872 if (ignore_topmost_bind)
7873 fprintf (file, "}\n");
7876 if (flags & TDF_ENUMERATE_LOCALS)
7877 dump_enumerated_decls (file, flags);
7878 fprintf (file, "\n\n");
7880 current_function_decl = old_current_fndecl;
7883 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7885 DEBUG_FUNCTION void
7886 debug_function (tree fn, dump_flags_t flags)
7888 dump_function_to_file (fn, stderr, flags);
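/* Usage note (not code in this file): from a debugger one can print
   the current function's GIMPLE with, e.g.,

     (gdb) call debug_function (cfun->decl, TDF_DETAILS)

   Any dump_flags_t combination works, e.g. TDF_VOPS or TDF_BLOCKS.  */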
7892 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7894 static void
7895 print_pred_bbs (FILE *file, basic_block bb)
7897 edge e;
7898 edge_iterator ei;
7900 FOR_EACH_EDGE (e, ei, bb->preds)
7901 fprintf (file, "bb_%d ", e->src->index);
7905 /* Print on FILE the indexes for the successors of basic_block BB. */
7907 static void
7908 print_succ_bbs (FILE *file, basic_block bb)
7910 edge e;
7911 edge_iterator ei;
7913 FOR_EACH_EDGE (e, ei, bb->succs)
7914 fprintf (file, "bb_%d ", e->dest->index);
7917 /* Print to FILE the basic block BB following the VERBOSITY level. */
7919 void
7920 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7922 char *s_indent = (char *) alloca ((size_t) indent + 1);
7923 memset ((void *) s_indent, ' ', (size_t) indent);
7924 s_indent[indent] = '\0';
7926 /* Print basic_block's header. */
7927 if (verbosity >= 2)
7929 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7930 print_pred_bbs (file, bb);
7931 fprintf (file, "}, succs = {");
7932 print_succ_bbs (file, bb);
7933 fprintf (file, "})\n");
7936 /* Print basic_block's body. */
7937 if (verbosity >= 3)
7939 fprintf (file, "%s {\n", s_indent);
7940 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7941 fprintf (file, "%s }\n", s_indent);
7945 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7947 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7948 VERBOSITY level this outputs the contents of the loop, or just its
7949 structure. */
7951 static void
7952 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7954 char *s_indent;
7955 basic_block bb;
7957 if (loop == NULL)
7958 return;
7960 s_indent = (char *) alloca ((size_t) indent + 1);
7961 memset ((void *) s_indent, ' ', (size_t) indent);
7962 s_indent[indent] = '\0';
7964 /* Print loop's header. */
7965 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7966 if (loop->header)
7967 fprintf (file, "header = %d", loop->header->index);
7968 else
7970 fprintf (file, "deleted)\n");
7971 return;
7973 if (loop->latch)
7974 fprintf (file, ", latch = %d", loop->latch->index);
7975 else
7976 fprintf (file, ", multiple latches");
7977 fprintf (file, ", niter = ");
7978 print_generic_expr (file, loop->nb_iterations);
7980 if (loop->any_upper_bound)
7982 fprintf (file, ", upper_bound = ");
7983 print_decu (loop->nb_iterations_upper_bound, file);
7985 if (loop->any_likely_upper_bound)
7987 fprintf (file, ", likely_upper_bound = ");
7988 print_decu (loop->nb_iterations_likely_upper_bound, file);
7991 if (loop->any_estimate)
7993 fprintf (file, ", estimate = ");
7994 print_decu (loop->nb_iterations_estimate, file);
7996 fprintf (file, ")\n");
7998 /* Print loop's body. */
7999 if (verbosity >= 1)
8001 fprintf (file, "%s{\n", s_indent);
8002 FOR_EACH_BB_FN (bb, cfun)
8003 if (bb->loop_father == loop)
8004 print_loops_bb (file, bb, indent, verbosity);
8006 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8007 fprintf (file, "%s}\n", s_indent);
8011 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8012 spaces. Following VERBOSITY level this outputs the contents of the
8013 loop, or just its structure. */
8015 static void
8016 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8017 int verbosity)
8019 if (loop == NULL)
8020 return;
8022 print_loop (file, loop, indent, verbosity);
8023 print_loop_and_siblings (file, loop->next, indent, verbosity);
8026 /* Follow a CFG edge from the entry point of the program, and on entry
8027 of a loop, pretty print the loop structure on FILE. */
8029 void
8030 print_loops (FILE *file, int verbosity)
8032 basic_block bb;
8034 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8035 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8036 if (bb && bb->loop_father)
8037 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8040 /* Dump a loop. */
8042 DEBUG_FUNCTION void
8043 debug (struct loop &ref)
8045 print_loop (stderr, &ref, 0, /*verbosity*/0);
8048 DEBUG_FUNCTION void
8049 debug (struct loop *ptr)
8051 if (ptr)
8052 debug (*ptr);
8053 else
8054 fprintf (stderr, "<nil>\n");
8057 /* Dump a loop verbosely. */
8059 DEBUG_FUNCTION void
8060 debug_verbose (struct loop &ref)
8062 print_loop (stderr, &ref, 0, /*verbosity*/3);
8065 DEBUG_FUNCTION void
8066 debug_verbose (struct loop *ptr)
8068 if (ptr)
8069 debug (*ptr);
8070 else
8071 fprintf (stderr, "<nil>\n");
8075 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8077 DEBUG_FUNCTION void
8078 debug_loops (int verbosity)
8080 print_loops (stderr, verbosity);
8083 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8085 DEBUG_FUNCTION void
8086 debug_loop (struct loop *loop, int verbosity)
8088 print_loop (stderr, loop, 0, verbosity);
8091 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8092 level. */
8094 DEBUG_FUNCTION void
8095 debug_loop_num (unsigned num, int verbosity)
8097 debug_loop (get_loop (cfun, num), verbosity);
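/* Usage note: "call debug_loops (3)" in a debugger dumps the whole
   loop tree of the current function including basic-block bodies,
   while "call debug_loop_num (1, 0)" prints just the header/latch
   structure of loop number 1 (the verbosity levels are the ones
   handled by print_loop above).  */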
8100 /* Return true if BB ends with a call, possibly followed by some
8101 instructions that must stay with the call. Return false,
8102 otherwise. */
8104 static bool
8105 gimple_block_ends_with_call_p (basic_block bb)
8107 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8108 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8112 /* Return true if BB ends with a conditional branch. Return false,
8113 otherwise. */
8115 static bool
8116 gimple_block_ends_with_condjump_p (const_basic_block bb)
8118 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8119 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8123 /* Return true if statement T may terminate execution of BB in ways not
8124 explicitly represented in the CFG. */
8126 bool
8127 stmt_can_terminate_bb_p (gimple *t)
8129 tree fndecl = NULL_TREE;
8130 int call_flags = 0;
8132 /* An EH exception not handled internally terminates execution of the whole
8133 function. */
8134 if (stmt_can_throw_external (t))
8135 return true;
8137 /* NORETURN and LONGJMP calls already have an edge to exit.
8138 CONST and PURE calls do not need one.
8139 We don't currently check for CONST and PURE here, although
8140 it would be a good idea, because those attributes are
8141 figured out from the RTL in mark_constant_function, and
8142 the counter incrementation code from -fprofile-arcs
8143 leads to different results from -fbranch-probabilities. */
8144 if (is_gimple_call (t))
8146 fndecl = gimple_call_fndecl (t);
8147 call_flags = gimple_call_flags (t);
8150 if (is_gimple_call (t)
8151 && fndecl
8152 && DECL_BUILT_IN (fndecl)
8153 && (call_flags & ECF_NOTHROW)
8154 && !(call_flags & ECF_RETURNS_TWICE)
8155 /* fork() doesn't really return twice, but the effect of
8156 wrapping it in __gcov_fork() which calls __gcov_flush()
8157 and clears the counters before forking has the same
8158 effect as returning twice. Force a fake edge. */
8159 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8160 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8161 return false;
8163 if (is_gimple_call (t))
8165 edge_iterator ei;
8166 edge e;
8167 basic_block bb;
8169 if (call_flags & (ECF_PURE | ECF_CONST)
8170 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8171 return false;
8173 /* A function call may do a longjmp, terminate the program or do other things.
8174 Special-case noreturn calls that have non-abnormal edges out, as in this
8175 case the fact is sufficiently represented by the lack of edges out of T. */
8176 if (!(call_flags & ECF_NORETURN))
8177 return true;
8179 bb = gimple_bb (t);
8180 FOR_EACH_EDGE (e, ei, bb->succs)
8181 if ((e->flags & EDGE_FAKE) == 0)
8182 return true;
8185 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8186 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8187 return true;
8189 return false;
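/* For instance, given (illustrative source; the callee is assumed not
   to be known nothrow or noreturn):

     extern void may_throw_or_longjmp (void);
     ...
     may_throw_or_longjmp ();    <-- may leave the block abnormally
     x = x + 1;                  <-- hence need not be executed

   stmt_can_terminate_bb_p returns true for the call, which is what
   lets gimple_flow_call_edges_add below attach a fake edge to the
   exit block after it.  */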
8193 /* Add fake edges to the function exit for any non-constant and
8194 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8195 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8196 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8197 that were split.
8199 The goal is to expose cases in which entering a basic block does
8200 not imply that all subsequent instructions must be executed. */
8202 static int
8203 gimple_flow_call_edges_add (sbitmap blocks)
8205 int i;
8206 int blocks_split = 0;
8207 int last_bb = last_basic_block_for_fn (cfun);
8208 bool check_last_block = false;
8210 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8211 return 0;
8213 if (! blocks)
8214 check_last_block = true;
8215 else
8216 check_last_block = bitmap_bit_p (blocks,
8217 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8219 /* In the last basic block, before epilogue generation, there will be
8220 a fallthru edge to EXIT. Special care is required if the last insn
8221 of the last basic block is a call because make_edge folds duplicate
8222 edges, which would result in the fallthru edge also being marked
8223 fake, which would result in the fallthru edge being removed by
8224 remove_fake_edges, which would result in an invalid CFG.
8226 Moreover, we can't elide the outgoing fake edge, since the block
8227 profiler needs to take this into account in order to solve the minimal
8228 spanning tree in the case that the call doesn't return.
8230 Handle this by adding a dummy instruction in a new last basic block. */
8231 if (check_last_block)
8233 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8234 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8235 gimple *t = NULL;
8237 if (!gsi_end_p (gsi))
8238 t = gsi_stmt (gsi);
8240 if (t && stmt_can_terminate_bb_p (t))
8242 edge e;
8244 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8245 if (e)
8247 gsi_insert_on_edge (e, gimple_build_nop ());
8248 gsi_commit_edge_inserts ();
8253 /* Now add fake edges to the function exit for any non-constant
8254 calls since there is no way that we can determine if they will
8255 return or not... */
8256 for (i = 0; i < last_bb; i++)
8258 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8259 gimple_stmt_iterator gsi;
8260 gimple *stmt, *last_stmt;
8262 if (!bb)
8263 continue;
8265 if (blocks && !bitmap_bit_p (blocks, i))
8266 continue;
8268 gsi = gsi_last_nondebug_bb (bb);
8269 if (!gsi_end_p (gsi))
8271 last_stmt = gsi_stmt (gsi);
8274 stmt = gsi_stmt (gsi);
8275 if (stmt_can_terminate_bb_p (stmt))
8277 edge e;
8279 /* The handling above of the final block before the
8280 epilogue should be enough to verify that there is
8281 no edge to the exit block in CFG already.
8282 Calling make_edge in such case would cause us to
8283 mark that edge as fake and remove it later. */
8284 if (flag_checking && stmt == last_stmt)
8286 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8287 gcc_assert (e == NULL);
8290 /* Note that the following may create a new basic block
8291 and renumber the existing basic blocks. */
8292 if (stmt != last_stmt)
8294 e = split_block (bb, stmt);
8295 if (e)
8296 blocks_split++;
8298 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8299 e->probability = profile_probability::guessed_never ();
8301 gsi_prev (&gsi);
8303 while (!gsi_end_p (gsi));
8307 if (blocks_split)
8308 checking_verify_flow_info ();
8310 return blocks_split;
8313 /* Removes edge E and all the blocks dominated by it, and updates dominance
8314 information. The IL in E->src needs to be updated separately.
8315 If dominance info is not available, only the edge E is removed. */
8317 void
8318 remove_edge_and_dominated_blocks (edge e)
8320 vec<basic_block> bbs_to_remove = vNULL;
8321 vec<basic_block> bbs_to_fix_dom = vNULL;
8322 edge f;
8323 edge_iterator ei;
8324 bool none_removed = false;
8325 unsigned i;
8326 basic_block bb, dbb;
8327 bitmap_iterator bi;
8329 /* If we are removing a path inside a non-root loop, this may change
8330 loop ownership of blocks or remove loops. Mark loops for fixup. */
8331 if (current_loops
8332 && loop_outer (e->src->loop_father) != NULL
8333 && e->src->loop_father == e->dest->loop_father)
8334 loops_state_set (LOOPS_NEED_FIXUP);
8336 if (!dom_info_available_p (CDI_DOMINATORS))
8338 remove_edge (e);
8339 return;
8342 /* No updating is needed for edges to exit. */
8343 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8345 if (cfgcleanup_altered_bbs)
8346 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8347 remove_edge (e);
8348 return;
8351 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8352 that is not dominated by E->dest, then this set is empty. Otherwise,
8353 all the basic blocks dominated by E->dest are removed.
8355 Also, to DF_IDOM we store the immediate dominators of the blocks in
8356 the dominance frontier of E (i.e., of the successors of the
8357 removed blocks, if there are any, and of E->dest otherwise). */
8358 FOR_EACH_EDGE (f, ei, e->dest->preds)
8360 if (f == e)
8361 continue;
8363 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8365 none_removed = true;
8366 break;
8370 auto_bitmap df, df_idom;
8371 if (none_removed)
8372 bitmap_set_bit (df_idom,
8373 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8374 else
8376 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8377 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8379 FOR_EACH_EDGE (f, ei, bb->succs)
8381 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8382 bitmap_set_bit (df, f->dest->index);
8385 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8386 bitmap_clear_bit (df, bb->index);
8388 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8390 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8391 bitmap_set_bit (df_idom,
8392 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8396 if (cfgcleanup_altered_bbs)
8398 /* Record the set of the altered basic blocks. */
8399 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8400 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8403 /* Remove E and the cancelled blocks. */
8404 if (none_removed)
8405 remove_edge (e);
8406 else
8408 /* Walk backwards so as to get a chance to substitute all
8409 released DEFs into debug stmts. See
8410 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8411 details. */
8412 for (i = bbs_to_remove.length (); i-- > 0; )
8413 delete_basic_block (bbs_to_remove[i]);
8416 /* Update the dominance information. The immediate dominator may change only
8417 for blocks whose immediate dominator belongs to DF_IDOM:
8419 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8420 removal.  Let Z be a block such that idom(Z) = Y and
8421 Z dominates X after the removal. Before removal, there exists a path P
8422 from Y to X that avoids Z. Let F be the last edge on P that is
8423 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8424 dominates W, and because of P, Z does not dominate W), and W belongs to
8425 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8426 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8428 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8429 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8430 dbb;
8431 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8432 bbs_to_fix_dom.safe_push (dbb);
8435 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8437 bbs_to_remove.release ();
8438 bbs_to_fix_dom.release ();
8441 /* Purge dead EH edges from basic block BB. */
8443 bool
8444 gimple_purge_dead_eh_edges (basic_block bb)
8446 bool changed = false;
8447 edge e;
8448 edge_iterator ei;
8449 gimple *stmt = last_stmt (bb);
8451 if (stmt && stmt_can_throw_internal (stmt))
8452 return false;
8454 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8456 if (e->flags & EDGE_EH)
8458 remove_edge_and_dominated_blocks (e);
8459 changed = true;
8461 else
8462 ei_next (&ei);
8465 return changed;
8468 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8470 bool
8471 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8473 bool changed = false;
8474 unsigned i;
8475 bitmap_iterator bi;
8477 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8479 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8481 /* Earlier gimple_purge_dead_eh_edges could have removed
8482 this basic block already. */
8483 gcc_assert (bb || changed);
8484 if (bb != NULL)
8485 changed |= gimple_purge_dead_eh_edges (bb);
8488 return changed;
8491 /* Purge dead abnormal call edges from basic block BB. */
8493 bool
8494 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8496 bool changed = false;
8497 edge e;
8498 edge_iterator ei;
8499 gimple *stmt = last_stmt (bb);
8501 if (!cfun->has_nonlocal_label
8502 && !cfun->calls_setjmp)
8503 return false;
8505 if (stmt && stmt_can_make_abnormal_goto (stmt))
8506 return false;
8508 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8510 if (e->flags & EDGE_ABNORMAL)
8512 if (e->flags & EDGE_FALLTHRU)
8513 e->flags &= ~EDGE_ABNORMAL;
8514 else
8515 remove_edge_and_dominated_blocks (e);
8516 changed = true;
8518 else
8519 ei_next (&ei);
8522 return changed;
8525 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8527 bool
8528 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8530 bool changed = false;
8531 unsigned i;
8532 bitmap_iterator bi;
8534 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8536 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8538 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8539 this basic block already. */
8540 gcc_assert (bb || changed);
8541 if (bb != NULL)
8542 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8545 return changed;
8548 /* This function is called whenever a new edge is created or
8549 redirected. */
8551 static void
8552 gimple_execute_on_growing_pred (edge e)
8554 basic_block bb = e->dest;
8556 if (!gimple_seq_empty_p (phi_nodes (bb)))
8557 reserve_phi_args_for_new_edge (bb);
8560 /* This function is called immediately before edge E is removed from
8561 the edge vector E->dest->preds. */
8563 static void
8564 gimple_execute_on_shrinking_pred (edge e)
8566 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8567 remove_phi_args (e);
8570 /*---------------------------------------------------------------------------
8571 Helper functions for Loop versioning
8572 ---------------------------------------------------------------------------*/
8574 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8575 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
8576 'new_head' was created by splitting 'second's incoming edge, it received phi
8577 arguments on that edge from split_edge().  Later, an additional edge 'e' was
8578 created to connect 'new_head' and 'first'.  This routine now adds, on that
8579 additional edge 'e', the phi args that the edge from 'new_head' to 'second'
8580 received as part of the edge splitting. */
8582 static void
8583 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8584 basic_block new_head, edge e)
8586 gphi *phi1, *phi2;
8587 gphi_iterator psi1, psi2;
8588 tree def;
8589 edge e2 = find_edge (new_head, second);
8591 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8592 edge, we should always have an edge from NEW_HEAD to SECOND. */
8593 gcc_assert (e2 != NULL);
8595 /* Walk the phi nodes of basic block 'second' and add the matching phi
8596 args to edge 'e' of 'first'.  PHI args are always in the correct order. */
8598 for (psi2 = gsi_start_phis (second),
8599 psi1 = gsi_start_phis (first);
8600 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8601 gsi_next (&psi2), gsi_next (&psi1))
8603 phi1 = psi1.phi ();
8604 phi2 = psi2.phi ();
8605 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8606 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8611 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8612 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8613 the destination of the ELSE part. */
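/* Concretely, COND_BB ends up as  if (COND_EXPR) goto SECOND_HEAD;
   else goto FIRST_HEAD;  note that only the existing fallthrough
   edge is converted into the false edge here; the caller is expected
   to create the true edge.  */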
8615 static void
8616 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8617 basic_block second_head ATTRIBUTE_UNUSED,
8618 basic_block cond_bb, void *cond_e)
8620 gimple_stmt_iterator gsi;
8621 gimple *new_cond_expr;
8622 tree cond_expr = (tree) cond_e;
8623 edge e0;
8625 /* Build the new conditional expr.  */
8626 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8627 NULL_TREE, NULL_TREE);
8629 /* Add new cond in cond_bb. */
8630 gsi = gsi_last_bb (cond_bb);
8631 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8633 /* Adjust edges appropriately to connect new head with first head
8634 as well as second head. */
8635 e0 = single_succ_edge (cond_bb);
8636 e0->flags &= ~EDGE_FALLTHRU;
8637 e0->flags |= EDGE_FALSE_VALUE;
8641 /* Do book-keeping of basic block BB for the profile consistency checker.
8642 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8643 do post-pass accounting.  Store the counts in RECORD. */
8644 static void
8645 gimple_account_profile_record (basic_block bb, int after_pass,
8646 struct profile_record *record)
8648 gimple_stmt_iterator i;
8649 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8651 record->size[after_pass]
8652 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8653 if (bb->count.initialized_p ())
8654 record->time[after_pass]
8655 += estimate_num_insns (gsi_stmt (i),
8656 &eni_time_weights) * bb->count.to_gcov_type ();
8657 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8658 record->time[after_pass]
8659 += estimate_num_insns (gsi_stmt (i),
8660 &eni_time_weights) * bb->count.to_frequency (cfun);
8664 struct cfg_hooks gimple_cfg_hooks = {
8665 "gimple",
8666 gimple_verify_flow_info,
8667 gimple_dump_bb, /* dump_bb */
8668 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8669 create_bb, /* create_basic_block */
8670 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8671 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8672 gimple_can_remove_branch_p, /* can_remove_branch_p */
8673 remove_bb, /* delete_basic_block */
8674 gimple_split_block, /* split_block */
8675 gimple_move_block_after, /* move_block_after */
8676 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8677 gimple_merge_blocks, /* merge_blocks */
8678 gimple_predict_edge, /* predict_edge */
8679 gimple_predicted_by_p, /* predicted_by_p */
8680 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8681 gimple_duplicate_bb, /* duplicate_block */
8682 gimple_split_edge, /* split_edge */
8683 gimple_make_forwarder_block, /* make_forwarder_block */
8684 NULL, /* tidy_fallthru_edge */
8685 NULL, /* force_nonfallthru */
8686 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8687 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8688 gimple_flow_call_edges_add, /* flow_call_edges_add */
8689 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8690 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8691 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8692 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8693 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8694 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8695 flush_pending_stmts, /* flush_pending_stmts */
8696 gimple_empty_block_p, /* block_empty_p */
8697 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8698 gimple_account_profile_record,
8702 /* Split all critical edges. */
8704 unsigned int
8705 split_critical_edges (void)
8707 basic_block bb;
8708 edge e;
8709 edge_iterator ei;
8711 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8712 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8713 mappings around the calls to split_edge. */
8714 start_recording_case_labels ();
8715 FOR_ALL_BB_FN (bb, cfun)
8717 FOR_EACH_EDGE (e, ei, bb->succs)
8719 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8720 split_edge (e);
8721 /* PRE inserts statements onto edges and expects that,
8722 since split_critical_edges was done beforehand, committing edge
8723 insertions will not split more edges.  In addition to critical
8724 edges we must therefore also split edges whose destination has PHI
8725 nodes or multiple predecessors, or is the exit block, and whose
8726 source ends with a control flow statement, such as RESX.
8727 Go ahead and split them too.  This matches the logic in
8728 gimple_find_edge_insert_loc. */
8728 else if ((!single_pred_p (e->dest)
8729 || !gimple_seq_empty_p (phi_nodes (e->dest))
8730 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8731 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8732 && !(e->flags & EDGE_ABNORMAL))
8734 gimple_stmt_iterator gsi;
8736 gsi = gsi_last_bb (e->src);
8737 if (!gsi_end_p (gsi)
8738 && stmt_ends_bb_p (gsi_stmt (gsi))
8739 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8740 && !gimple_call_builtin_p (gsi_stmt (gsi),
8741 BUILT_IN_RETURN)))
8742 split_edge (e);
8746 end_recording_case_labels ();
8747 return 0;
8750 namespace {
8752 const pass_data pass_data_split_crit_edges =
8754 GIMPLE_PASS, /* type */
8755 "crited", /* name */
8756 OPTGROUP_NONE, /* optinfo_flags */
8757 TV_TREE_SPLIT_EDGES, /* tv_id */
8758 PROP_cfg, /* properties_required */
8759 PROP_no_crit_edges, /* properties_provided */
8760 0, /* properties_destroyed */
8761 0, /* todo_flags_start */
8762 0, /* todo_flags_finish */
8765 class pass_split_crit_edges : public gimple_opt_pass
8767 public:
8768 pass_split_crit_edges (gcc::context *ctxt)
8769 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8772 /* opt_pass methods: */
8773 virtual unsigned int execute (function *) { return split_critical_edges (); }
8775 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8776 }; // class pass_split_crit_edges
8778 } // anon namespace
8780 gimple_opt_pass *
8781 make_pass_split_crit_edges (gcc::context *ctxt)
8783 return new pass_split_crit_edges (ctxt);
8787 /* Insert COND expression which is GIMPLE_COND after STMT
8788 in basic block BB with appropriate basic block split
8789 and creation of a new conditionally executed basic block.
8790 Update profile so the new bb is visited with probability PROB.
8791 Return created basic block. */
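/* A usage sketch with hypothetical names (BB, STMT, FLAG): to guard a
   rarely-taken slow path after STMT:

     gcond *cond = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::unlikely ());

   THEN_BB is then executed only when FLAG is nonzero.  */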
8792 basic_block
8793 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8794 profile_probability prob)
8796 edge fall = split_block (bb, stmt);
8797 gimple_stmt_iterator iter = gsi_last_bb (bb);
8798 basic_block new_bb;
8800 /* Insert cond statement. */
8801 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8802 if (gsi_end_p (iter))
8803 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8804 else
8805 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8807 /* Create conditionally executed block. */
8808 new_bb = create_empty_bb (bb);
8809 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8810 e->probability = prob;
8811 new_bb->count = e->count ();
8812 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8814 /* Fix edge for split bb. */
8815 fall->flags = EDGE_FALSE_VALUE;
8816 fall->probability -= e->probability;
8818 /* Update dominance info. */
8819 if (dom_info_available_p (CDI_DOMINATORS))
8821 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8822 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8825 /* Update loop info. */
8826 if (current_loops)
8827 add_bb_to_loop (new_bb, bb->loop_father);
8829 return new_bb;
8832 /* Build a ternary operation and gimplify it. Emit code before GSI.
8833 Return the gimple_val holding the result. */
8835 tree
8836 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8837 tree type, tree a, tree b, tree c)
8839 tree ret;
8840 location_t loc = gimple_location (gsi_stmt (*gsi));
8842 ret = fold_build3_loc (loc, code, type, a, b, c);
8843 STRIP_NOPS (ret);
8845 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8846 GSI_SAME_STMT);
8849 /* Build a binary operation and gimplify it. Emit code before GSI.
8850 Return the gimple_val holding the result. */
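/* E.g. a lowering pass computing A + B at GSI might do (a sketch with
   hypothetical operands A and B of type TYPE):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);

   SUM is then a gimple value usable directly as an operand of further
   statements inserted at GSI.  */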
8852 tree
8853 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8854 tree type, tree a, tree b)
8856 tree ret;
8858 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8859 STRIP_NOPS (ret);
8861 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8862 GSI_SAME_STMT);
8865 /* Build a unary operation and gimplify it. Emit code before GSI.
8866 Return the gimple_val holding the result. */
8868 tree
8869 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8870 tree a)
8872 tree ret;
8874 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8875 STRIP_NOPS (ret);
8877 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8878 GSI_SAME_STMT);
8883 /* Given a basic block B which ends with a conditional and has
8884 precisely two successors, determine which of the edges is taken if
8885 the conditional is true and which is taken if the conditional is
8886 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
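/* E.g. for a block B ending in  if (a_1 < b_2)  a caller does (sketch):

     edge t, f;
     extract_true_false_edges_from_block (b, &t, &f);

   after which T is the edge taken when a_1 < b_2 holds and F the edge
   taken otherwise.  */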
8888 void
8889 extract_true_false_edges_from_block (basic_block b,
8890 edge *true_edge,
8891 edge *false_edge)
8893 edge e = EDGE_SUCC (b, 0);
8895 if (e->flags & EDGE_TRUE_VALUE)
8897 *true_edge = e;
8898 *false_edge = EDGE_SUCC (b, 1);
8900 else
8902 *false_edge = e;
8903 *true_edge = EDGE_SUCC (b, 1);
8908 /* From a controlling predicate in the immediate dominator DOM of
8909 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8910 predicate evaluates to true and false and store them to
8911 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8912 they are non-NULL.  Return true if the edges can be determined,
8913 else return false. */
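/* A usage sketch (hypothetical): for  x_3 = PHI <a_1 (e0), b_2 (e1)>
   in PHIBLOCK whose immediate dominator DOM ends in  if (cond):

     edge te, fe;
     if (extract_true_false_controlled_edges (dom, phiblock, &te, &fe))
       ... PHI_ARG_DEF (phi, te->dest_idx) is x_3's value when COND
           holds ...  */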
8915 bool
8916 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8917 edge *true_controlled_edge,
8918 edge *false_controlled_edge)
8920 basic_block bb = phiblock;
8921 edge true_edge, false_edge, tem;
8922 edge e0 = NULL, e1 = NULL;
8924 /* We have to verify that one edge into the PHI node is dominated
8925 by the true edge of the predicate block and the other edge
8926 dominated by the false edge. This ensures that the PHI argument
8927 we are going to take is completely determined by the path we
8928 take from the predicate block.
8929 We can only use BB dominance checks below if the destination of
8930 the true/false edges are dominated by their edge, thus only
8931 have a single predecessor. */
8932 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8933 tem = EDGE_PRED (bb, 0);
8934 if (tem == true_edge
8935 || (single_pred_p (true_edge->dest)
8936 && (tem->src == true_edge->dest
8937 || dominated_by_p (CDI_DOMINATORS,
8938 tem->src, true_edge->dest))))
8939 e0 = tem;
8940 else if (tem == false_edge
8941 || (single_pred_p (false_edge->dest)
8942 && (tem->src == false_edge->dest
8943 || dominated_by_p (CDI_DOMINATORS,
8944 tem->src, false_edge->dest))))
8945 e1 = tem;
8946 else
8947 return false;
8948 tem = EDGE_PRED (bb, 1);
8949 if (tem == true_edge
8950 || (single_pred_p (true_edge->dest)
8951 && (tem->src == true_edge->dest
8952 || dominated_by_p (CDI_DOMINATORS,
8953 tem->src, true_edge->dest))))
8954 e0 = tem;
8955 else if (tem == false_edge
8956 || (single_pred_p (false_edge->dest)
8957 && (tem->src == false_edge->dest
8958 || dominated_by_p (CDI_DOMINATORS,
8959 tem->src, false_edge->dest))))
8960 e1 = tem;
8961 else
8962 return false;
8963 if (!e0 || !e1)
8964 return false;
8966 if (true_controlled_edge)
8967 *true_controlled_edge = e0;
8968 if (false_controlled_edge)
8969 *false_controlled_edge = e1;
8971 return true;
8974 /* Generate a range test LHS CODE RHS (with CODE supplied by the caller)
8975 that determines whether INDEX is in the range [LOW, HIGH].  Insert the
associated stmts before the last stmt of BB. */
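/* This emits the classic unsigned range-check idiom: INDEX is in
   [LOW, HIGH] iff (utype) (INDEX - LOW) <= (utype) (HIGH - LOW),
   so a single unsigned comparison of *LHS against *RHS suffices.  */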
8977 void
8978 generate_range_test (basic_block bb, tree index, tree low, tree high,
8979 tree *lhs, tree *rhs)
8981 tree type = TREE_TYPE (index);
8982 tree utype = unsigned_type_for (type);
8984 low = fold_convert (type, low);
8985 high = fold_convert (type, high);
8987 tree tmp = make_ssa_name (type);
8988 gassign *sub1
8989 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
8991 *lhs = make_ssa_name (utype);
8992 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
8994 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
8995 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8996 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
8997 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
9000 /* Emit return warnings. */
9002 namespace {
9004 const pass_data pass_data_warn_function_return =
9006 GIMPLE_PASS, /* type */
9007 "*warn_function_return", /* name */
9008 OPTGROUP_NONE, /* optinfo_flags */
9009 TV_NONE, /* tv_id */
9010 PROP_cfg, /* properties_required */
9011 0, /* properties_provided */
9012 0, /* properties_destroyed */
9013 0, /* todo_flags_start */
9014 0, /* todo_flags_finish */
9017 class pass_warn_function_return : public gimple_opt_pass
9019 public:
9020 pass_warn_function_return (gcc::context *ctxt)
9021 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9024 /* opt_pass methods: */
9025 virtual unsigned int execute (function *);
9027 }; // class pass_warn_function_return
9029 unsigned int
9030 pass_warn_function_return::execute (function *fun)
9032 source_location location;
9033 gimple *last;
9034 edge e;
9035 edge_iterator ei;
9037 if (!targetm.warn_func_return (fun->decl))
9038 return 0;
9040 /* If we have a path to EXIT, then we do return. */
9041 if (TREE_THIS_VOLATILE (fun->decl)
9042 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9044 location = UNKNOWN_LOCATION;
9045 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9046 (e = ei_safe_edge (ei)); )
9048 last = last_stmt (e->src);
9049 if ((gimple_code (last) == GIMPLE_RETURN
9050 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9051 && location == UNKNOWN_LOCATION
9052 && (location = gimple_location (last)) != UNKNOWN_LOCATION
9053 && !optimize)
9054 break;
9055 /* When optimizing, replace return stmts in noreturn functions
9056 with a __builtin_unreachable () call. */
9057 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9059 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9060 gimple *new_stmt = gimple_build_call (fndecl, 0);
9061 gimple_set_location (new_stmt, gimple_location (last));
9062 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9063 gsi_replace (&gsi, new_stmt, true);
9064 remove_edge (e);
9066 else
9067 ei_next (&ei);
9069 if (location == UNKNOWN_LOCATION)
9070 location = cfun->function_end_locus;
9071 warning_at (location, 0, "%<noreturn%> function does return");
9074 /* If we see "return;" in some basic block, then we do reach the end
9075 without returning a value. */
9076 else if (warn_return_type > 0
9077 && !TREE_NO_WARNING (fun->decl)
9078 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
9079 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9081 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9083 gimple *last = last_stmt (e->src);
9084 greturn *return_stmt = dyn_cast <greturn *> (last);
9085 if (return_stmt
9086 && gimple_return_retval (return_stmt) == NULL
9087 && !gimple_no_warning_p (last))
9089 location = gimple_location (last);
9090 if (location == UNKNOWN_LOCATION)
9091 location = fun->function_end_locus;
9092 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
9093 TREE_NO_WARNING (fun->decl) = 1;
9094 break;
9098 return 0;
9101 } // anon namespace
9103 gimple_opt_pass *
9104 make_pass_warn_function_return (gcc::context *ctxt)
9106 return new pass_warn_function_return (ctxt);
9109 /* Walk a gimplified function and warn about calls to functions whose
9110 return value is ignored although attribute((warn_unused_result)) is set.
9111 This is done before inlining, so we don't have to worry about that. */
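/* E.g. for

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the body of G contains a GIMPLE_CALL to F without an LHS, which is
   diagnosed by the walk below.  */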
9113 static void
9114 do_warn_unused_result (gimple_seq seq)
9116 tree fdecl, ftype;
9117 gimple_stmt_iterator i;
9119 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9121 gimple *g = gsi_stmt (i);
9123 switch (gimple_code (g))
9125 case GIMPLE_BIND:
9126 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9127 break;
9128 case GIMPLE_TRY:
9129 do_warn_unused_result (gimple_try_eval (g));
9130 do_warn_unused_result (gimple_try_cleanup (g));
9131 break;
9132 case GIMPLE_CATCH:
9133 do_warn_unused_result (gimple_catch_handler (
9134 as_a <gcatch *> (g)));
9135 break;
9136 case GIMPLE_EH_FILTER:
9137 do_warn_unused_result (gimple_eh_filter_failure (g));
9138 break;
9140 case GIMPLE_CALL:
9141 if (gimple_call_lhs (g))
9142 break;
9143 if (gimple_call_internal_p (g))
9144 break;
9146 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9147 LHS. All calls whose value is ignored should be
9148 represented like this. Look for the attribute. */
9149 fdecl = gimple_call_fndecl (g);
9150 ftype = gimple_call_fntype (g);
9152 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9154 location_t loc = gimple_location (g);
9156 if (fdecl)
9157 warning_at (loc, OPT_Wunused_result,
9158 "ignoring return value of %qD, "
9159 "declared with attribute warn_unused_result",
9160 fdecl);
9161 else
9162 warning_at (loc, OPT_Wunused_result,
9163 "ignoring return value of function "
9164 "declared with attribute warn_unused_result");
9166 break;
9168 default:
9169 /* Not a container, not a call, or a call whose value is used. */
9170 break;
9175 namespace {
9177 const pass_data pass_data_warn_unused_result =
9179 GIMPLE_PASS, /* type */
9180 "*warn_unused_result", /* name */
9181 OPTGROUP_NONE, /* optinfo_flags */
9182 TV_NONE, /* tv_id */
9183 PROP_gimple_any, /* properties_required */
9184 0, /* properties_provided */
9185 0, /* properties_destroyed */
9186 0, /* todo_flags_start */
9187 0, /* todo_flags_finish */
9190 class pass_warn_unused_result : public gimple_opt_pass
9192 public:
9193 pass_warn_unused_result (gcc::context *ctxt)
9194 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9197 /* opt_pass methods: */
9198 virtual bool gate (function *) { return flag_warn_unused_result; }
9199 virtual unsigned int execute (function *)
9201 do_warn_unused_result (gimple_body (current_function_decl));
9202 return 0;
9205 }; // class pass_warn_unused_result
9207 } // anon namespace
9209 gimple_opt_pass *
9210 make_pass_warn_unused_result (gcc::context *ctxt)
9212 return new pass_warn_unused_result (ctxt);
9215 /* IPA passes, compilation of earlier functions or inlining
9216 might have changed some properties, such as marking functions nothrow,
9217 pure, const or noreturn.
9218 Remove redundant edges and basic blocks, and create new ones if necessary.
9220 This pass can't be executed as a standalone pass from the pass manager,
9221 because in between inlining and this fixup verify_flow_info would fail. */
9223 unsigned int
9224 execute_fixup_cfg (void)
9226 basic_block bb;
9227 gimple_stmt_iterator gsi;
9228 int todo = 0;
9229 cgraph_node *node = cgraph_node::get (current_function_decl);
9230 profile_count num = node->count;
9231 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9232 bool scale = num.initialized_p () && !(num == den);
9234 if (scale)
9236 profile_count::adjust_for_ipa_scaling (&num, &den);
9237 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9238 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9239 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9242 FOR_EACH_BB_FN (bb, cfun)
9244 if (scale)
9245 bb->count = bb->count.apply_scale (num, den);
9246 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9248 gimple *stmt = gsi_stmt (gsi);
9249 tree decl = is_gimple_call (stmt)
9250 ? gimple_call_fndecl (stmt)
9251 : NULL;
9252 if (decl)
9254 int flags = gimple_call_flags (stmt);
9255 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9257 if (gimple_purge_dead_abnormal_call_edges (bb))
9258 todo |= TODO_cleanup_cfg;
9260 if (gimple_in_ssa_p (cfun))
9262 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9263 update_stmt (stmt);
9267 if (flags & ECF_NORETURN
9268 && fixup_noreturn_call (stmt))
9269 todo |= TODO_cleanup_cfg;
9272 /* Remove stores to variables we marked write-only.
9273 Keep the access when the store has side effects, i.e. when the
9274 source is volatile. */
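/* E.g. if IPA determined that  static int counter;  is never read,
   a plain store  counter = 0;  is deleted here, while a call
   counter = f ();  keeps its side effects and is instead handled
   further below by dropping just its LHS.  */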
9275 if (gimple_store_p (stmt)
9276 && !gimple_has_side_effects (stmt))
9278 tree lhs = get_base_address (gimple_get_lhs (stmt));
9280 if (VAR_P (lhs)
9281 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9282 && varpool_node::get (lhs)->writeonly)
9284 unlink_stmt_vdef (stmt);
9285 gsi_remove (&gsi, true);
9286 release_defs (stmt);
9287 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9288 continue;
9291 /* For calls we can simply remove the LHS when it is known
9292 to be write-only. */
9293 if (is_gimple_call (stmt)
9294 && gimple_get_lhs (stmt))
9296 tree lhs = get_base_address (gimple_get_lhs (stmt));
9298 if (VAR_P (lhs)
9299 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9300 && varpool_node::get (lhs)->writeonly)
9302 gimple_call_set_lhs (stmt, NULL);
9303 update_stmt (stmt);
9304 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9308 if (maybe_clean_eh_stmt (stmt)
9309 && gimple_purge_dead_eh_edges (bb))
9310 todo |= TODO_cleanup_cfg;
9311 gsi_next (&gsi);
9314 /* If we have a basic block with no successors that does not
9315 end with a control statement or a noreturn call, end it with
9316 a call to __builtin_unreachable.  This situation can occur
9317 when inlining a noreturn call that does in fact return. */
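/* E.g. (hypothetical) inlining

     __attribute__ ((noreturn)) void f (void) { }   // falls off the end

   leaves the inlined body as a block with no control statement and
   no successors, so a __builtin_unreachable () call is appended to
   keep the CFG valid.  */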
9318 if (EDGE_COUNT (bb->succs) == 0)
9320 gimple *stmt = last_stmt (bb);
9321 if (!stmt
9322 || (!is_ctrl_stmt (stmt)
9323 && (!is_gimple_call (stmt)
9324 || !gimple_call_noreturn_p (stmt))))
9326 if (stmt && is_gimple_call (stmt))
9327 gimple_call_set_ctrl_altering (stmt, false);
9328 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9329 stmt = gimple_build_call (fndecl, 0);
9330 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9331 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9332 if (!cfun->after_inlining)
9334 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9335 node->create_edge (cgraph_node::get_create (fndecl),
9336 call_stmt, bb->count);
9341 if (scale)
9342 compute_function_frequency ();
9344 if (current_loops
9345 && (todo & TODO_cleanup_cfg))
9346 loops_state_set (LOOPS_NEED_FIXUP);
9348 return todo;
9351 namespace {
9353 const pass_data pass_data_fixup_cfg =
9355 GIMPLE_PASS, /* type */
9356 "fixup_cfg", /* name */
9357 OPTGROUP_NONE, /* optinfo_flags */
9358 TV_NONE, /* tv_id */
9359 PROP_cfg, /* properties_required */
9360 0, /* properties_provided */
9361 0, /* properties_destroyed */
9362 0, /* todo_flags_start */
9363 0, /* todo_flags_finish */
9366 class pass_fixup_cfg : public gimple_opt_pass
9368 public:
9369 pass_fixup_cfg (gcc::context *ctxt)
9370 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9373 /* opt_pass methods: */
9374 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9375 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9377 }; // class pass_fixup_cfg
9379 } // anon namespace
9381 gimple_opt_pass *
9382 make_pass_fixup_cfg (gcc::context *ctxt)
9384 return new pass_fixup_cfg (ctxt);
9387 /* Garbage collection support for edge_def. */
9389 extern void gt_ggc_mx (tree&);
9390 extern void gt_ggc_mx (gimple *&);
9391 extern void gt_ggc_mx (rtx&);
9392 extern void gt_ggc_mx (basic_block&);
9394 static void
9395 gt_ggc_mx (rtx_insn *& x)
9397 if (x)
9398 gt_ggc_mx_rtx_def ((void *) x);
9401 void
9402 gt_ggc_mx (edge_def *e)
9404 tree block = LOCATION_BLOCK (e->goto_locus);
9405 gt_ggc_mx (e->src);
9406 gt_ggc_mx (e->dest);
9407 if (current_ir_type () == IR_GIMPLE)
9408 gt_ggc_mx (e->insns.g);
9409 else
9410 gt_ggc_mx (e->insns.r);
9411 gt_ggc_mx (block);
9414 /* PCH support for edge_def. */
9416 extern void gt_pch_nx (tree&);
9417 extern void gt_pch_nx (gimple *&);
9418 extern void gt_pch_nx (rtx&);
9419 extern void gt_pch_nx (basic_block&);
9421 static void
9422 gt_pch_nx (rtx_insn *& x)
9424 if (x)
9425 gt_pch_nx_rtx_def ((void *) x);
9428 void
9429 gt_pch_nx (edge_def *e)
9431 tree block = LOCATION_BLOCK (e->goto_locus);
9432 gt_pch_nx (e->src);
9433 gt_pch_nx (e->dest);
9434 if (current_ir_type () == IR_GIMPLE)
9435 gt_pch_nx (e->insns.g);
9436 else
9437 gt_pch_nx (e->insns.r);
9438 gt_pch_nx (block);
9441 void
9442 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9444 tree block = LOCATION_BLOCK (e->goto_locus);
9445 op (&(e->src), cookie);
9446 op (&(e->dest), cookie);
9447 if (current_ir_type () == IR_GIMPLE)
9448 op (&(e->insns.g), cookie);
9449 else
9450 op (&(e->insns.r), cookie);
9451 op (&(block), cookie);
9454 #if CHECKING_P
9456 namespace selftest {
9458 /* Helper function for CFG selftests: create a dummy function decl
9459 and push it as cfun. */
9461 static tree
9462 push_fndecl (const char *name)
9464 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9465 /* FIXME: this uses input_location: */
9466 tree fndecl = build_fn_decl (name, fn_type);
9467 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9468 NULL_TREE, integer_type_node);
9469 DECL_RESULT (fndecl) = retval;
9470 push_struct_function (fndecl);
9471 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9472 ASSERT_TRUE (fun != NULL);
9473 init_empty_tree_cfg_for_function (fun);
9474 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9475 ASSERT_EQ (0, n_edges_for_fn (fun));
9476 return fndecl;
9479 /* These tests directly create CFGs.
9480 Compare with the static fns within tree-cfg.c:
9481 - build_gimple_cfg
9482 - make_blocks: calls create_basic_block (seq, bb);
9483 - make_edges. */
9485 /* Verify a simple cfg of the form:
9486 ENTRY -> A -> B -> C -> EXIT. */
9488 static void
9489 test_linear_chain ()
9491 gimple_register_cfg_hooks ();
9493 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9494 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9496 /* Create some empty blocks. */
9497 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9498 basic_block bb_b = create_empty_bb (bb_a);
9499 basic_block bb_c = create_empty_bb (bb_b);
9501 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9502 ASSERT_EQ (0, n_edges_for_fn (fun));
9504 /* Create some edges: a simple linear chain of BBs. */
9505 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9506 make_edge (bb_a, bb_b, 0);
9507 make_edge (bb_b, bb_c, 0);
9508 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9510 /* Verify the edges. */
9511 ASSERT_EQ (4, n_edges_for_fn (fun));
9512 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9513 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9514 ASSERT_EQ (1, bb_a->preds->length ());
9515 ASSERT_EQ (1, bb_a->succs->length ());
9516 ASSERT_EQ (1, bb_b->preds->length ());
9517 ASSERT_EQ (1, bb_b->succs->length ());
9518 ASSERT_EQ (1, bb_c->preds->length ());
9519 ASSERT_EQ (1, bb_c->succs->length ());
9520 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9521 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9523 /* Verify the dominance information.
9524 Each BB in our simple chain should be dominated by the one before
9525 it. */
9526 calculate_dominance_info (CDI_DOMINATORS);
9527 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9528 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9529 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9530 ASSERT_EQ (1, dom_by_b.length ());
9531 ASSERT_EQ (bb_c, dom_by_b[0]);
9532 free_dominance_info (CDI_DOMINATORS);
9533 dom_by_b.release ();
9535 /* Similarly for post-dominance: each BB in our chain is post-dominated
9536 by the one after it. */
9537 calculate_dominance_info (CDI_POST_DOMINATORS);
9538 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9539 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9540 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9541 ASSERT_EQ (1, postdom_by_b.length ());
9542 ASSERT_EQ (bb_a, postdom_by_b[0]);
9543 free_dominance_info (CDI_POST_DOMINATORS);
9544 postdom_by_b.release ();
9546 pop_cfun ();
9549 /* Verify a simple CFG of the form:
9550      ENTRY
9551        |
9552        A
9553       / \
9554      /t  \f
9555     B     C
9556      \   /
9557       \ /
9558        D
9559        |
9560       EXIT. */
9562 static void
9563 test_diamond ()
9565 gimple_register_cfg_hooks ();
9567 tree fndecl = push_fndecl ("cfg_test_diamond");
9568 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9570 /* Create some empty blocks. */
9571 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9572 basic_block bb_b = create_empty_bb (bb_a);
9573 basic_block bb_c = create_empty_bb (bb_a);
9574 basic_block bb_d = create_empty_bb (bb_b);
9576 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9577 ASSERT_EQ (0, n_edges_for_fn (fun));
9579 /* Create the edges. */
9580 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9581 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9582 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9583 make_edge (bb_b, bb_d, 0);
9584 make_edge (bb_c, bb_d, 0);
9585 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9587 /* Verify the edges. */
9588 ASSERT_EQ (6, n_edges_for_fn (fun));
9589 ASSERT_EQ (1, bb_a->preds->length ());
9590 ASSERT_EQ (2, bb_a->succs->length ());
9591 ASSERT_EQ (1, bb_b->preds->length ());
9592 ASSERT_EQ (1, bb_b->succs->length ());
9593 ASSERT_EQ (1, bb_c->preds->length ());
9594 ASSERT_EQ (1, bb_c->succs->length ());
9595 ASSERT_EQ (2, bb_d->preds->length ());
9596 ASSERT_EQ (1, bb_d->succs->length ());
9598 /* Verify the dominance information. */
9599 calculate_dominance_info (CDI_DOMINATORS);
9600 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9601 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9602 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9603 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9604 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9605 dom_by_a.release ();
9606 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9607 ASSERT_EQ (0, dom_by_b.length ());
9608 dom_by_b.release ();
9609 free_dominance_info (CDI_DOMINATORS);
9611 /* Similarly for post-dominance. */
9612 calculate_dominance_info (CDI_POST_DOMINATORS);
9613 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9614 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9615 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9616 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9617 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9618 postdom_by_d.release ();
9619 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9620 ASSERT_EQ (0, postdom_by_b.length ());
9621 postdom_by_b.release ();
9622 free_dominance_info (CDI_POST_DOMINATORS);
9624 pop_cfun ();
9627 /* Verify that we can handle a CFG containing a "complete" aka
9628    fully-connected subgraph (where each of A, B, C, D below has an
9629    edge to every node in the subgraph, including itself),
9630    e.g.:
9631      ENTRY  EXIT
9632        |    ^
9633        |   /
9634        |  /
9635        | /
9636        |/
9637        A<--->B
9638        ^^   ^^
9639        | \ / |
9640        |  X  |
9641        | / \ |
9642        VV   VV
9643        C<--->D
9644 */
9646 static void
9647 test_fully_connected ()
9649 gimple_register_cfg_hooks ();
9651 tree fndecl = push_fndecl ("cfg_fully_connected");
9652 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9654 const int n = 4;
9656 /* Create some empty blocks. */
9657 auto_vec <basic_block> subgraph_nodes;
9658 for (int i = 0; i < n; i++)
9659 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9661 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9662 ASSERT_EQ (0, n_edges_for_fn (fun));
9664 /* Create the edges. */
9665 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9666 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9667 for (int i = 0; i < n; i++)
9668 for (int j = 0; j < n; j++)
9669 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9671 /* Verify the edges. */
9672 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9673 /* The first one is linked to ENTRY/EXIT as well as itself and
9674 everything else. */
9675 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9676 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9677 /* The other ones in the subgraph are linked to everything in
9678 the subgraph (including themselves). */
9679 for (int i = 1; i < n; i++)
9681 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9682 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9685 /* Verify the dominance information. */
9686 calculate_dominance_info (CDI_DOMINATORS);
9687 /* The initial block in the subgraph should be dominated by ENTRY. */
9688 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9689 get_immediate_dominator (CDI_DOMINATORS,
9690 subgraph_nodes[0]));
9691 /* Every other block in the subgraph should be dominated by the
9692 initial block. */
9693 for (int i = 1; i < n; i++)
9694 ASSERT_EQ (subgraph_nodes[0],
9695 get_immediate_dominator (CDI_DOMINATORS,
9696 subgraph_nodes[i]));
9697 free_dominance_info (CDI_DOMINATORS);
9699 /* Similarly for post-dominance. */
9700 calculate_dominance_info (CDI_POST_DOMINATORS);
9701 /* The initial block in the subgraph should be postdominated by EXIT. */
9702 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9703 get_immediate_dominator (CDI_POST_DOMINATORS,
9704 subgraph_nodes[0]));
9705 /* Every other block in the subgraph should be postdominated by the
9706 initial block, since that leads to EXIT. */
9707 for (int i = 1; i < n; i++)
9708 ASSERT_EQ (subgraph_nodes[0],
9709 get_immediate_dominator (CDI_POST_DOMINATORS,
9710 subgraph_nodes[i]));
9711 free_dominance_info (CDI_POST_DOMINATORS);
9713 pop_cfun ();
9716 /* Run all of the selftests within this file. */
9718 void
9719 tree_cfg_c_tests ()
9721 test_linear_chain ();
9722 test_diamond ();
9723 test_fully_connected ();
9726 } // namespace selftest
9728 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9729 - loop
9730 - nested loops
9731 - switch statement (a block with many out-edges)
9732 - something that jumps to itself
9733 - etc */
9735 #endif /* CHECKING_P */