/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
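
/* An illustrative sketch (not part of the sources): for

     switch (x) { case 1: case 2: goto L1; default: goto L2; }

   the edge reaching L1's block would map to the chain "case 1 -> case 2",
   linked through CASE_CHAIN, so redirecting that one edge lets us update
   both case labels without rescanning the whole case vector.  */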
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
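
/* Note that both hash () and equal () look only at LOCATION_LINE, so every
   location_t on the same source line shares a single table entry and hence
   a single discriminator counter; column information is ignored.  This is
   what next_discriminator_for_locus below relies on.  */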
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
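
/* For example (an illustrative C fragment):

     void *p = &&lab;
     goto *p;

   Here the goto's destination is a pointer value rather than a LABEL_DECL,
   so computed_goto_p returns true; a plain "goto lab;" has a LABEL_DECL
   destination and returns false.  */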
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
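
/* Illustrative examples (a sketch, not an exhaustive list): a call to
   abort () ends its block via ECF_NORETURN, and __builtin_return (...)
   acts like a return statement; in a function that calls setjmp () or
   has nonlocal labels, an ordinary call may count as able to make an
   abnormal goto.  A plain memcpy () call in other code leaves the flag
   cleared.  */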
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
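
/* A sketch of the LHS replacement above (illustrative): an abnormal-goto
   capable call such as

     x = setjmp (env);

   is rewritten to

     tmp = setjmp (env);
     x = tmp;

   so the old value of x is still available on the abnormal edge leaving
   the call, and the SSA names for x and tmp get disjoint live ranges.  */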
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
563 /* Create and return a new empty basic block after bb AFTER. */
565 static basic_block
566 create_bb (void *h, void *e, basic_block after)
568 basic_block bb;
570 gcc_assert (!e);
572 /* Create and initialize a new basic block. Since alloc_block uses
573 GC allocation that clears memory to allocate a basic block, we do
574 not have to clear the newly allocated basic block here. */
575 bb = alloc_block ();
577 bb->index = last_basic_block_for_fn (cfun);
578 bb->flags = BB_NEW;
579 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
581 /* Add the new block to the linked list of blocks. */
582 link_block (bb, after);
584 /* Grow the basic block array if needed. */
585 if ((size_t) last_basic_block_for_fn (cfun)
586 == basic_block_info_for_fn (cfun)->length ())
588 size_t new_size =
589 (last_basic_block_for_fn (cfun)
590 + (last_basic_block_for_fn (cfun) + 3) / 4);
591 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
594 /* Add the newly created block to the array. */
595 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
597 n_basic_blocks_for_fn (cfun)++;
598 last_basic_block_for_fn (cfun)++;
600 return bb;
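
/* A worked example of the growth policy above (illustrative): with
   last_basic_block_for_fn == 20 the array grows to
   20 + (20 + 3) / 4 == 25 slots, i.e. roughly 25% headroom is added on
   each reallocation.  */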
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's basic
   block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO
   is false, otherwise factor the computed gotos.  */
static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */
static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
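
  /* A sketch of the factored form (illustrative, not verbatim compiler
     output): two computed gotos

       goto *p_1;          goto *q_2;

     become

       gotovar = p_1;      gotovar = q_2;
       (fall through to the dispatcher block)

       <dispatcher>:
       goto *gotovar;

     so only the dispatcher block needs edges to every potential label.  */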
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  free_omp_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
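
/* An illustrative example: for

     if (a) b = 1; else b = 2;

   written on a single source line, the then- and else-blocks share that
   line's locus; assigning them distinct discriminators (say 1 and 2) lets
   a sample-based profiler attribute execution counts to each block
   separately rather than lumping them into one per-line count.  */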
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */
/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
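
/* A worked example (illustrative): given case labels 1, 2 and 3 that all
   jump to the same block, the inner loop above extends base_high from 1
   to 3 and NULLs out the merged entries; the compression loop then packs
   the vector down to the single range "case 1 ... 3" and shrinks the
   label count accordingly.  */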
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1877 /* Merge block B into block A. */
1879 static void
1880 gimple_merge_blocks (basic_block a, basic_block b)
1882 gimple_stmt_iterator last, gsi;
1883 gphi_iterator psi;
1885 if (dump_file)
1886 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1888 /* Remove all single-valued PHI nodes from block B of the form
1889 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1890 gsi = gsi_last_bb (a);
1891 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1893 gimple *phi = gsi_stmt (psi);
1894 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1895 gimple *copy;
1896 bool may_replace_uses = (virtual_operand_p (def)
1897 || may_propagate_copy (def, use));
1899 /* In case we maintain loop closed ssa form, do not propagate arguments
1900 of loop exit phi nodes. */
1901 if (current_loops
1902 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1903 && !virtual_operand_p (def)
1904 && TREE_CODE (use) == SSA_NAME
1905 && a->loop_father != b->loop_father)
1906 may_replace_uses = false;
1908 if (!may_replace_uses)
1910 gcc_assert (!virtual_operand_p (def));
1912 /* Note that just emitting the copies is fine -- there is no problem
1913 with ordering of phi nodes. This is because A is the single
1914 predecessor of B, therefore results of the phi nodes cannot
1915 appear as arguments of the phi nodes. */
1916 copy = gimple_build_assign (def, use);
1917 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1918 remove_phi_node (&psi, false);
1920 else
1922 /* If we deal with a PHI for virtual operands, we can simply
1923 propagate these without fussing with folding or updating
1924 the stmt. */
1925 if (virtual_operand_p (def))
1927 imm_use_iterator iter;
1928 use_operand_p use_p;
1929 gimple *stmt;
1931 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1932 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1933 SET_USE (use_p, use);
1935 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1936 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1938 else
1939 replace_uses_by (def, use);
1941 remove_phi_node (&psi, true);
1945 /* Ensure that B follows A. */
1946 move_block_after (b, a);
1948 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1949 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1951 /* Remove labels from B and set gimple_bb to A for other statements. */
1952 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1954 gimple *stmt = gsi_stmt (gsi);
1955 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1957 tree label = gimple_label_label (label_stmt);
1958 int lp_nr;
1960 gsi_remove (&gsi, false);
1962 /* Now that we can thread computed gotos, we might have
1963 a situation where we have a forced label in block B
1964 However, the label at the start of block B might still be
1965 used in other ways (think about the runtime checking for
1966 Fortran assigned gotos). So we can not just delete the
1967 label. Instead we move the label to the start of block A. */
1968 if (FORCED_LABEL (label))
1970 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1971 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1973 /* Other user labels keep around in a form of a debug stmt. */
1974 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1976 gimple *dbg = gimple_build_debug_bind (label,
1977 integer_zero_node,
1978 stmt);
1979 gimple_debug_bind_reset_value (dbg);
1980 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1983 lp_nr = EH_LANDING_PAD_NR (label);
1984 if (lp_nr)
1986 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1987 lp->post_landing_pad = NULL;
1990 else
1992 gimple_set_bb (stmt, a);
1993 gsi_next (&gsi);
1997 /* When merging two BBs, if their counts are different, the larger count
1998 is selected as the new bb count. This is to handle inconsistent
1999 profiles. */
2000 if (a->loop_father == b->loop_father)
2002 a->count = MAX (a->count, b->count);
2003 a->frequency = MAX (a->frequency, b->frequency);
2006 /* Merge the sequences. */
2007 last = gsi_last_bb (a);
2008 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2009 set_bb_seq (b, NULL);
2011 if (cfgcleanup_altered_bbs)
2012 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
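/* Editor's sketch (illustrative, not part of the original source): when
   A is the single predecessor of B, any PHI in B is necessarily
   single-valued and is eliminated as described above.  Roughly, given

     <bb A>:
       x_1 = y_2 + 1;

     <bb B>:
       # x_3 = PHI <x_1(A)>
       use (x_3);

   merging propagates x_1 into the use of x_3, or, when propagation is
   not allowed (e.g. to preserve loop-closed SSA form), emits the copy
   x_3 = x_1 at the end of A; afterwards B's statements are appended to
   A.  The SSA names here are made up for illustration.  */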
2016 /* Return the one of BB's two successors that is not reachable via a
2017 complex edge, if there is one; otherwise return BB. We use
2018 this in optimizations that use post-dominators for their heuristics,
2019 to catch the cases in C++ where function calls are involved. */
2021 basic_block
2022 single_noncomplex_succ (basic_block bb)
2024 edge e0, e1;
2025 if (EDGE_COUNT (bb->succs) != 2)
2026 return bb;
2028 e0 = EDGE_SUCC (bb, 0);
2029 e1 = EDGE_SUCC (bb, 1);
2030 if (e0->flags & EDGE_COMPLEX)
2031 return e1->dest;
2032 if (e1->flags & EDGE_COMPLEX)
2033 return e0->dest;
2035 return bb;
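/* Editor's illustration (hypothetical CFG, not from the original
   source): for a block ending in a call that can throw,

     bb 3 --(EDGE_FALLTHRU)--> bb 4
     bb 3 --(EDGE_EH)--------> bb 9    (EDGE_EH is part of EDGE_COMPLEX)

   single_noncomplex_succ (bb 3) returns bb 4, the non-EH successor.
   If BB does not have exactly two successors, or neither successor
   edge is complex, BB itself is returned.  */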
2038 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags accordingly. */
2040 void
2041 notice_special_calls (gcall *call)
2043 int flags = gimple_call_flags (call);
2045 if (flags & ECF_MAY_BE_ALLOCA)
2046 cfun->calls_alloca = true;
2047 if (flags & ECF_RETURNS_TWICE)
2048 cfun->calls_setjmp = true;
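/* Editor's note (illustrative examples, not from the original source):
   for calls such as

     p_1 = __builtin_alloca (n_2);   <- flagged ECF_MAY_BE_ALLOCA
     i_3 = _setjmp (&buf);           <- flagged ECF_RETURNS_TWICE

   this sets cfun->calls_alloca resp. cfun->calls_setjmp, which later
   phases consult when deciding whether certain stack or CFG
   transformations are safe.  */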
2052 /* Clear flags set by notice_special_calls. Used by dead code removal
2053 to update the flags. */
2055 void
2056 clear_special_calls (void)
2058 cfun->calls_alloca = false;
2059 cfun->calls_setjmp = false;
2062 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2064 static void
2065 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2067 /* Since this block is no longer reachable, we can just delete all
2068 of its PHI nodes. */
2069 remove_phi_nodes (bb);
2071 /* Remove edges to BB's successors. */
2072 while (EDGE_COUNT (bb->succs) > 0)
2073 remove_edge (EDGE_SUCC (bb, 0));
2077 /* Remove statements of basic block BB. */
2079 static void
2080 remove_bb (basic_block bb)
2082 gimple_stmt_iterator i;
2084 if (dump_file)
2086 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2087 if (dump_flags & TDF_DETAILS)
2089 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2090 fprintf (dump_file, "\n");
2094 if (current_loops)
2096 struct loop *loop = bb->loop_father;
2098 /* If a loop gets removed, clean up the information associated
2099 with it. */
2100 if (loop->latch == bb
2101 || loop->header == bb)
2102 free_numbers_of_iterations_estimates_loop (loop);
2105 /* Remove all the instructions in the block. */
2106 if (bb_seq (bb) != NULL)
2108 /* Walk backwards so as to get a chance to substitute all
2109 released DEFs into debug stmts. See
2110 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2111 details. */
2112 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2114 gimple *stmt = gsi_stmt (i);
2115 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2116 if (label_stmt
2117 && (FORCED_LABEL (gimple_label_label (label_stmt))
2118 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2120 basic_block new_bb;
2121 gimple_stmt_iterator new_gsi;
2123 /* A non-reachable non-local label may still be referenced.
2124 But it no longer needs to carry the extra semantics of
2125 non-locality. */
2126 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2128 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2129 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2132 new_bb = bb->prev_bb;
2133 new_gsi = gsi_start_bb (new_bb);
2134 gsi_remove (&i, false);
2135 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2137 else
2139 /* Release SSA definitions. */
2140 release_defs (stmt);
2141 gsi_remove (&i, true);
2144 if (gsi_end_p (i))
2145 i = gsi_last_bb (bb);
2146 else
2147 gsi_prev (&i);
2151 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2152 bb->il.gimple.seq = NULL;
2153 bb->il.gimple.phi_nodes = NULL;
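/* Editor's sketch of the label rescue above (hypothetical GIMPLE, not
   from the original source): if an unreachable block contains

     <bb 7>:
       mylab:        [non-local]
       x_1 = 0;

   the statement x_1 = 0 is deleted, but "mylab" may still be referenced
   elsewhere (e.g. via a computed goto target taken earlier), so the
   label is demoted from DECL_NONLOCAL to FORCED_LABEL and moved into
   the preceding block rather than thrown away.  */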
2157 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or computed
2158 GOTO, and a predicate VAL, return the edge that will be taken out of the
2159 block. If VAL does not match a unique edge, NULL is returned. */
2161 edge
2162 find_taken_edge (basic_block bb, tree val)
2164 gimple *stmt;
2166 stmt = last_stmt (bb);
2168 gcc_assert (stmt);
2169 gcc_assert (is_ctrl_stmt (stmt));
2171 if (val == NULL)
2172 return NULL;
2174 if (!is_gimple_min_invariant (val))
2175 return NULL;
2177 if (gimple_code (stmt) == GIMPLE_COND)
2178 return find_taken_edge_cond_expr (bb, val);
2180 if (gimple_code (stmt) == GIMPLE_SWITCH)
2181 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2183 if (computed_goto_p (stmt))
2185 /* Only optimize if the argument is a label; if the argument is
2186 not a label we cannot construct a proper CFG.
2188 It may be the case that we only need to allow the LABEL_REF to
2189 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2190 appear inside a LABEL_EXPR just to be safe. */
2191 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2192 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2193 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2194 return NULL;
2197 gcc_unreachable ();
2200 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2201 statement, determine which of the outgoing edges will be taken out of the
2202 block. Return NULL if the taken edge cannot be determined. */
2204 static edge
2205 find_taken_edge_computed_goto (basic_block bb, tree val)
2207 basic_block dest;
2208 edge e = NULL;
2210 dest = label_to_block (val);
2211 if (dest)
2213 e = find_edge (bb, dest);
2214 gcc_assert (e != NULL);
2217 return e;
2220 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2221 statement, determine which of the two edges will be taken out of the
2222 block. Return NULL if either edge may be taken. */
2224 static edge
2225 find_taken_edge_cond_expr (basic_block bb, tree val)
2227 edge true_edge, false_edge;
2229 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2231 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2232 return (integer_zerop (val) ? false_edge : true_edge);
2235 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2236 statement, determine which edge will be taken out of the block. Return
2237 NULL if any edge may be taken. */
2239 static edge
2240 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2241 tree val)
2243 basic_block dest_bb;
2244 edge e;
2245 tree taken_case;
2247 taken_case = find_case_label_for_value (switch_stmt, val);
2248 dest_bb = label_to_block (CASE_LABEL (taken_case));
2250 e = find_edge (bb, dest_bb);
2251 gcc_assert (e);
2252 return e;
2256 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2257 We can make optimal use here of the fact that the case labels are
2258 sorted: We can do a binary search for a case matching VAL. */
2260 static tree
2261 find_case_label_for_value (gswitch *switch_stmt, tree val)
2263 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2264 tree default_case = gimple_switch_default_label (switch_stmt);
2266 for (low = 0, high = n; high - low > 1; )
2268 size_t i = (high + low) / 2;
2269 tree t = gimple_switch_label (switch_stmt, i);
2270 int cmp;
2272 /* Cache the result of comparing CASE_LOW and val. */
2273 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2275 if (cmp > 0)
2276 high = i;
2277 else
2278 low = i;
2280 if (CASE_HIGH (t) == NULL)
2282 /* A single-valued case label. */
2283 if (cmp == 0)
2284 return t;
2286 else
2288 /* A case range. We can only handle integer ranges. */
2289 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2290 return t;
2294 return default_case;
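/* Editor's worked example (case values made up): for the sorted labels

     [0] default:   [1] case 1:   [2] case 4 ... 6:   [3] case 9:

   looking up VAL == 5 bisects to label [2], sees CASE_LOW (4) <= 5 and
   CASE_HIGH (6) >= 5, and returns that range label; VAL == 7 exhausts
   the bisection without a match and falls back to the default.  Label
   index 0 holds the default case and is never probed, because the
   midpoint (high + low) / 2 stays >= 1 while high - low > 1.  */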
2298 /* Dump a basic block on stderr. */
2300 void
2301 gimple_debug_bb (basic_block bb)
2303 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2307 /* Dump basic block with index N on stderr. */
2309 basic_block
2310 gimple_debug_bb_n (int n)
2312 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2313 return BASIC_BLOCK_FOR_FN (cfun, n);
2317 /* Dump the CFG on stderr.
2319 FLAGS are the same as those used by the tree dumping functions
2320 (see TDF_* in dumpfile.h). */
2322 void
2323 gimple_debug_cfg (int flags)
2325 gimple_dump_cfg (stderr, flags);
2329 /* Dump the program showing basic block boundaries on the given FILE.
2331 FLAGS are the same as those used by the tree dumping functions (see
2332 TDF_* in dumpfile.h). */
2334 void
2335 gimple_dump_cfg (FILE *file, int flags)
2337 if (flags & TDF_DETAILS)
2339 dump_function_header (file, current_function_decl, flags);
2340 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2341 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2342 last_basic_block_for_fn (cfun));
2344 brief_dump_cfg (file, flags | TDF_COMMENT);
2345 fprintf (file, "\n");
2348 if (flags & TDF_STATS)
2349 dump_cfg_stats (file);
2351 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2355 /* Dump CFG statistics on FILE. */
2357 void
2358 dump_cfg_stats (FILE *file)
2360 static long max_num_merged_labels = 0;
2361 unsigned long size, total = 0;
2362 long num_edges;
2363 basic_block bb;
2364 const char * const fmt_str = "%-30s%-13s%12s\n";
2365 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2366 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2367 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2368 const char *funcname = current_function_name ();
2370 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2372 fprintf (file, "---------------------------------------------------------\n");
2373 fprintf (file, fmt_str, "", " Number of ", "Memory");
2374 fprintf (file, fmt_str, "", " instances ", "used ");
2375 fprintf (file, "---------------------------------------------------------\n");
2377 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2378 total += size;
2379 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2380 SCALE (size), LABEL (size));
2382 num_edges = 0;
2383 FOR_EACH_BB_FN (bb, cfun)
2384 num_edges += EDGE_COUNT (bb->succs);
2385 size = num_edges * sizeof (struct edge_def);
2386 total += size;
2387 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2389 fprintf (file, "---------------------------------------------------------\n");
2390 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2391 LABEL (total));
2392 fprintf (file, "---------------------------------------------------------\n");
2393 fprintf (file, "\n");
2395 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2396 max_num_merged_labels = cfg_stats.num_merged_labels;
2398 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2399 cfg_stats.num_merged_labels, max_num_merged_labels);
2401 fprintf (file, "\n");
2405 /* Dump CFG statistics on stderr. Keep extern so that it's always
2406 linked in the final executable. */
2408 DEBUG_FUNCTION void
2409 debug_cfg_stats (void)
2411 dump_cfg_stats (stderr);
2414 /*---------------------------------------------------------------------------
2415 Miscellaneous helpers
2416 ---------------------------------------------------------------------------*/
2418 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2419 flow. Transfers of control flow associated with EH are excluded. */
2421 static bool
2422 call_can_make_abnormal_goto (gimple *t)
2424 /* If the function has no non-local labels, then a call cannot make an
2425 abnormal transfer of control. */
2426 if (!cfun->has_nonlocal_label
2427 && !cfun->calls_setjmp)
2428 return false;
2430 /* Likewise if the call has no side effects. */
2431 if (!gimple_has_side_effects (t))
2432 return false;
2434 /* Likewise if the called function is leaf. */
2435 if (gimple_call_flags (t) & ECF_LEAF)
2436 return false;
2438 return true;
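/* Editor's illustration (hypothetical, not from the original source):
   in a function that contains a non-local label (or calls setjmp), an
   ordinary call with side effects may return control abnormally:

     g ();                        <- can make an abnormal goto
     n_1 = __builtin_strlen (s_2);  <- pure, no side effects: cannot

   ECF_LEAF calls are likewise excluded, since a leaf function cannot
   transfer control back into the current translation unit except by
   returning normally.  */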
2442 /* Return true if T can make an abnormal transfer of control flow.
2443 Transfers of control flow associated with EH are excluded. */
2445 bool
2446 stmt_can_make_abnormal_goto (gimple *t)
2448 if (computed_goto_p (t))
2449 return true;
2450 if (is_gimple_call (t))
2451 return call_can_make_abnormal_goto (t);
2452 return false;
2456 /* Return true if T represents a stmt that always transfers control. */
2458 bool
2459 is_ctrl_stmt (gimple *t)
2461 switch (gimple_code (t))
2463 case GIMPLE_COND:
2464 case GIMPLE_SWITCH:
2465 case GIMPLE_GOTO:
2466 case GIMPLE_RETURN:
2467 case GIMPLE_RESX:
2468 return true;
2469 default:
2470 return false;
2475 /* Return true if T is a statement that may alter the flow of control
2476 (e.g., a call to a non-returning function). */
2478 bool
2479 is_ctrl_altering_stmt (gimple *t)
2481 gcc_assert (t);
2483 switch (gimple_code (t))
2485 case GIMPLE_CALL:
2486 /* The per-stmt call flag indicates whether the call could alter
2487 control flow. */
2488 if (gimple_call_ctrl_altering_p (t))
2489 return true;
2490 break;
2492 case GIMPLE_EH_DISPATCH:
2493 /* EH_DISPATCH branches to the individual catch handlers at
2494 this level of a try or allowed-exceptions region. It can
2495 fallthru to the next statement as well. */
2496 return true;
2498 case GIMPLE_ASM:
2499 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2500 return true;
2501 break;
2503 CASE_GIMPLE_OMP:
2504 /* OpenMP directives alter control flow. */
2505 return true;
2507 case GIMPLE_TRANSACTION:
2508 /* A transaction start alters control flow. */
2509 return true;
2511 default:
2512 break;
2515 /* If a statement can throw, it alters control flow. */
2516 return stmt_can_throw_internal (t);
2520 /* Return true if T is a simple local goto. */
2522 bool
2523 simple_goto_p (gimple *t)
2525 return (gimple_code (t) == GIMPLE_GOTO
2526 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2530 /* Return true if STMT should start a new basic block. PREV_STMT is
2531 the statement preceding STMT. It is used when STMT is a label or a
2532 case label. Labels should only start a new basic block if their
2533 previous statement wasn't a label. Otherwise, sequence of labels
2534 would generate unnecessary basic blocks that only contain a single
2535 label. */
2537 static inline bool
2538 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2540 if (stmt == NULL)
2541 return false;
2543 /* Labels start a new basic block only if the preceding statement
2544 wasn't a label of the same type. This prevents the creation of
2545 consecutive blocks that have nothing but a single label. */
2546 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2548 /* Nonlocal and computed GOTO targets always start a new block. */
2549 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2550 || FORCED_LABEL (gimple_label_label (label_stmt)))
2551 return true;
2553 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2555 if (DECL_NONLOCAL (gimple_label_label (
2556 as_a <glabel *> (prev_stmt))))
2557 return true;
2559 cfg_stats.num_merged_labels++;
2560 return false;
2562 else
2563 return true;
2565 else if (gimple_code (stmt) == GIMPLE_CALL
2566 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2567 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2568 start a new block. */
2569 return true;
2571 return false;
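/* Editor's example of the label-merging rule above (hypothetical
   GIMPLE): in

     L1:
     L2:
       x_1 = 1;

   only L1 opens a new basic block; L2 is counted in
   cfg_stats.num_merged_labels and shares L1's block.  A DECL_NONLOCAL
   or FORCED_LABEL label, or a call to a returns-twice function such as
   setjmp, always begins a fresh block regardless of what precedes it.  */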
2575 /* Return true if T should end a basic block. */
2577 bool
2578 stmt_ends_bb_p (gimple *t)
2580 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2583 /* Remove block annotations and other data structures. */
2585 void
2586 delete_tree_cfg_annotations (struct function *fn)
2588 vec_free (label_to_block_map_for_fn (fn));
2591 /* Return the virtual phi in BB. */
2593 gphi *
2594 get_virtual_phi (basic_block bb)
2596 for (gphi_iterator gsi = gsi_start_phis (bb);
2597 !gsi_end_p (gsi);
2598 gsi_next (&gsi))
2600 gphi *phi = gsi.phi ();
2602 if (virtual_operand_p (PHI_RESULT (phi)))
2603 return phi;
2606 return NULL;
2609 /* Return the first statement in basic block BB. */
2611 gimple *
2612 first_stmt (basic_block bb)
2614 gimple_stmt_iterator i = gsi_start_bb (bb);
2615 gimple *stmt = NULL;
2617 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2619 gsi_next (&i);
2620 stmt = NULL;
2622 return stmt;
2625 /* Return the first non-label statement in basic block BB. */
2627 static gimple *
2628 first_non_label_stmt (basic_block bb)
2630 gimple_stmt_iterator i = gsi_start_bb (bb);
2631 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2632 gsi_next (&i);
2633 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2636 /* Return the last statement in basic block BB. */
2638 gimple *
2639 last_stmt (basic_block bb)
2641 gimple_stmt_iterator i = gsi_last_bb (bb);
2642 gimple *stmt = NULL;
2644 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2646 gsi_prev (&i);
2647 stmt = NULL;
2649 return stmt;
2652 /* Return the last statement of an otherwise empty block. Return NULL
2653 if the block is totally empty, or if it contains more than one
2654 statement. */
2656 gimple *
2657 last_and_only_stmt (basic_block bb)
2659 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2660 gimple *last, *prev;
2662 if (gsi_end_p (i))
2663 return NULL;
2665 last = gsi_stmt (i);
2666 gsi_prev_nondebug (&i);
2667 if (gsi_end_p (i))
2668 return last;
2670 /* Empty statements should no longer appear in the instruction stream.
2671 Everything that might have appeared before should be deleted by
2672 remove_useless_stmts, and the optimizers should just gsi_remove
2673 instead of smashing with build_empty_stmt.
2675 Thus the only thing that should appear here in a block containing
2676 one executable statement is a label. */
2677 prev = gsi_stmt (i);
2678 if (gimple_code (prev) == GIMPLE_LABEL)
2679 return last;
2680 else
2681 return NULL;
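/* Editor's note (illustrative): for a block containing only

     L5:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since a leading label
   (and any debug stmts, which gsi_last_nondebug_bb skips) does not
   count; with two executable statements it returns NULL.  */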
2684 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2686 static void
2687 reinstall_phi_args (edge new_edge, edge old_edge)
2689 edge_var_map *vm;
2690 int i;
2691 gphi_iterator phis;
2693 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2694 if (!v)
2695 return;
2697 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2698 v->iterate (i, &vm) && !gsi_end_p (phis);
2699 i++, gsi_next (&phis))
2701 gphi *phi = phis.phi ();
2702 tree result = redirect_edge_var_map_result (vm);
2703 tree arg = redirect_edge_var_map_def (vm);
2705 gcc_assert (result == gimple_phi_result (phi));
2707 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2710 redirect_edge_var_map_clear (old_edge);
2713 /* Returns the basic block after which the new basic block created
2714 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2715 near its "logical" location. This is of most help to humans looking
2716 at debugging dumps. */
2718 basic_block
2719 split_edge_bb_loc (edge edge_in)
2721 basic_block dest = edge_in->dest;
2722 basic_block dest_prev = dest->prev_bb;
2724 if (dest_prev)
2726 edge e = find_edge (dest_prev, dest);
2727 if (e && !(e->flags & EDGE_COMPLEX))
2728 return edge_in->src;
2730 return dest_prev;
2733 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2734 Abort on abnormal edges. */
2736 static basic_block
2737 gimple_split_edge (edge edge_in)
2739 basic_block new_bb, after_bb, dest;
2740 edge new_edge, e;
2742 /* Abnormal edges cannot be split. */
2743 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2745 dest = edge_in->dest;
2747 after_bb = split_edge_bb_loc (edge_in);
2749 new_bb = create_empty_bb (after_bb);
2750 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2751 new_bb->count = edge_in->count;
2752 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2753 new_edge->probability = REG_BR_PROB_BASE;
2754 new_edge->count = edge_in->count;
2756 e = redirect_edge_and_branch (edge_in, new_bb);
2757 gcc_assert (e == edge_in);
2758 reinstall_phi_args (new_edge, e);
2760 return new_bb;
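/* Editor's sketch (hypothetical CFG, not from the original source):
   splitting the critical edge 2->4 in

     bb 2 -> bb 4            bb 2 -> bb 5 -> bb 4
     bb 3 -> bb 4     =>     bb 3 -> bb 4

   creates the new empty block (bb 5 here) with the edge's frequency
   and count, gives it a single EDGE_FALLTHRU edge to the old
   destination, and redirects the original edge to the new block; PHI
   arguments queued on the redirected edge are then reinstalled on the
   new fallthru edge by reinstall_phi_args.  */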
2764 /* Verify properties of the address expression T with base object BASE. */
2766 static tree
2767 verify_address (tree t, tree base)
2769 bool old_constant;
2770 bool old_side_effects;
2771 bool new_constant;
2772 bool new_side_effects;
2774 old_constant = TREE_CONSTANT (t);
2775 old_side_effects = TREE_SIDE_EFFECTS (t);
2777 recompute_tree_invariant_for_addr_expr (t);
2778 new_side_effects = TREE_SIDE_EFFECTS (t);
2779 new_constant = TREE_CONSTANT (t);
2781 if (old_constant != new_constant)
2783 error ("constant not recomputed when ADDR_EXPR changed");
2784 return t;
2786 if (old_side_effects != new_side_effects)
2788 error ("side effects not recomputed when ADDR_EXPR changed");
2789 return t;
2792 if (!(TREE_CODE (base) == VAR_DECL
2793 || TREE_CODE (base) == PARM_DECL
2794 || TREE_CODE (base) == RESULT_DECL))
2795 return NULL_TREE;
2797 if (DECL_GIMPLE_REG_P (base))
2799 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2800 return base;
2803 return NULL_TREE;
2806 /* Callback for walk_tree: check that all elements with their address taken
2807 are properly noticed as such. The DATA is an int* that is 1 if TP was seen
2808 inside a PHI node. */
2810 static tree
2811 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2813 tree t = *tp, x;
2815 if (TYPE_P (t))
2816 *walk_subtrees = 0;
2818 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2819 #define CHECK_OP(N, MSG) \
2820 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2821 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2823 switch (TREE_CODE (t))
2825 case SSA_NAME:
2826 if (SSA_NAME_IN_FREE_LIST (t))
2828 error ("SSA name in freelist but still referenced");
2829 return *tp;
2831 break;
2833 case PARM_DECL:
2834 case VAR_DECL:
2835 case RESULT_DECL:
2837 tree context = decl_function_context (t);
2838 if (context != cfun->decl
2839 && !SCOPE_FILE_SCOPE_P (context)
2840 && !TREE_STATIC (t)
2841 && !DECL_EXTERNAL (t))
2843 error ("Local declaration from a different function");
2844 return t;
2847 break;
2849 case INDIRECT_REF:
2850 error ("INDIRECT_REF in gimple IL");
2851 return t;
2853 case MEM_REF:
2854 x = TREE_OPERAND (t, 0);
2855 if (!POINTER_TYPE_P (TREE_TYPE (x))
2856 || !is_gimple_mem_ref_addr (x))
2858 error ("invalid first operand of MEM_REF");
2859 return x;
2861 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2862 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2864 error ("invalid offset operand of MEM_REF");
2865 return TREE_OPERAND (t, 1);
2867 if (TREE_CODE (x) == ADDR_EXPR)
2869 tree va = verify_address (x, TREE_OPERAND (x, 0));
2870 if (va)
2871 return va;
2872 x = TREE_OPERAND (x, 0);
2874 walk_tree (&x, verify_expr, data, NULL);
2875 *walk_subtrees = 0;
2876 break;
2878 case ASSERT_EXPR:
2879 x = fold (ASSERT_EXPR_COND (t));
2880 if (x == boolean_false_node)
2882 error ("ASSERT_EXPR with an always-false condition");
2883 return *tp;
2885 break;
2887 case MODIFY_EXPR:
2888 error ("MODIFY_EXPR not expected while having tuples");
2889 return *tp;
2891 case ADDR_EXPR:
2893 tree tem;
2895 gcc_assert (is_gimple_address (t));
2897 /* Skip any references (they will be checked when we recurse down the
2898 tree) and ensure that any variable used as a prefix is marked
2899 addressable. */
2900 for (x = TREE_OPERAND (t, 0);
2901 handled_component_p (x);
2902 x = TREE_OPERAND (x, 0))
2905 if ((tem = verify_address (t, x)))
2906 return tem;
2908 if (!(TREE_CODE (x) == VAR_DECL
2909 || TREE_CODE (x) == PARM_DECL
2910 || TREE_CODE (x) == RESULT_DECL))
2911 return NULL;
2913 if (!TREE_ADDRESSABLE (x))
2915 error ("address taken, but ADDRESSABLE bit not set");
2916 return x;
2919 break;
2922 case COND_EXPR:
2923 x = COND_EXPR_COND (t);
2924 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2926 error ("non-integral used in condition");
2927 return x;
2929 if (!is_gimple_condexpr (x))
2931 error ("invalid conditional operand");
2932 return x;
2934 break;
2936 case NON_LVALUE_EXPR:
2937 case TRUTH_NOT_EXPR:
2938 gcc_unreachable ();
2940 CASE_CONVERT:
2941 case FIX_TRUNC_EXPR:
2942 case FLOAT_EXPR:
2943 case NEGATE_EXPR:
2944 case ABS_EXPR:
2945 case BIT_NOT_EXPR:
2946 CHECK_OP (0, "invalid operand to unary operator");
2947 break;
2949 case REALPART_EXPR:
2950 case IMAGPART_EXPR:
2951 case BIT_FIELD_REF:
2952 if (!is_gimple_reg_type (TREE_TYPE (t)))
2954 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2955 return t;
2958 if (TREE_CODE (t) == BIT_FIELD_REF)
2960 tree t0 = TREE_OPERAND (t, 0);
2961 tree t1 = TREE_OPERAND (t, 1);
2962 tree t2 = TREE_OPERAND (t, 2);
2963 if (!tree_fits_uhwi_p (t1)
2964 || !tree_fits_uhwi_p (t2))
2966 error ("invalid position or size operand to BIT_FIELD_REF");
2967 return t;
2969 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2970 && (TYPE_PRECISION (TREE_TYPE (t))
2971 != tree_to_uhwi (t1)))
2973 error ("integral result type precision does not match "
2974 "field size of BIT_FIELD_REF");
2975 return t;
2977 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2978 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2979 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2980 != tree_to_uhwi (t1)))
2982 error ("mode size of non-integral result does not "
2983 "match field size of BIT_FIELD_REF");
2984 return t;
2986 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2987 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2988 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2990 error ("position plus size exceeds size of referenced object in "
2991 "BIT_FIELD_REF");
2992 return t;
2995 t = TREE_OPERAND (t, 0);
2997 /* Fall-through. */
2998 case COMPONENT_REF:
2999 case ARRAY_REF:
3000 case ARRAY_RANGE_REF:
3001 case VIEW_CONVERT_EXPR:
3002 /* We have a nest of references. Verify that each of the operands
3003 that determine where to reference is either a constant or a variable,
3004 verify that the base is valid, and then show we've already checked
3005 the subtrees. */
3006 while (handled_component_p (t))
3008 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3009 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3010 else if (TREE_CODE (t) == ARRAY_REF
3011 || TREE_CODE (t) == ARRAY_RANGE_REF)
3013 CHECK_OP (1, "invalid array index");
3014 if (TREE_OPERAND (t, 2))
3015 CHECK_OP (2, "invalid array lower bound");
3016 if (TREE_OPERAND (t, 3))
3017 CHECK_OP (3, "invalid array stride");
3019 else if (TREE_CODE (t) == BIT_FIELD_REF
3020 || TREE_CODE (t) == REALPART_EXPR
3021 || TREE_CODE (t) == IMAGPART_EXPR)
3023 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3024 "REALPART_EXPR");
3025 return t;
3028 t = TREE_OPERAND (t, 0);
3031 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3033 error ("invalid reference prefix");
3034 return t;
3036 walk_tree (&t, verify_expr, data, NULL);
3037 *walk_subtrees = 0;
3038 break;
3039 case PLUS_EXPR:
3040 case MINUS_EXPR:
3041 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3042 should be done using POINTER_PLUS_EXPR. */
3043 if (POINTER_TYPE_P (TREE_TYPE (t)))
3045 error ("invalid operand to plus/minus, type is a pointer");
3046 return t;
3048 CHECK_OP (0, "invalid operand to binary operator");
3049 CHECK_OP (1, "invalid operand to binary operator");
3050 break;
3052 case POINTER_PLUS_EXPR:
3053 /* Check to make sure the first operand is a pointer or reference type. */
3054 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3056 error ("invalid operand to pointer plus, first operand is not a pointer");
3057 return t;
3059 /* Check to make sure the second operand is a ptrofftype. */
3060 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3062 error ("invalid operand to pointer plus, second operand is not an "
3063 "integer type of appropriate width");
3064 return t;
3066 /* FALLTHROUGH */
3067 case LT_EXPR:
3068 case LE_EXPR:
3069 case GT_EXPR:
3070 case GE_EXPR:
3071 case EQ_EXPR:
3072 case NE_EXPR:
3073 case UNORDERED_EXPR:
3074 case ORDERED_EXPR:
3075 case UNLT_EXPR:
3076 case UNLE_EXPR:
3077 case UNGT_EXPR:
3078 case UNGE_EXPR:
3079 case UNEQ_EXPR:
3080 case LTGT_EXPR:
3081 case MULT_EXPR:
3082 case TRUNC_DIV_EXPR:
3083 case CEIL_DIV_EXPR:
3084 case FLOOR_DIV_EXPR:
3085 case ROUND_DIV_EXPR:
3086 case TRUNC_MOD_EXPR:
3087 case CEIL_MOD_EXPR:
3088 case FLOOR_MOD_EXPR:
3089 case ROUND_MOD_EXPR:
3090 case RDIV_EXPR:
3091 case EXACT_DIV_EXPR:
3092 case MIN_EXPR:
3093 case MAX_EXPR:
3094 case LSHIFT_EXPR:
3095 case RSHIFT_EXPR:
3096 case LROTATE_EXPR:
3097 case RROTATE_EXPR:
3098 case BIT_IOR_EXPR:
3099 case BIT_XOR_EXPR:
3100 case BIT_AND_EXPR:
3101 CHECK_OP (0, "invalid operand to binary operator");
3102 CHECK_OP (1, "invalid operand to binary operator");
3103 break;
3105 case CONSTRUCTOR:
3106 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3107 *walk_subtrees = 0;
3108 break;
3110 case CASE_LABEL_EXPR:
3111 if (CASE_CHAIN (t))
3113 error ("invalid CASE_CHAIN");
3114 return t;
3116 break;
3118 default:
3119 break;
3121 return NULL;
3123 #undef CHECK_OP
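/* Editor's examples of violations the walker above rejects (all
   hypothetical fragments, not from the original source):

     ptr_1 = ptr_2 + 4;         <- PLUS_EXPR on pointers; must be
                                   POINTER_PLUS_EXPR
     x_1 = *p_2;                <- INDIRECT_REF: invalid in gimple IL,
                                   a MEM_REF is expected instead
     y_1 = a_2 + (b_3 * c_4);   <- nested operand is not is_gimple_val;
                                   operands must be names or constants

   Each case reports an error and returns the offending subtree.  */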
3127 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3128 Returns true if there is an error, otherwise false. */
3130 static bool
3131 verify_types_in_gimple_min_lval (tree expr)
3133 tree op;
3135 if (is_gimple_id (expr))
3136 return false;
3138 if (TREE_CODE (expr) != TARGET_MEM_REF
3139 && TREE_CODE (expr) != MEM_REF)
3141 error ("invalid expression for min lvalue");
3142 return true;
3145 /* TARGET_MEM_REFs are strange beasts. */
3146 if (TREE_CODE (expr) == TARGET_MEM_REF)
3147 return false;
3149 op = TREE_OPERAND (expr, 0);
3150 if (!is_gimple_val (op))
3152 error ("invalid operand in indirect reference");
3153 debug_generic_stmt (op);
3154 return true;
3156 /* Memory references now generally can involve a value conversion. */
3158 return false;
3161 /* Verify if EXPR is a valid GIMPLE reference expression. If
3162 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3163 if there is an error, otherwise false. */
3165 static bool
3166 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3168 while (handled_component_p (expr))
3170 tree op = TREE_OPERAND (expr, 0);
3172 if (TREE_CODE (expr) == ARRAY_REF
3173 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3175 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3176 || (TREE_OPERAND (expr, 2)
3177 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3178 || (TREE_OPERAND (expr, 3)
3179 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3181 error ("invalid operands to array reference");
3182 debug_generic_stmt (expr);
3183 return true;
3187 /* Verify if the reference array element types are compatible. */
3188 if (TREE_CODE (expr) == ARRAY_REF
3189 && !useless_type_conversion_p (TREE_TYPE (expr),
3190 TREE_TYPE (TREE_TYPE (op))))
3192 error ("type mismatch in array reference");
3193 debug_generic_stmt (TREE_TYPE (expr));
3194 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3195 return true;
3197 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3198 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3199 TREE_TYPE (TREE_TYPE (op))))
3201 error ("type mismatch in array range reference");
3202 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3204 return true;
3207 if ((TREE_CODE (expr) == REALPART_EXPR
3208 || TREE_CODE (expr) == IMAGPART_EXPR)
3209 && !useless_type_conversion_p (TREE_TYPE (expr),
3210 TREE_TYPE (TREE_TYPE (op))))
3212 error ("type mismatch in real/imagpart reference");
3213 debug_generic_stmt (TREE_TYPE (expr));
3214 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3215 return true;
3218 if (TREE_CODE (expr) == COMPONENT_REF
3219 && !useless_type_conversion_p (TREE_TYPE (expr),
3220 TREE_TYPE (TREE_OPERAND (expr, 1))))
3222 error ("type mismatch in component reference");
3223 debug_generic_stmt (TREE_TYPE (expr));
3224 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3225 return true;
3228 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3230 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3231 that their operand is not an SSA name or an invariant when
3232 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3233 bug). Otherwise there is nothing to verify, gross mismatches at
3234 most invoke undefined behavior. */
3235 if (require_lvalue
3236 && (TREE_CODE (op) == SSA_NAME
3237 || is_gimple_min_invariant (op)))
3239 error ("conversion of an SSA_NAME on the left hand side");
3240 debug_generic_stmt (expr);
3241 return true;
3243 else if (TREE_CODE (op) == SSA_NAME
3244 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3246 error ("conversion of register to a different size");
3247 debug_generic_stmt (expr);
3248 return true;
3250 else if (!handled_component_p (op))
3251 return false;
3254 expr = op;
3257 if (TREE_CODE (expr) == MEM_REF)
3259 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3261 error ("invalid address operand in MEM_REF");
3262 debug_generic_stmt (expr);
3263 return true;
3265 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3266 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3268 error ("invalid offset operand in MEM_REF");
3269 debug_generic_stmt (expr);
3270 return true;
3273 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3275 if (!TMR_BASE (expr)
3276 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3278 error ("invalid address operand in TARGET_MEM_REF");
3279 return true;
3281 if (!TMR_OFFSET (expr)
3282 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3283 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3285 error ("invalid offset operand in TARGET_MEM_REF");
3286 debug_generic_stmt (expr);
3287 return true;
3291 return ((require_lvalue || !is_gimple_min_invariant (expr))
3292 && verify_types_in_gimple_min_lval (expr));
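/* Editor's note (illustrative): for a reference such as

     a.b[i_1].c

   the loop above peels the COMPONENT_REF and ARRAY_REF wrappers one by
   one, checking at each level that the index operands are gimple
   values and that the component or element type matches the type of
   the enclosing reference, until only the base (here "a", or a
   MEM_REF / TARGET_MEM_REF) remains for the min-lval check.  */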
3295 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3296 list of pointer-to types that is trivially convertible to DEST. */
3298 static bool
3299 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3301 tree src;
3303 if (!TYPE_POINTER_TO (src_obj))
3304 return true;
3306 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3307 if (useless_type_conversion_p (dest, src))
3308 return true;
3310 return false;
3313 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3314 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3316 static bool
3317 valid_fixed_convert_types_p (tree type1, tree type2)
3319 return (FIXED_POINT_TYPE_P (type1)
3320 && (INTEGRAL_TYPE_P (type2)
3321 || SCALAR_FLOAT_TYPE_P (type2)
3322 || FIXED_POINT_TYPE_P (type2)));
3325 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3326 is a problem, otherwise false. */
3328 static bool
3329 verify_gimple_call (gcall *stmt)
3331 tree fn = gimple_call_fn (stmt);
3332 tree fntype, fndecl;
3333 unsigned i;
3335 if (gimple_call_internal_p (stmt))
3337 if (fn)
3339 error ("gimple call has two targets");
3340 debug_generic_stmt (fn);
3341 return true;
3344 else
3346 if (!fn)
3348 error ("gimple call has no target");
3349 return true;
3353 if (fn && !is_gimple_call_addr (fn))
3355 error ("invalid function in gimple call");
3356 debug_generic_stmt (fn);
3357 return true;
3360 if (fn
3361 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3362 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3363 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3365 error ("non-function in gimple call");
3366 return true;
3369 fndecl = gimple_call_fndecl (stmt);
3370 if (fndecl
3371 && TREE_CODE (fndecl) == FUNCTION_DECL
3372 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3373 && !DECL_PURE_P (fndecl)
3374 && !TREE_READONLY (fndecl))
3376 error ("invalid pure const state for function");
3377 return true;
3380 tree lhs = gimple_call_lhs (stmt);
3381 if (lhs
3382 && (!is_gimple_lvalue (lhs)
3383 || verify_types_in_gimple_reference (lhs, true)))
3385 error ("invalid LHS in gimple call");
3386 return true;
3389 if (gimple_call_ctrl_altering_p (stmt)
3390 && gimple_call_noreturn_p (stmt)
3391 && should_remove_lhs_p (lhs))
3393 error ("LHS in noreturn call");
3394 return true;
3397 fntype = gimple_call_fntype (stmt);
3398 if (fntype
3399 && lhs
3400 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3401 /* ??? At least C++ misses conversions at assignments from
3402 void * call results.
3403 ??? Java is completely off. Especially with functions
3404 returning java.lang.Object.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs));
3411 debug_generic_stmt (TREE_TYPE (fntype));
3412 return true;
3415 if (gimple_call_chain (stmt)
3416 && !is_gimple_val (gimple_call_chain (stmt)))
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt));
3420 return true;
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt)
3426 && fndecl
3427 && !DECL_STATIC_CHAIN (fndecl))
3429 error ("static chain with function that doesn%'t use one");
3430 return true;
3433 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3435 switch (DECL_FUNCTION_CODE (fndecl))
3437 case BUILT_IN_UNREACHABLE:
3438 case BUILT_IN_TRAP:
3439 if (gimple_call_num_args (stmt) > 0)
3441 /* Built-in unreachable with parameters might not be caught by
3442 the undefined behavior sanitizer. Front-ends do check that users do
3443 not call them that way, but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out a call cannot happen in a legal program. In such cases,
3446 we must make sure arguments are stripped off. */
3447 error ("__builtin_unreachable or __builtin_trap call with "
3448 "arguments");
3449 return true;
3451 break;
3452 default:
3453 break;
3457 /* ??? The C frontend passes unpromoted arguments in case it
3458 didn't see a function declaration before the call. So for now
3459 leave the call arguments mostly unverified. Once we gimplify
3460 unit-at-a-time we have a chance to fix this. */
3462 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3464 tree arg = gimple_call_arg (stmt, i);
3465 if ((is_gimple_reg_type (TREE_TYPE (arg))
3466 && !is_gimple_val (arg))
3467 || (!is_gimple_reg_type (TREE_TYPE (arg))
3468 && !is_gimple_lvalue (arg)))
3470 error ("invalid argument to gimple call");
3471 debug_generic_expr (arg);
3472 return true;
3476 return false;
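/* Editor's note (illustrative summary, not from the original source):
   typical failures caught above include an internal call that also
   carries a function target, a decl marked looping-const-or-pure that
   is neither pure nor readonly, a static chain passed to a decl
   without DECL_STATIC_CHAIN, and __builtin_unreachable () or
   __builtin_trap () called with arguments.  */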
3479 /* Verifies the gimple comparison with the result type TYPE and
3480 the operands OP0 and OP1, comparison code is CODE. */
3482 static bool
3483 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3485 tree op0_type = TREE_TYPE (op0);
3486 tree op1_type = TREE_TYPE (op1);
3488 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3490 error ("invalid operands in gimple comparison");
3491 return true;
3494 /* For comparisons we do not have the operation's type as the
3495 effective type the comparison is carried out in. Instead
3496 we require that either the first operand is trivially
3497 convertible into the second, or the other way around.
3498 Because we special-case pointers to void we allow
3499 comparisons of pointers with the same mode as well. */
3500 if (!useless_type_conversion_p (op0_type, op1_type)
3501 && !useless_type_conversion_p (op1_type, op0_type)
3502 && (!POINTER_TYPE_P (op0_type)
3503 || !POINTER_TYPE_P (op1_type)
3504 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3506 error ("mismatching comparison operand types");
3507 debug_generic_expr (op0_type);
3508 debug_generic_expr (op1_type);
3509 return true;
3512 /* The resulting type of a comparison may be an effective boolean type. */
3513 if (INTEGRAL_TYPE_P (type)
3514 && (TREE_CODE (type) == BOOLEAN_TYPE
3515 || TYPE_PRECISION (type) == 1))
3517 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3518 || TREE_CODE (op1_type) == VECTOR_TYPE)
3519 && code != EQ_EXPR && code != NE_EXPR
3520 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3521 && !VECTOR_INTEGER_TYPE_P (op0_type))
3523 error ("unsupported operation or type for vector comparison"
3524 " returning a boolean");
3525 debug_generic_expr (op0_type);
3526 debug_generic_expr (op1_type);
3527 return true;
3530 /* Or a boolean vector type with the same element count
3531 as the comparison operand types. */
3532 else if (TREE_CODE (type) == VECTOR_TYPE
3533 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3535 if (TREE_CODE (op0_type) != VECTOR_TYPE
3536 || TREE_CODE (op1_type) != VECTOR_TYPE)
3538 error ("non-vector operands in vector comparison");
3539 debug_generic_expr (op0_type);
3540 debug_generic_expr (op1_type);
3541 return true;
3544 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3546 error ("invalid vector comparison resulting type");
3547 debug_generic_expr (type);
3548 return true;
3551 else
3553 error ("bogus comparison result type");
3554 debug_generic_expr (type);
3555 return true;
3558 return false;
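/* Editor's examples (hypothetical): with int i, unsigned u, and
   void *p / char *q of equal mode,

     _1 = i_2 < u_3;    <- rejected: mismatching operand types
     _1 = p_2 == q_3;   <- accepted: pointers of the same mode
     _1 = i_2 < i_3;    <- accepted when _1 has boolean or
                           1-bit-precision integral type

   and for vector operands the result must generally be a boolean
   vector with the same number of elements as the operands.  */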
3561 /* Verify a gimple assignment statement STMT with an unary rhs.
3562 Returns true if anything is wrong. */
3564 static bool
3565 verify_gimple_assign_unary (gassign *stmt)
3567 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3568 tree lhs = gimple_assign_lhs (stmt);
3569 tree lhs_type = TREE_TYPE (lhs);
3570 tree rhs1 = gimple_assign_rhs1 (stmt);
3571 tree rhs1_type = TREE_TYPE (rhs1);
3573 if (!is_gimple_reg (lhs))
3575 error ("non-register as LHS of unary operation");
3576 return true;
3579 if (!is_gimple_val (rhs1))
3581 error ("invalid operand in unary operation");
3582 return true;
3585 /* First handle conversions. */
3586 switch (rhs_code)
3588 CASE_CONVERT:
3590 /* Allow conversions from pointer type to integral type only if
3591 there is no sign or zero extension involved.
3592 For targets where the precision of ptrofftype doesn't match that
3593 of pointers we need to allow arbitrary conversions to ptrofftype. */
3594 if ((POINTER_TYPE_P (lhs_type)
3595 && INTEGRAL_TYPE_P (rhs1_type))
3596 || (POINTER_TYPE_P (rhs1_type)
3597 && INTEGRAL_TYPE_P (lhs_type)
3598 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3599 || ptrofftype_p (sizetype))))
3600 return false;
3602 /* Allow conversion from integral to offset type and vice versa. */
3603 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3604 && INTEGRAL_TYPE_P (rhs1_type))
3605 || (INTEGRAL_TYPE_P (lhs_type)
3606 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3607 return false;
3609 /* Otherwise assert we are converting between types of the
3610 same kind. */
3611 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3613 error ("invalid types in nop conversion");
3614 debug_generic_expr (lhs_type);
3615 debug_generic_expr (rhs1_type);
3616 return true;
3619 return false;
3622 case ADDR_SPACE_CONVERT_EXPR:
3624 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3625 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3626 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3628 error ("invalid types in address space conversion");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3634 return false;
3637 case FIXED_CONVERT_EXPR:
3639 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3640 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3642 error ("invalid types in fixed-point conversion");
3643 debug_generic_expr (lhs_type);
3644 debug_generic_expr (rhs1_type);
3645 return true;
3648 return false;
3651 case FLOAT_EXPR:
3653 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3654 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3655 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3657 error ("invalid types in conversion to floating point");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3660 return true;
3663 return false;
3666 case FIX_TRUNC_EXPR:
3668 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3669 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3670 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3672 error ("invalid types in conversion to integer");
3673 debug_generic_expr (lhs_type);
3674 debug_generic_expr (rhs1_type);
3675 return true;
3678 return false;
3680 case REDUC_MAX_EXPR:
3681 case REDUC_MIN_EXPR:
3682 case REDUC_PLUS_EXPR:
3683 if (!VECTOR_TYPE_P (rhs1_type)
3684 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3686 error ("reduction should convert from vector to element type");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 return true;
3691 return false;
3693 case VEC_UNPACK_HI_EXPR:
3694 case VEC_UNPACK_LO_EXPR:
3695 case VEC_UNPACK_FLOAT_HI_EXPR:
3696 case VEC_UNPACK_FLOAT_LO_EXPR:
3697 /* FIXME. */
3698 return false;
3700 case NEGATE_EXPR:
3701 case ABS_EXPR:
3702 case BIT_NOT_EXPR:
3703 case PAREN_EXPR:
3704 case CONJ_EXPR:
3705 break;
3707 default:
3708 gcc_unreachable ();
3711 /* For the remaining codes assert there is no conversion involved. */
3712 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3714 error ("non-trivial conversion in unary operation");
3715 debug_generic_expr (lhs_type);
3716 debug_generic_expr (rhs1_type);
3717 return true;
3720 return false;
3723 /* Verify a gimple assignment statement STMT with a binary rhs.
3724 Returns true if anything is wrong. */
3726 static bool
3727 verify_gimple_assign_binary (gassign *stmt)
3729 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3730 tree lhs = gimple_assign_lhs (stmt);
3731 tree lhs_type = TREE_TYPE (lhs);
3732 tree rhs1 = gimple_assign_rhs1 (stmt);
3733 tree rhs1_type = TREE_TYPE (rhs1);
3734 tree rhs2 = gimple_assign_rhs2 (stmt);
3735 tree rhs2_type = TREE_TYPE (rhs2);
3737 if (!is_gimple_reg (lhs))
3739 error ("non-register as LHS of binary operation");
3740 return true;
3743 if (!is_gimple_val (rhs1)
3744 || !is_gimple_val (rhs2))
3746 error ("invalid operands in binary operation");
3747 return true;
3750 /* First handle operations that involve different types. */
3751 switch (rhs_code)
3753 case COMPLEX_EXPR:
3755 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3756 || !(INTEGRAL_TYPE_P (rhs1_type)
3757 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3758 || !(INTEGRAL_TYPE_P (rhs2_type)
3759 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3761 error ("type mismatch in complex expression");
3762 debug_generic_expr (lhs_type);
3763 debug_generic_expr (rhs1_type);
3764 debug_generic_expr (rhs2_type);
3765 return true;
3768 return false;
3771 case LSHIFT_EXPR:
3772 case RSHIFT_EXPR:
3773 case LROTATE_EXPR:
3774 case RROTATE_EXPR:
3776 /* Shifts and rotates are ok on integral types, fixed point
3777 types and integer vector types. */
3778 if ((!INTEGRAL_TYPE_P (rhs1_type)
3779 && !FIXED_POINT_TYPE_P (rhs1_type)
3780 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3781 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3782 || (!INTEGRAL_TYPE_P (rhs2_type)
3783 /* Vector shifts of vectors are also ok. */
3784 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3785 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3786 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3787 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3788 || !useless_type_conversion_p (lhs_type, rhs1_type))
3790 error ("type mismatch in shift expression");
3791 debug_generic_expr (lhs_type);
3792 debug_generic_expr (rhs1_type);
3793 debug_generic_expr (rhs2_type);
3794 return true;
3797 return false;
3800 case WIDEN_LSHIFT_EXPR:
3802 if (!INTEGRAL_TYPE_P (lhs_type)
3803 || !INTEGRAL_TYPE_P (rhs1_type)
3804 || TREE_CODE (rhs2) != INTEGER_CST
3805 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3807 error ("type mismatch in widening vector shift expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 debug_generic_expr (rhs2_type);
3811 return true;
3814 return false;
3817 case VEC_WIDEN_LSHIFT_HI_EXPR:
3818 case VEC_WIDEN_LSHIFT_LO_EXPR:
3820 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3821 || TREE_CODE (lhs_type) != VECTOR_TYPE
3822 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3823 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3824 || TREE_CODE (rhs2) != INTEGER_CST
3825 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3826 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3828 error ("type mismatch in widening vector shift expression");
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 debug_generic_expr (rhs2_type);
3832 return true;
3835 return false;
3838 case PLUS_EXPR:
3839 case MINUS_EXPR:
3841 tree lhs_etype = lhs_type;
3842 tree rhs1_etype = rhs1_type;
3843 tree rhs2_etype = rhs2_type;
3844 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3846 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3847 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3849 error ("invalid non-vector operands to vector valued plus");
3850 return true;
3852 lhs_etype = TREE_TYPE (lhs_type);
3853 rhs1_etype = TREE_TYPE (rhs1_type);
3854 rhs2_etype = TREE_TYPE (rhs2_type);
3856 if (POINTER_TYPE_P (lhs_etype)
3857 || POINTER_TYPE_P (rhs1_etype)
3858 || POINTER_TYPE_P (rhs2_etype))
3860 error ("invalid (pointer) operands to plus/minus");
3861 return true;
3864 /* Continue with generic binary expression handling. */
3865 break;
3868 case POINTER_PLUS_EXPR:
3870 if (!POINTER_TYPE_P (rhs1_type)
3871 || !useless_type_conversion_p (lhs_type, rhs1_type)
3872 || !ptrofftype_p (rhs2_type))
3874 error ("type mismatch in pointer plus expression");
3875 debug_generic_stmt (lhs_type);
3876 debug_generic_stmt (rhs1_type);
3877 debug_generic_stmt (rhs2_type);
3878 return true;
3881 return false;
3884 case TRUTH_ANDIF_EXPR:
3885 case TRUTH_ORIF_EXPR:
3886 case TRUTH_AND_EXPR:
3887 case TRUTH_OR_EXPR:
3888 case TRUTH_XOR_EXPR:
3890 gcc_unreachable ();
3892 case LT_EXPR:
3893 case LE_EXPR:
3894 case GT_EXPR:
3895 case GE_EXPR:
3896 case EQ_EXPR:
3897 case NE_EXPR:
3898 case UNORDERED_EXPR:
3899 case ORDERED_EXPR:
3900 case UNLT_EXPR:
3901 case UNLE_EXPR:
3902 case UNGT_EXPR:
3903 case UNGE_EXPR:
3904 case UNEQ_EXPR:
3905 case LTGT_EXPR:
3906 /* Comparisons are also binary, but the result type is not
3907 connected to the operand types. */
3908 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3910 case WIDEN_MULT_EXPR:
3911 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3912 return true;
3913 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3914 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3916 case WIDEN_SUM_EXPR:
3917 case VEC_WIDEN_MULT_HI_EXPR:
3918 case VEC_WIDEN_MULT_LO_EXPR:
3919 case VEC_WIDEN_MULT_EVEN_EXPR:
3920 case VEC_WIDEN_MULT_ODD_EXPR:
3921 case VEC_PACK_TRUNC_EXPR:
3922 case VEC_PACK_SAT_EXPR:
3923 case VEC_PACK_FIX_TRUNC_EXPR:
3924 /* FIXME. */
3925 return false;
3927 case MULT_EXPR:
3928 case MULT_HIGHPART_EXPR:
3929 case TRUNC_DIV_EXPR:
3930 case CEIL_DIV_EXPR:
3931 case FLOOR_DIV_EXPR:
3932 case ROUND_DIV_EXPR:
3933 case TRUNC_MOD_EXPR:
3934 case CEIL_MOD_EXPR:
3935 case FLOOR_MOD_EXPR:
3936 case ROUND_MOD_EXPR:
3937 case RDIV_EXPR:
3938 case EXACT_DIV_EXPR:
3939 case MIN_EXPR:
3940 case MAX_EXPR:
3941 case BIT_IOR_EXPR:
3942 case BIT_XOR_EXPR:
3943 case BIT_AND_EXPR:
3944 /* Continue with generic binary expression handling. */
3945 break;
3947 default:
3948 gcc_unreachable ();
3951 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3952 || !useless_type_conversion_p (lhs_type, rhs2_type))
3954 error ("type mismatch in binary expression");
3955 debug_generic_stmt (lhs_type);
3956 debug_generic_stmt (rhs1_type);
3957 debug_generic_stmt (rhs2_type);
3958 return true;
3961 return false;
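/* Editor's examples (hypothetical GIMPLE, not from the original
   source): the checks above reject

     ptr_1 = ptr_2 + 1;       <- PLUS_EXPR on pointer operands
     ptr_1 = ptr_2 p+ f_3;    <- POINTER_PLUS_EXPR whose offset is not
                                 of ptrofftype
     x_1 = y_2 << 3;          <- if x_1 and y_2 differ in type

   while "ptr_1 = ptr_2 p+ 4" and shifts of integer vectors by integer
   vectors of matching element type are accepted ("p+" is the dump
   notation for POINTER_PLUS_EXPR).  */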
3964 /* Verify a gimple assignment statement STMT with a ternary rhs.
3965 Returns true if anything is wrong. */
3967 static bool
3968 verify_gimple_assign_ternary (gassign *stmt)
3970 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3971 tree lhs = gimple_assign_lhs (stmt);
3972 tree lhs_type = TREE_TYPE (lhs);
3973 tree rhs1 = gimple_assign_rhs1 (stmt);
3974 tree rhs1_type = TREE_TYPE (rhs1);
3975 tree rhs2 = gimple_assign_rhs2 (stmt);
3976 tree rhs2_type = TREE_TYPE (rhs2);
3977 tree rhs3 = gimple_assign_rhs3 (stmt);
3978 tree rhs3_type = TREE_TYPE (rhs3);
3980 if (!is_gimple_reg (lhs))
3982 error ("non-register as LHS of ternary operation");
3983 return true;
3986 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3987 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3988 || !is_gimple_val (rhs2)
3989 || !is_gimple_val (rhs3))
3991 error ("invalid operands in ternary operation");
3992 return true;
3995 /* First handle operations that involve different types. */
3996 switch (rhs_code)
3998 case WIDEN_MULT_PLUS_EXPR:
3999 case WIDEN_MULT_MINUS_EXPR:
4000 if ((!INTEGRAL_TYPE_P (rhs1_type)
4001 && !FIXED_POINT_TYPE_P (rhs1_type))
4002 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4003 || !useless_type_conversion_p (lhs_type, rhs3_type)
4004 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4005 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4007 error ("type mismatch in widening multiply-accumulate expression");
4008 debug_generic_expr (lhs_type);
4009 debug_generic_expr (rhs1_type);
4010 debug_generic_expr (rhs2_type);
4011 debug_generic_expr (rhs3_type);
4012 return true;
4014 break;
4016 case FMA_EXPR:
4017 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4018 || !useless_type_conversion_p (lhs_type, rhs2_type)
4019 || !useless_type_conversion_p (lhs_type, rhs3_type))
4021 error ("type mismatch in fused multiply-add expression");
4022 debug_generic_expr (lhs_type);
4023 debug_generic_expr (rhs1_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4028 break;
4030 case VEC_COND_EXPR:
4031 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4032 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4033 != TYPE_VECTOR_SUBPARTS (lhs_type))
4035 error ("the first argument of a VEC_COND_EXPR must be of a "
4036 "boolean vector type of the same number of elements "
4037 "as the result");
4038 debug_generic_expr (lhs_type);
4039 debug_generic_expr (rhs1_type);
4040 return true;
4042 /* Fallthrough. */
4043 case COND_EXPR:
4044 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4045 || !useless_type_conversion_p (lhs_type, rhs3_type))
4047 error ("type mismatch in conditional expression");
4048 debug_generic_expr (lhs_type);
4049 debug_generic_expr (rhs2_type);
4050 debug_generic_expr (rhs3_type);
4051 return true;
4053 break;
4055 case VEC_PERM_EXPR:
4056 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4057 || !useless_type_conversion_p (lhs_type, rhs2_type))
4059 error ("type mismatch in vector permute expression");
4060 debug_generic_expr (lhs_type);
4061 debug_generic_expr (rhs1_type);
4062 debug_generic_expr (rhs2_type);
4063 debug_generic_expr (rhs3_type);
4064 return true;
4067 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4068 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4069 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4071 error ("vector types expected in vector permute expression");
4072 debug_generic_expr (lhs_type);
4073 debug_generic_expr (rhs1_type);
4074 debug_generic_expr (rhs2_type);
4075 debug_generic_expr (rhs3_type);
4076 return true;
4079 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4080 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4081 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4082 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4083 != TYPE_VECTOR_SUBPARTS (lhs_type))
4085 error ("vectors with different element number found "
4086 "in vector permute expression");
4087 debug_generic_expr (lhs_type);
4088 debug_generic_expr (rhs1_type);
4089 debug_generic_expr (rhs2_type);
4090 debug_generic_expr (rhs3_type);
4091 return true;
4094 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4095 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4096 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4098 error ("invalid mask type in vector permute expression");
4099 debug_generic_expr (lhs_type);
4100 debug_generic_expr (rhs1_type);
4101 debug_generic_expr (rhs2_type);
4102 debug_generic_expr (rhs3_type);
4103 return true;
4106 return false;
4108 case SAD_EXPR:
4109 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4110 || !useless_type_conversion_p (lhs_type, rhs3_type)
4111 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4112 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4114 error ("type mismatch in sad expression");
4115 debug_generic_expr (lhs_type);
4116 debug_generic_expr (rhs1_type);
4117 debug_generic_expr (rhs2_type);
4118 debug_generic_expr (rhs3_type);
4119 return true;
4122 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4124 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4126 error ("vector types expected in sad expression");
4127 debug_generic_expr (lhs_type);
4128 debug_generic_expr (rhs1_type);
4129 debug_generic_expr (rhs2_type);
4130 debug_generic_expr (rhs3_type);
4131 return true;
4134 return false;
4136 case BIT_INSERT_EXPR:
4137 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4139 error ("type mismatch in BIT_INSERT_EXPR");
4140 debug_generic_expr (lhs_type);
4141 debug_generic_expr (rhs1_type);
4142 return true;
4144 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4145 && INTEGRAL_TYPE_P (rhs2_type))
4146 || (VECTOR_TYPE_P (rhs1_type)
4147 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4149 error ("not allowed type combination in BIT_INSERT_EXPR");
4150 debug_generic_expr (rhs1_type);
4151 debug_generic_expr (rhs2_type);
4152 return true;
4154 if (! tree_fits_uhwi_p (rhs3)
4155 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4157 error ("invalid position or size in BIT_INSERT_EXPR");
4158 return true;
4160 if (INTEGRAL_TYPE_P (rhs1_type))
4162 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4163 if (bitpos >= TYPE_PRECISION (rhs1_type)
4164 || (bitpos + TYPE_PRECISION (rhs2_type)
4165 > TYPE_PRECISION (rhs1_type)))
4167 error ("insertion out of range in BIT_INSERT_EXPR");
4168 return true;
4171 else if (VECTOR_TYPE_P (rhs1_type))
4173 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4174 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4175 if (bitpos % bitsize != 0)
4177 error ("vector insertion not at element boundary");
4178 return true;
4181 return false;
4183 case DOT_PROD_EXPR:
4184 case REALIGN_LOAD_EXPR:
4185 /* FIXME. */
4186 return false;
4188 default:
4189 gcc_unreachable ();
4191 return false;
4194 /* Verify a gimple assignment statement STMT with a single rhs.
4195 Returns true if anything is wrong. */
4197 static bool
4198 verify_gimple_assign_single (gassign *stmt)
4200 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4201 tree lhs = gimple_assign_lhs (stmt);
4202 tree lhs_type = TREE_TYPE (lhs);
4203 tree rhs1 = gimple_assign_rhs1 (stmt);
4204 tree rhs1_type = TREE_TYPE (rhs1);
4205 bool res = false;
4207 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4209 error ("non-trivial conversion at assignment");
4210 debug_generic_expr (lhs_type);
4211 debug_generic_expr (rhs1_type);
4212 return true;
4215 if (gimple_clobber_p (stmt)
4216 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4218 error ("non-decl/MEM_REF LHS in clobber statement");
4219 debug_generic_expr (lhs);
4220 return true;
4223 if (handled_component_p (lhs)
4224 || TREE_CODE (lhs) == MEM_REF
4225 || TREE_CODE (lhs) == TARGET_MEM_REF)
4226 res |= verify_types_in_gimple_reference (lhs, true);
4228 /* Special codes we cannot handle via their class. */
4229 switch (rhs_code)
4231 case ADDR_EXPR:
4233 tree op = TREE_OPERAND (rhs1, 0);
4234 if (!is_gimple_addressable (op))
4236 error ("invalid operand in unary expression");
4237 return true;
4240 /* Technically there is no longer a need for matching types, but
4241 gimple hygiene asks for this check. In LTO we can end up
4242 combining incompatible units and thus with addresses of
4243 globals whose type was changed to a common one. */
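/* For instance (an illustrative sketch, not a case from this file):
   one unit may declare "extern int x[];" and another "extern int x[4];";
   after LTO merges the two decls, the type of the merged global need no
   longer match the type the ADDR_EXPR was originally built against. */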
4244 if (!in_lto_p
4245 && !types_compatible_p (TREE_TYPE (op),
4246 TREE_TYPE (TREE_TYPE (rhs1)))
4247 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4248 TREE_TYPE (op)))
4250 error ("type mismatch in address expression");
4251 debug_generic_stmt (TREE_TYPE (rhs1));
4252 debug_generic_stmt (TREE_TYPE (op));
4253 return true;
4256 return verify_types_in_gimple_reference (op, true);
4259 /* tcc_reference */
4260 case INDIRECT_REF:
4261 error ("INDIRECT_REF in gimple IL");
4262 return true;
4264 case COMPONENT_REF:
4265 case BIT_FIELD_REF:
4266 case ARRAY_REF:
4267 case ARRAY_RANGE_REF:
4268 case VIEW_CONVERT_EXPR:
4269 case REALPART_EXPR:
4270 case IMAGPART_EXPR:
4271 case TARGET_MEM_REF:
4272 case MEM_REF:
4273 if (!is_gimple_reg (lhs)
4274 && is_gimple_reg_type (TREE_TYPE (lhs)))
4276 error ("invalid rhs for gimple memory store");
4277 debug_generic_stmt (lhs);
4278 debug_generic_stmt (rhs1);
4279 return true;
4281 return res || verify_types_in_gimple_reference (rhs1, false);
4283 /* tcc_constant */
4284 case SSA_NAME:
4285 case INTEGER_CST:
4286 case REAL_CST:
4287 case FIXED_CST:
4288 case COMPLEX_CST:
4289 case VECTOR_CST:
4290 case STRING_CST:
4291 return res;
4293 /* tcc_declaration */
4294 case CONST_DECL:
4295 return res;
4296 case VAR_DECL:
4297 case PARM_DECL:
4298 if (!is_gimple_reg (lhs)
4299 && !is_gimple_reg (rhs1)
4300 && is_gimple_reg_type (TREE_TYPE (lhs)))
4302 error ("invalid rhs for gimple memory store");
4303 debug_generic_stmt (lhs);
4304 debug_generic_stmt (rhs1);
4305 return true;
4307 return res;
4309 case CONSTRUCTOR:
4310 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4312 unsigned int i;
4313 tree elt_i, elt_v, elt_t = NULL_TREE;
4315 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4316 return res;
4317 /* For vector CONSTRUCTORs we require that either it is an empty
4318 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4319 (in which case the element count must be correct to cover the
4320 whole outer vector and the index must be NULL on all elements),
4321 or it is a CONSTRUCTOR of scalar elements, where as an exception
4322 we allow a smaller number of elements (assuming zero filling)
4323 and consecutive indexes as compared to NULL indexes (such
4324 CONSTRUCTORs can appear in the IL from FEs). */
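/* For example (illustrative GIMPLE, assuming a V4SI destination v,
   V2SI values w_1/w_2 and SImode values i_1/i_2):
     v = {};              OK - empty CONSTRUCTOR
     v = { w_1, w_2 };    OK - smaller vectors covering v, NULL indexes
     v = { i_1, i_2 };    OK - scalars, the missing tail assumed zero
     v = { [3]: i_1 };    not OK - index is not consecutive from 0. */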
4325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4327 if (elt_t == NULL_TREE)
4329 elt_t = TREE_TYPE (elt_v);
4330 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4332 tree elt_t = TREE_TYPE (elt_v);
4333 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4334 TREE_TYPE (elt_t)))
4336 error ("incorrect type of vector CONSTRUCTOR"
4337 " elements");
4338 debug_generic_stmt (rhs1);
4339 return true;
4341 else if (CONSTRUCTOR_NELTS (rhs1)
4342 * TYPE_VECTOR_SUBPARTS (elt_t)
4343 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4345 error ("incorrect number of vector CONSTRUCTOR"
4346 " elements");
4347 debug_generic_stmt (rhs1);
4348 return true;
4351 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4352 elt_t))
4354 error ("incorrect type of vector CONSTRUCTOR elements");
4355 debug_generic_stmt (rhs1);
4356 return true;
4358 else if (CONSTRUCTOR_NELTS (rhs1)
4359 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4361 error ("incorrect number of vector CONSTRUCTOR elements");
4362 debug_generic_stmt (rhs1);
4363 return true;
4366 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4368 error ("incorrect type of vector CONSTRUCTOR elements");
4369 debug_generic_stmt (rhs1);
4370 return true;
4372 if (elt_i != NULL_TREE
4373 && (TREE_CODE (elt_t) == VECTOR_TYPE
4374 || TREE_CODE (elt_i) != INTEGER_CST
4375 || compare_tree_int (elt_i, i) != 0))
4377 error ("vector CONSTRUCTOR with non-NULL element index");
4378 debug_generic_stmt (rhs1);
4379 return true;
4381 if (!is_gimple_val (elt_v))
4383 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4384 debug_generic_stmt (rhs1);
4385 return true;
4389 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4391 error ("non-vector CONSTRUCTOR with elements");
4392 debug_generic_stmt (rhs1);
4393 return true;
4395 return res;
4396 case OBJ_TYPE_REF:
4397 case ASSERT_EXPR:
4398 case WITH_SIZE_EXPR:
4399 /* FIXME. */
4400 return res;
4402 default:;
4405 return res;
4408 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4409 is a problem, otherwise false. */
4411 static bool
4412 verify_gimple_assign (gassign *stmt)
4414 switch (gimple_assign_rhs_class (stmt))
4416 case GIMPLE_SINGLE_RHS:
4417 return verify_gimple_assign_single (stmt);
4419 case GIMPLE_UNARY_RHS:
4420 return verify_gimple_assign_unary (stmt);
4422 case GIMPLE_BINARY_RHS:
4423 return verify_gimple_assign_binary (stmt);
4425 case GIMPLE_TERNARY_RHS:
4426 return verify_gimple_assign_ternary (stmt);
4428 default:
4429 gcc_unreachable ();
4433 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4434 is a problem, otherwise false. */
4436 static bool
4437 verify_gimple_return (greturn *stmt)
4439 tree op = gimple_return_retval (stmt);
4440 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4442 /* We cannot test for present return values as we do not fix up missing
4443 return values from the original source. */
4444 if (op == NULL)
4445 return false;
4447 if (!is_gimple_val (op)
4448 && TREE_CODE (op) != RESULT_DECL)
4450 error ("invalid operand in return statement");
4451 debug_generic_stmt (op);
4452 return true;
4455 if ((TREE_CODE (op) == RESULT_DECL
4456 && DECL_BY_REFERENCE (op))
4457 || (TREE_CODE (op) == SSA_NAME
4458 && SSA_NAME_VAR (op)
4459 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4460 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4461 op = TREE_TYPE (op);
4463 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4465 error ("invalid conversion in return statement");
4466 debug_generic_stmt (restype);
4467 debug_generic_stmt (TREE_TYPE (op));
4468 return true;
4471 return false;
4475 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4476 is a problem, otherwise false. */
4478 static bool
4479 verify_gimple_goto (ggoto *stmt)
4481 tree dest = gimple_goto_dest (stmt);
4483 /* ??? We have two canonical forms of direct goto destinations, a
4484 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
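/* E.g. (illustrative) a direct "goto lab;" may carry the bare
   LABEL_DECL "lab", while a computed "goto *p_1;" carries the
   pointer-valued GIMPLE value p_1. */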
4485 if (TREE_CODE (dest) != LABEL_DECL
4486 && (!is_gimple_val (dest)
4487 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4489 error ("goto destination is neither a label nor a pointer");
4490 return true;
4493 return false;
4496 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4497 is a problem, otherwise false. */
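/* For instance (illustrative), a well-formed case vector looks like
   { default:, case 1:, case 3 ... 5: }: the default label comes first
   and has no CASE_LOW, and the remaining labels are sorted,
   non-overlapping and all of one type. */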
4499 static bool
4500 verify_gimple_switch (gswitch *stmt)
4502 unsigned int i, n;
4503 tree elt, prev_upper_bound = NULL_TREE;
4504 tree index_type, elt_type = NULL_TREE;
4506 if (!is_gimple_val (gimple_switch_index (stmt)))
4508 error ("invalid operand to switch statement");
4509 debug_generic_stmt (gimple_switch_index (stmt));
4510 return true;
4513 index_type = TREE_TYPE (gimple_switch_index (stmt));
4514 if (! INTEGRAL_TYPE_P (index_type))
4516 error ("non-integral type switch statement");
4517 debug_generic_expr (index_type);
4518 return true;
4521 elt = gimple_switch_label (stmt, 0);
4522 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4524 error ("invalid default case label in switch statement");
4525 debug_generic_expr (elt);
4526 return true;
4529 n = gimple_switch_num_labels (stmt);
4530 for (i = 1; i < n; i++)
4532 elt = gimple_switch_label (stmt, i);
4534 if (! CASE_LOW (elt))
4536 error ("invalid case label in switch statement");
4537 debug_generic_expr (elt);
4538 return true;
4540 if (CASE_HIGH (elt)
4541 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4543 error ("invalid case range in switch statement");
4544 debug_generic_expr (elt);
4545 return true;
4548 if (elt_type)
4550 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4551 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4553 error ("type mismatch for case label in switch statement");
4554 debug_generic_expr (elt);
4555 return true;
4558 else
4560 elt_type = TREE_TYPE (CASE_LOW (elt));
4561 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4563 error ("type precision mismatch in switch statement");
4564 return true;
4568 if (prev_upper_bound)
4570 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4572 error ("case labels not sorted in switch statement");
4573 return true;
4577 prev_upper_bound = CASE_HIGH (elt);
4578 if (! prev_upper_bound)
4579 prev_upper_bound = CASE_LOW (elt);
4582 return false;
4585 /* Verify a gimple debug statement STMT.
4586 Returns true if anything is wrong. */
4588 static bool
4589 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4591 /* There isn't much that could be wrong in a gimple debug stmt. A
4592 gimple debug bind stmt, for example, maps a tree that is usually
4593 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4594 component or member of an aggregate type) to another tree that
4595 can be an arbitrary expression. These stmts expand into debug
4596 insns, and are converted to debug notes by var-tracking.c. */
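/* E.g. a bind printed in the dumps as "# DEBUG x => y_1 + 1" is
   acceptable as-is; no operand or type constraint is enforced here. */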
4597 return false;
4600 /* Verify a gimple label statement STMT.
4601 Returns true if anything is wrong. */
4603 static bool
4604 verify_gimple_label (glabel *stmt)
4606 tree decl = gimple_label_label (stmt);
4607 int uid;
4608 bool err = false;
4610 if (TREE_CODE (decl) != LABEL_DECL)
4611 return true;
4612 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4613 && DECL_CONTEXT (decl) != current_function_decl)
4615 error ("label's context is not the current function decl");
4616 err |= true;
4619 uid = LABEL_DECL_UID (decl);
4620 if (cfun->cfg
4621 && (uid == -1
4622 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4624 error ("incorrect entry in label_to_block_map");
4625 err |= true;
4628 uid = EH_LANDING_PAD_NR (decl);
4629 if (uid)
4631 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4632 if (decl != lp->post_landing_pad)
4634 error ("incorrect setting of landing pad number");
4635 err |= true;
4639 return err;
4642 /* Verify a gimple cond statement STMT.
4643 Returns true if anything is wrong. */
4645 static bool
4646 verify_gimple_cond (gcond *stmt)
4648 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4650 error ("invalid comparison code in gimple cond");
4651 return true;
4653 if (!(!gimple_cond_true_label (stmt)
4654 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4655 || !(!gimple_cond_false_label (stmt)
4656 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4658 error ("invalid labels in gimple cond");
4659 return true;
4662 return verify_gimple_comparison (boolean_type_node,
4663 gimple_cond_lhs (stmt),
4664 gimple_cond_rhs (stmt),
4665 gimple_cond_code (stmt));
4668 /* Verify the GIMPLE statement STMT. Returns true if there is an
4669 error, otherwise false. */
4671 static bool
4672 verify_gimple_stmt (gimple *stmt)
4674 switch (gimple_code (stmt))
4676 case GIMPLE_ASSIGN:
4677 return verify_gimple_assign (as_a <gassign *> (stmt));
4679 case GIMPLE_LABEL:
4680 return verify_gimple_label (as_a <glabel *> (stmt));
4682 case GIMPLE_CALL:
4683 return verify_gimple_call (as_a <gcall *> (stmt));
4685 case GIMPLE_COND:
4686 return verify_gimple_cond (as_a <gcond *> (stmt));
4688 case GIMPLE_GOTO:
4689 return verify_gimple_goto (as_a <ggoto *> (stmt));
4691 case GIMPLE_SWITCH:
4692 return verify_gimple_switch (as_a <gswitch *> (stmt));
4694 case GIMPLE_RETURN:
4695 return verify_gimple_return (as_a <greturn *> (stmt));
4697 case GIMPLE_ASM:
4698 return false;
4700 case GIMPLE_TRANSACTION:
4701 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4703 /* Tuples that do not have tree operands. */
4704 case GIMPLE_NOP:
4705 case GIMPLE_PREDICT:
4706 case GIMPLE_RESX:
4707 case GIMPLE_EH_DISPATCH:
4708 case GIMPLE_EH_MUST_NOT_THROW:
4709 return false;
4711 CASE_GIMPLE_OMP:
4712 /* OpenMP directives are validated by the FE and never operated
4713 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4714 non-gimple expressions when the main index variable has had
4715 its address taken. This does not affect the loop itself
4716 because the header of a GIMPLE_OMP_FOR is merely used to determine
4717 how to set up the parallel iteration. */
4718 return false;
4720 case GIMPLE_DEBUG:
4721 return verify_gimple_debug (stmt);
4723 default:
4724 gcc_unreachable ();
4728 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4729 and false otherwise. */
4731 static bool
4732 verify_gimple_phi (gimple *phi)
4734 bool err = false;
4735 unsigned i;
4736 tree phi_result = gimple_phi_result (phi);
4737 bool virtual_p;
4739 if (!phi_result)
4741 error ("invalid PHI result");
4742 return true;
4745 virtual_p = virtual_operand_p (phi_result);
4746 if (TREE_CODE (phi_result) != SSA_NAME
4747 || (virtual_p
4748 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4750 error ("invalid PHI result");
4751 err = true;
4754 for (i = 0; i < gimple_phi_num_args (phi); i++)
4756 tree t = gimple_phi_arg_def (phi, i);
4758 if (!t)
4760 error ("missing PHI def");
4761 err |= true;
4762 continue;
4764 /* Addressable variables do have SSA_NAMEs but they
4765 are not considered gimple values. */
4766 else if ((TREE_CODE (t) == SSA_NAME
4767 && virtual_p != virtual_operand_p (t))
4768 || (virtual_p
4769 && (TREE_CODE (t) != SSA_NAME
4770 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4771 || (!virtual_p
4772 && !is_gimple_val (t)))
4774 error ("invalid PHI argument");
4775 debug_generic_expr (t);
4776 err |= true;
4778 #ifdef ENABLE_TYPES_CHECKING
4779 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4781 error ("incompatible types in PHI argument %u", i);
4782 debug_generic_stmt (TREE_TYPE (phi_result));
4783 debug_generic_stmt (TREE_TYPE (t));
4784 err |= true;
4786 #endif
4789 return err;
4792 /* Verify the GIMPLE statements inside the sequence STMTS. */
4794 static bool
4795 verify_gimple_in_seq_2 (gimple_seq stmts)
4797 gimple_stmt_iterator ittr;
4798 bool err = false;
4800 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4802 gimple *stmt = gsi_stmt (ittr);
4804 switch (gimple_code (stmt))
4806 case GIMPLE_BIND:
4807 err |= verify_gimple_in_seq_2 (
4808 gimple_bind_body (as_a <gbind *> (stmt)));
4809 break;
4811 case GIMPLE_TRY:
4812 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4813 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4814 break;
4816 case GIMPLE_EH_FILTER:
4817 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4818 break;
4820 case GIMPLE_EH_ELSE:
4822 geh_else *eh_else = as_a <geh_else *> (stmt);
4823 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4824 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4826 break;
4828 case GIMPLE_CATCH:
4829 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4830 as_a <gcatch *> (stmt)));
4831 break;
4833 case GIMPLE_TRANSACTION:
4834 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4835 break;
4837 default:
4839 bool err2 = verify_gimple_stmt (stmt);
4840 if (err2)
4841 debug_gimple_stmt (stmt);
4842 err |= err2;
4847 return err;
4850 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4851 is a problem, otherwise false. */
4853 static bool
4854 verify_gimple_transaction (gtransaction *stmt)
4856 tree lab;
4858 lab = gimple_transaction_label_norm (stmt);
4859 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4860 return true;
4861 lab = gimple_transaction_label_uninst (stmt);
4862 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4863 return true;
4864 lab = gimple_transaction_label_over (stmt);
4865 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4866 return true;
4868 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4872 /* Verify the GIMPLE statements inside the statement list STMTS. */
4874 DEBUG_FUNCTION void
4875 verify_gimple_in_seq (gimple_seq stmts)
4877 timevar_push (TV_TREE_STMT_VERIFY);
4878 if (verify_gimple_in_seq_2 (stmts))
4879 internal_error ("verify_gimple failed");
4880 timevar_pop (TV_TREE_STMT_VERIFY);
4883 /* Return true when the tree node T can be shared. */
4885 static bool
4886 tree_node_can_be_shared (tree t)
4888 if (IS_TYPE_OR_DECL_P (t)
4889 || is_gimple_min_invariant (t)
4890 || TREE_CODE (t) == SSA_NAME
4891 || t == error_mark_node
4892 || TREE_CODE (t) == IDENTIFIER_NODE)
4893 return true;
4895 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4896 return true;
4898 if (DECL_P (t))
4899 return true;
4901 return false;
4904 /* Called via walk_tree. Verify tree sharing. */
4906 static tree
4907 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4909 hash_set<void *> *visited = (hash_set<void *> *) data;
4911 if (tree_node_can_be_shared (*tp))
4913 *walk_subtrees = false;
4914 return NULL;
4917 if (visited->add (*tp))
4918 return *tp;
4920 return NULL;
4923 /* Called via walk_gimple_stmt. Verify tree sharing. */
4925 static tree
4926 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4928 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4929 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4932 static bool eh_error_found;
4933 bool
4934 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4935 hash_set<gimple *> *visited)
4937 if (!visited->contains (stmt))
4939 error ("dead STMT in EH table");
4940 debug_gimple_stmt (stmt);
4941 eh_error_found = true;
4943 return true;
4946 /* Verify that the block of location LOC is in BLOCKS. */
4948 static bool
4949 verify_location (hash_set<tree> *blocks, location_t loc)
4951 tree block = LOCATION_BLOCK (loc);
4952 if (block != NULL_TREE
4953 && !blocks->contains (block))
4955 error ("location references block not in block tree");
4956 return true;
4958 if (block != NULL_TREE)
4959 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4960 return false;
4963 /* Called via walk_tree. Verify that expressions have no blocks. */
4965 static tree
4966 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4968 if (!EXPR_P (*tp))
4970 *walk_subtrees = false;
4971 return NULL;
4974 location_t loc = EXPR_LOCATION (*tp);
4975 if (LOCATION_BLOCK (loc) != NULL)
4976 return *tp;
4978 return NULL;
4981 /* Called via walk_tree. Verify locations of expressions. */
4983 static tree
4984 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4986 hash_set<tree> *blocks = (hash_set<tree> *) data;
4988 if (TREE_CODE (*tp) == VAR_DECL
4989 && DECL_HAS_DEBUG_EXPR_P (*tp))
4991 tree t = DECL_DEBUG_EXPR (*tp);
4992 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4993 if (addr)
4994 return addr;
4996 if ((TREE_CODE (*tp) == VAR_DECL
4997 || TREE_CODE (*tp) == PARM_DECL
4998 || TREE_CODE (*tp) == RESULT_DECL)
4999 && DECL_HAS_VALUE_EXPR_P (*tp))
5001 tree t = DECL_VALUE_EXPR (*tp);
5002 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5003 if (addr)
5004 return addr;
5007 if (!EXPR_P (*tp))
5009 *walk_subtrees = false;
5010 return NULL;
5013 location_t loc = EXPR_LOCATION (*tp);
5014 if (verify_location (blocks, loc))
5015 return *tp;
5017 return NULL;
5020 /* Called via walk_gimple_op. Verify locations of expressions. */
5022 static tree
5023 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5025 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5026 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5029 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5031 static void
5032 collect_subblocks (hash_set<tree> *blocks, tree block)
5034 tree t;
5035 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5037 blocks->add (t);
5038 collect_subblocks (blocks, t);
5042 /* Verify the GIMPLE statements in the CFG of FN. */
5044 DEBUG_FUNCTION void
5045 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5047 basic_block bb;
5048 bool err = false;
5050 timevar_push (TV_TREE_STMT_VERIFY);
5051 hash_set<void *> visited;
5052 hash_set<gimple *> visited_stmts;
5054 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5055 hash_set<tree> blocks;
5056 if (DECL_INITIAL (fn->decl))
5058 blocks.add (DECL_INITIAL (fn->decl));
5059 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5062 FOR_EACH_BB_FN (bb, fn)
5064 gimple_stmt_iterator gsi;
5066 for (gphi_iterator gpi = gsi_start_phis (bb);
5067 !gsi_end_p (gpi);
5068 gsi_next (&gpi))
5070 gphi *phi = gpi.phi ();
5071 bool err2 = false;
5072 unsigned i;
5074 visited_stmts.add (phi);
5076 if (gimple_bb (phi) != bb)
5078 error ("gimple_bb (phi) is set to a wrong basic block");
5079 err2 = true;
5082 err2 |= verify_gimple_phi (phi);
5084 /* Only PHI arguments have locations. */
5085 if (gimple_location (phi) != UNKNOWN_LOCATION)
5087 error ("PHI node with location");
5088 err2 = true;
5091 for (i = 0; i < gimple_phi_num_args (phi); i++)
5093 tree arg = gimple_phi_arg_def (phi, i);
5094 tree addr = walk_tree (&arg, verify_node_sharing_1,
5095 &visited, NULL);
5096 if (addr)
5098 error ("incorrect sharing of tree nodes");
5099 debug_generic_expr (addr);
5100 err2 |= true;
5102 location_t loc = gimple_phi_arg_location (phi, i);
5103 if (virtual_operand_p (gimple_phi_result (phi))
5104 && loc != UNKNOWN_LOCATION)
5106 error ("virtual PHI with argument locations");
5107 err2 = true;
5109 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5110 if (addr)
5112 debug_generic_expr (addr);
5113 err2 = true;
5115 err2 |= verify_location (&blocks, loc);
5118 if (err2)
5119 debug_gimple_stmt (phi);
5120 err |= err2;
5123 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5125 gimple *stmt = gsi_stmt (gsi);
5126 bool err2 = false;
5127 struct walk_stmt_info wi;
5128 tree addr;
5129 int lp_nr;
5131 visited_stmts.add (stmt);
5133 if (gimple_bb (stmt) != bb)
5135 error ("gimple_bb (stmt) is set to a wrong basic block");
5136 err2 = true;
5139 err2 |= verify_gimple_stmt (stmt);
5140 err2 |= verify_location (&blocks, gimple_location (stmt));
5142 memset (&wi, 0, sizeof (wi));
5143 wi.info = (void *) &visited;
5144 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5145 if (addr)
5147 error ("incorrect sharing of tree nodes");
5148 debug_generic_expr (addr);
5149 err2 |= true;
5152 memset (&wi, 0, sizeof (wi));
5153 wi.info = (void *) &blocks;
5154 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5155 if (addr)
5157 debug_generic_expr (addr);
5158 err2 |= true;
5161 /* ??? Instead of not checking these stmts at all the walker
5162 should know its context via wi. */
5163 if (!is_gimple_debug (stmt)
5164 && !is_gimple_omp (stmt))
5166 memset (&wi, 0, sizeof (wi));
5167 addr = walk_gimple_op (stmt, verify_expr, &wi);
5168 if (addr)
5170 debug_generic_expr (addr);
5171 inform (gimple_location (stmt), "in statement");
5172 err2 |= true;
5176 /* If the statement is marked as part of an EH region, then it is
5177 expected that the statement could throw. Verify that when we
5178 have optimizations that simplify statements such that we prove
5179 that they cannot throw, that we update other data structures
5180 to match. */
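/* For instance, a pass that proves such a statement cannot throw
   must also purge its EH region info (e.g. via remove_stmt_from_eh_lp);
   otherwise this check fires. */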
5181 lp_nr = lookup_stmt_eh_lp (stmt);
5182 if (lp_nr > 0)
5184 if (!stmt_could_throw_p (stmt))
5186 if (verify_nothrow)
5188 error ("statement marked for throw, but doesn%'t");
5189 err2 |= true;
5192 else if (!gsi_one_before_end_p (gsi))
5194 error ("statement marked for throw in middle of block");
5195 err2 |= true;
5199 if (err2)
5200 debug_gimple_stmt (stmt);
5201 err |= err2;
5205 eh_error_found = false;
5206 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5207 if (eh_table)
5208 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5209 (&visited_stmts);
5211 if (err || eh_error_found)
5212 internal_error ("verify_gimple failed");
5214 verify_histograms ();
5215 timevar_pop (TV_TREE_STMT_VERIFY);
5219 /* Verifies that the flow information is OK. */
5221 static int
5222 gimple_verify_flow_info (void)
5224 int err = 0;
5225 basic_block bb;
5226 gimple_stmt_iterator gsi;
5227 gimple *stmt;
5228 edge e;
5229 edge_iterator ei;
5231 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5232 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5234 error ("ENTRY_BLOCK has IL associated with it");
5235 err = 1;
5238 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5239 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5241 error ("EXIT_BLOCK has IL associated with it");
5242 err = 1;
5245 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5246 if (e->flags & EDGE_FALLTHRU)
5248 error ("fallthru to exit from bb %d", e->src->index);
5249 err = 1;
5252 FOR_EACH_BB_FN (bb, cfun)
5254 bool found_ctrl_stmt = false;
5256 stmt = NULL;
5258 /* Skip labels on the start of basic block. */
5259 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5261 tree label;
5262 gimple *prev_stmt = stmt;
5264 stmt = gsi_stmt (gsi);
5266 if (gimple_code (stmt) != GIMPLE_LABEL)
5267 break;
5269 label = gimple_label_label (as_a <glabel *> (stmt));
5270 if (prev_stmt && DECL_NONLOCAL (label))
5272 error ("nonlocal label ");
5273 print_generic_expr (stderr, label, 0);
5274 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5275 bb->index);
5276 err = 1;
5279 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5281 error ("EH landing pad label ");
5282 print_generic_expr (stderr, label, 0);
5283 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5284 bb->index);
5285 err = 1;
5288 if (label_to_block (label) != bb)
5290 error ("label ");
5291 print_generic_expr (stderr, label, 0);
5292 fprintf (stderr, " to block does not match in bb %d",
5293 bb->index);
5294 err = 1;
5297 if (decl_function_context (label) != current_function_decl)
5299 error ("label ");
5300 print_generic_expr (stderr, label, 0);
5301 fprintf (stderr, " has incorrect context in bb %d",
5302 bb->index);
5303 err = 1;
5307 /* Verify that body of basic block BB is free of control flow. */
5308 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5310 gimple *stmt = gsi_stmt (gsi);
5312 if (found_ctrl_stmt)
5314 error ("control flow in the middle of basic block %d",
5315 bb->index);
5316 err = 1;
5319 if (stmt_ends_bb_p (stmt))
5320 found_ctrl_stmt = true;
5322 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5324 error ("label ");
5325 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5326 fprintf (stderr, " in the middle of basic block %d", bb->index);
5327 err = 1;
5331 gsi = gsi_last_bb (bb);
5332 if (gsi_end_p (gsi))
5333 continue;
5335 stmt = gsi_stmt (gsi);
5337 if (gimple_code (stmt) == GIMPLE_LABEL)
5338 continue;
5340 err |= verify_eh_edges (stmt);
5342 if (is_ctrl_stmt (stmt))
5344 FOR_EACH_EDGE (e, ei, bb->succs)
5345 if (e->flags & EDGE_FALLTHRU)
5347 error ("fallthru edge after a control statement in bb %d",
5348 bb->index);
5349 err = 1;
5353 if (gimple_code (stmt) != GIMPLE_COND)
5355 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5356 after anything other than a GIMPLE_COND. */
5357 FOR_EACH_EDGE (e, ei, bb->succs)
5358 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5360 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5361 bb->index);
5362 err = 1;
5366 switch (gimple_code (stmt))
5368 case GIMPLE_COND:
5370 edge true_edge;
5371 edge false_edge;
5373 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5375 if (!true_edge
5376 || !false_edge
5377 || !(true_edge->flags & EDGE_TRUE_VALUE)
5378 || !(false_edge->flags & EDGE_FALSE_VALUE)
5379 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5380 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5381 || EDGE_COUNT (bb->succs) >= 3)
5383 error ("wrong outgoing edge flags at end of bb %d",
5384 bb->index);
5385 err = 1;
5388 break;
5390 case GIMPLE_GOTO:
5391 if (simple_goto_p (stmt))
5393 error ("explicit goto at end of bb %d", bb->index);
5394 err = 1;
5396 else
5398 /* FIXME. We should double check that the labels in the
5399 destination blocks have their address taken. */
5400 FOR_EACH_EDGE (e, ei, bb->succs)
5401 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5402 | EDGE_FALSE_VALUE))
5403 || !(e->flags & EDGE_ABNORMAL))
5405 error ("wrong outgoing edge flags at end of bb %d",
5406 bb->index);
5407 err = 1;
5410 break;
5412 case GIMPLE_CALL:
5413 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5414 break;
5415 /* fallthru */
5416 case GIMPLE_RETURN:
5417 if (!single_succ_p (bb)
5418 || (single_succ_edge (bb)->flags
5419 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5420 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5422 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5423 err = 1;
5425 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5427 error ("return edge does not point to exit in bb %d",
5428 bb->index);
5429 err = 1;
5431 break;
5433 case GIMPLE_SWITCH:
5435 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5436 tree prev;
5437 edge e;
5438 size_t i, n;
5440 n = gimple_switch_num_labels (switch_stmt);
5442 /* Mark all the destination basic blocks. */
5443 for (i = 0; i < n; ++i)
5445 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5446 basic_block label_bb = label_to_block (lab);
5447 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5448 label_bb->aux = (void *)1;
5451 /* Verify that the case labels are sorted. */
5452 prev = gimple_switch_label (switch_stmt, 0);
5453 for (i = 1; i < n; ++i)
5455 tree c = gimple_switch_label (switch_stmt, i);
5456 if (!CASE_LOW (c))
5458 error ("found default case not at the start of "
5459 "case vector");
5460 err = 1;
5461 continue;
5463 if (CASE_LOW (prev)
5464 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5466 error ("case labels not sorted: ");
5467 print_generic_expr (stderr, prev, 0);
5468 fprintf (stderr, " is greater than ");
5469 print_generic_expr (stderr, c, 0);
5470 fprintf (stderr, " but comes before it.\n");
5471 err = 1;
5473 prev = c;
5475 /* VRP will remove the default case if it can prove it will
5476 never be executed. So do not verify there always exists
5477 a default case here. */
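/* For instance (illustrative), in "switch (x & 1)" with case 0 and
   case 1 both present, VRP can prove the default label unreachable
   and remove it. */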
5479 FOR_EACH_EDGE (e, ei, bb->succs)
5481 if (!e->dest->aux)
5483 error ("extra outgoing edge %d->%d",
5484 bb->index, e->dest->index);
5485 err = 1;
5488 e->dest->aux = (void *)2;
5489 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5490 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5492 error ("wrong outgoing edge flags at end of bb %d",
5493 bb->index);
5494 err = 1;
5498 /* Check that we have all of them. */
5499 for (i = 0; i < n; ++i)
5501 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5502 basic_block label_bb = label_to_block (lab);
5504 if (label_bb->aux != (void *)2)
5506 error ("missing edge %i->%i", bb->index, label_bb->index);
5507 err = 1;
5511 FOR_EACH_EDGE (e, ei, bb->succs)
5512 e->dest->aux = (void *)0;
5514 break;
5516 case GIMPLE_EH_DISPATCH:
5517 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5518 break;
5520 default:
5521 break;
5525 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5526 verify_dominators (CDI_DOMINATORS);
5528 return err;
5532 /* Updates phi nodes after creating a forwarder block joined
5533 by edge FALLTHRU. */
5535 static void
5536 gimple_make_forwarder_block (edge fallthru)
5538 edge e;
5539 edge_iterator ei;
5540 basic_block dummy, bb;
5541 tree var;
5542 gphi_iterator gsi;
5544 dummy = fallthru->src;
5545 bb = fallthru->dest;
5547 if (single_pred_p (bb))
5548 return;
5550 /* If we redirected a branch we must create new PHI nodes at the
5551 start of BB. */
5552 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5554 gphi *phi, *new_phi;
5556 phi = gsi.phi ();
5557 var = gimple_phi_result (phi);
5558 new_phi = create_phi_node (var, bb);
5559 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5560 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5561 UNKNOWN_LOCATION);
5564 /* Add the arguments we have stored on edges. */
5565 FOR_EACH_EDGE (e, ei, bb->preds)
5567 if (e == fallthru)
5568 continue;
5570 flush_pending_stmts (e);
5575 /* Return a non-special label in the head of basic block BLOCK.
5576 Create one if it doesn't exist. */
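/* E.g. if BB starts only with a DECL_NONLOCAL label, that label cannot
   be reused as an ordinary jump target here; an artificial label is
   created and placed first instead. */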
5578 tree
5579 gimple_block_label (basic_block bb)
5581 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5582 bool first = true;
5583 tree label;
5584 glabel *stmt;
5586 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5588 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5589 if (!stmt)
5590 break;
5591 label = gimple_label_label (stmt);
5592 if (!DECL_NONLOCAL (label))
5594 if (!first)
5595 gsi_move_before (&i, &s);
5596 return label;
5600 label = create_artificial_label (UNKNOWN_LOCATION);
5601 stmt = gimple_build_label (label);
5602 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5603 return label;
5607 /* Attempt to perform edge redirection by replacing a possibly complex
5608 jump instruction by a goto or by removing the jump completely.
5609 This can apply only if all edges now point to the same block. The
5610 parameters and return values are equivalent to
5611 redirect_edge_and_branch. */
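/* E.g. when both outgoing edges of a block ending in a GIMPLE_COND
   already lead to TARGET, the condition is redundant; the statement
   can be removed and E turned into a fallthru edge. */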
5613 static edge
5614 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5616 basic_block src = e->src;
5617 gimple_stmt_iterator i;
5618 gimple *stmt;
5620 /* We can replace or remove a complex jump only when we have exactly
5621 two edges. */
5622 if (EDGE_COUNT (src->succs) != 2
5623 /* Verify that all targets will be TARGET. Specifically, the
5624 edge that is not E must also go to TARGET. */
5625 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5626 return NULL;
5628 i = gsi_last_bb (src);
5629 if (gsi_end_p (i))
5630 return NULL;
5632 stmt = gsi_stmt (i);
5634 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5636 gsi_remove (&i, true);
5637 e = ssa_redirect_edge (e, target);
5638 e->flags = EDGE_FALLTHRU;
5639 return e;
5642 return NULL;
5646 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5647 edge representing the redirected branch. */
5649 static edge
5650 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5652 basic_block bb = e->src;
5653 gimple_stmt_iterator gsi;
5654 edge ret;
5655 gimple *stmt;
5657 if (e->flags & EDGE_ABNORMAL)
5658 return NULL;
5660 if (e->dest == dest)
5661 return NULL;
5663 if (e->flags & EDGE_EH)
5664 return redirect_eh_edge (e, dest);
5666 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5668 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5669 if (ret)
5670 return ret;
5673 gsi = gsi_last_bb (bb);
5674 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5676 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5678 case GIMPLE_COND:
5679 /* For COND_EXPR, we only need to redirect the edge. */
5680 break;
5682 case GIMPLE_GOTO:
5683 /* No non-abnormal edges should lead from a non-simple goto, and
5684 simple ones should be represented implicitly. */
5685 gcc_unreachable ();
5687 case GIMPLE_SWITCH:
5689 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5690 tree label = gimple_block_label (dest);
5691 tree cases = get_cases_for_edge (e, switch_stmt);
5693 /* If we have a list of cases associated with E, then use it
5694 as it's a lot faster than walking the entire case vector. */
5695 if (cases)
5697 edge e2 = find_edge (e->src, dest);
5698 tree last, first;
5700 first = cases;
5701 while (cases)
5703 last = cases;
5704 CASE_LABEL (cases) = label;
5705 cases = CASE_CHAIN (cases);
5708 /* If there was already an edge in the CFG, then we need
5709 to move all the cases associated with E to E2. */
5710 if (e2)
5712 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5714 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5715 CASE_CHAIN (cases2) = first;
5717 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5719 else
5721 size_t i, n = gimple_switch_num_labels (switch_stmt);
5723 for (i = 0; i < n; i++)
5725 tree elt = gimple_switch_label (switch_stmt, i);
5726 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5727 CASE_LABEL (elt) = label;
5731 break;
5733 case GIMPLE_ASM:
5735 gasm *asm_stmt = as_a <gasm *> (stmt);
5736 int i, n = gimple_asm_nlabels (asm_stmt);
5737 tree label = NULL;
5739 for (i = 0; i < n; ++i)
5741 tree cons = gimple_asm_label_op (asm_stmt, i);
5742 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5744 if (!label)
5745 label = gimple_block_label (dest);
5746 TREE_VALUE (cons) = label;
5750 /* If we didn't find any label matching the former edge in the
5751 asm labels, we must be redirecting the fallthrough
5752 edge. */
5753 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5755 break;
5757 case GIMPLE_RETURN:
5758 gsi_remove (&gsi, true);
5759 e->flags |= EDGE_FALLTHRU;
5760 break;
5762 case GIMPLE_OMP_RETURN:
5763 case GIMPLE_OMP_CONTINUE:
5764 case GIMPLE_OMP_SECTIONS_SWITCH:
5765 case GIMPLE_OMP_FOR:
5766 /* The edges from OMP constructs can be simply redirected. */
5767 break;
5769 case GIMPLE_EH_DISPATCH:
5770 if (!(e->flags & EDGE_FALLTHRU))
5771 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5772 break;
5774 case GIMPLE_TRANSACTION:
5775 if (e->flags & EDGE_TM_ABORT)
5776 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5777 gimple_block_label (dest));
5778 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5779 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5780 gimple_block_label (dest));
5781 else
5782 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5783 gimple_block_label (dest));
5784 break;
5786 default:
5787 /* Otherwise it must be a fallthru edge, and we don't need to
5788 do anything besides redirecting it. */
5789 gcc_assert (e->flags & EDGE_FALLTHRU);
5790 break;
5793 /* Update/insert PHI nodes as necessary. */
5795 /* Now update the edges in the CFG. */
5796 e = ssa_redirect_edge (e, dest);
5798 return e;
5801 /* Returns true if it is possible to remove edge E by redirecting
5802 it to the destination of the other edge from E->src. */
5804 static bool
5805 gimple_can_remove_branch_p (const_edge e)
5807 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5808 return false;
5810 return true;
5813 /* Simple wrapper, as we can always redirect fallthru edges. */
5815 static basic_block
5816 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5818 e = gimple_redirect_edge_and_branch (e, dest);
5819 gcc_assert (e);
5821 return NULL;
5825 /* Splits basic block BB after statement STMT (but at least after the
5826 labels). If STMT is NULL, BB is split just after the labels. */
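/* E.g. splitting BB after its first non-label statement S leaves the
   labels and S in BB and moves every later statement into the newly
   created fallthru successor. */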
5828 static basic_block
5829 gimple_split_block (basic_block bb, void *stmt)
5831 gimple_stmt_iterator gsi;
5832 gimple_stmt_iterator gsi_tgt;
5833 gimple_seq list;
5834 basic_block new_bb;
5835 edge e;
5836 edge_iterator ei;
5838 new_bb = create_empty_bb (bb);
5840 /* Redirect the outgoing edges. */
5841 new_bb->succs = bb->succs;
5842 bb->succs = NULL;
5843 FOR_EACH_EDGE (e, ei, new_bb->succs)
5844 e->src = new_bb;
5846 /* Get a stmt iterator pointing to the first stmt to move. */
5847 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5848 gsi = gsi_after_labels (bb);
5849 else
5851 gsi = gsi_for_stmt ((gimple *) stmt);
5852 gsi_next (&gsi);
5855 /* Move everything from GSI to the new basic block. */
5856 if (gsi_end_p (gsi))
5857 return new_bb;
5859 /* Split the statement list - avoid re-creating new containers as this
5860 brings ugly quadratic memory consumption in the inliner.
5861 (We are still quadratic since we need to update stmt BB pointers,
5862 sadly.) */
5863 gsi_split_seq_before (&gsi, &list);
5864 set_bb_seq (new_bb, list);
5865 for (gsi_tgt = gsi_start (list);
5866 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5867 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5869 return new_bb;
5873 /* Moves basic block BB after block AFTER. */
5875 static bool
5876 gimple_move_block_after (basic_block bb, basic_block after)
5878 if (bb->prev_bb == after)
5879 return true;
5881 unlink_block (bb);
5882 link_block (bb, after);
5884 return true;
5888 /* Return TRUE if block BB has no executable statements, otherwise return
5889 FALSE. */
5891 static bool
5892 gimple_empty_block_p (basic_block bb)
5894 /* BB must have no executable statements. */
5895 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5896 if (phi_nodes (bb))
5897 return false;
5898 if (gsi_end_p (gsi))
5899 return true;
5900 if (is_gimple_debug (gsi_stmt (gsi)))
5901 gsi_next_nondebug (&gsi);
5902 return gsi_end_p (gsi);
5906 /* Split a basic block if it ends with a conditional branch and if the
5907 other part of the block is not empty. */
5909 static basic_block
5910 gimple_split_block_before_cond_jump (basic_block bb)
5912 gimple *last, *split_point;
5913 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5914 if (gsi_end_p (gsi))
5915 return NULL;
5916 last = gsi_stmt (gsi);
5917 if (gimple_code (last) != GIMPLE_COND
5918 && gimple_code (last) != GIMPLE_SWITCH)
5919 return NULL;
5920 gsi_prev (&gsi);
5921 split_point = gsi_stmt (gsi);
5922 return split_block (bb, split_point)->dest;
5926 /* Return true if basic_block can be duplicated. */
5928 static bool
5929 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5931 return true;
5934 /* Create a duplicate of the basic block BB. NOTE: This does not
5935 preserve SSA form. */
5937 static basic_block
5938 gimple_duplicate_bb (basic_block bb)
5940 basic_block new_bb;
5941 gimple_stmt_iterator gsi_tgt;
5943 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5945 /* Copy the PHI nodes. We ignore PHI node arguments here because
5946 the incoming edges have not been setup yet. */
5947 for (gphi_iterator gpi = gsi_start_phis (bb);
5948 !gsi_end_p (gpi);
5949 gsi_next (&gpi))
5951 gphi *phi, *copy;
5952 phi = gpi.phi ();
5953 copy = create_phi_node (NULL_TREE, new_bb);
5954 create_new_def_for (gimple_phi_result (phi), copy,
5955 gimple_phi_result_ptr (copy));
5956 gimple_set_uid (copy, gimple_uid (phi));
5959 gsi_tgt = gsi_start_bb (new_bb);
5960 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5961 !gsi_end_p (gsi);
5962 gsi_next (&gsi))
5964 def_operand_p def_p;
5965 ssa_op_iter op_iter;
5966 tree lhs;
5967 gimple *stmt, *copy;
5969 stmt = gsi_stmt (gsi);
5970 if (gimple_code (stmt) == GIMPLE_LABEL)
5971 continue;
5973 /* Don't duplicate label debug stmts. */
5974 if (gimple_debug_bind_p (stmt)
5975 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5976 == LABEL_DECL)
5977 continue;
5979 /* Create a new copy of STMT and duplicate STMT's virtual
5980 operands. */
5981 copy = gimple_copy (stmt);
5982 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5984 maybe_duplicate_eh_stmt (copy, stmt);
5985 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5987 /* When copying around a stmt writing into a local non-user
5988 aggregate, make sure it won't share stack slot with other
5989 vars. */
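/* E.g. (illustrative) two duplicated writes to an ignored temporary
   such as "D.2765 = <retval>;" must not later be coalesced into one
   stack slot, or the two copies could clobber each other's value. */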
5990 lhs = gimple_get_lhs (stmt);
5991 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5993 tree base = get_base_address (lhs);
5994 if (base
5995 && (TREE_CODE (base) == VAR_DECL
5996 || TREE_CODE (base) == RESULT_DECL)
5997 && DECL_IGNORED_P (base)
5998 && !TREE_STATIC (base)
5999 && !DECL_EXTERNAL (base)
6000 && (TREE_CODE (base) != VAR_DECL
6001 || !DECL_HAS_VALUE_EXPR_P (base)))
6002 DECL_NONSHAREABLE (base) = 1;
6005 /* Create new names for all the definitions created by COPY and
6006 add replacement mappings for each new name. */
6007 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6008 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6011 return new_bb;
6014 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6016 static void
6017 add_phi_args_after_copy_edge (edge e_copy)
6019 basic_block bb, bb_copy = e_copy->src, dest;
6020 edge e;
6021 edge_iterator ei;
6022 gphi *phi, *phi_copy;
6023 tree def;
6024 gphi_iterator psi, psi_copy;
6026 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6027 return;
6029 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6031 if (e_copy->dest->flags & BB_DUPLICATED)
6032 dest = get_bb_original (e_copy->dest);
6033 else
6034 dest = e_copy->dest;
6036 e = find_edge (bb, dest);
6037 if (!e)
6039 /* During loop unrolling the target of the latch edge is copied.
6040 In this case we are not looking for the edge to DEST, but for
6041 the edge to the duplicated block whose original was DEST. */
6042 FOR_EACH_EDGE (e, ei, bb->succs)
6044 if ((e->dest->flags & BB_DUPLICATED)
6045 && get_bb_original (e->dest) == dest)
6046 break;
6049 gcc_assert (e != NULL);
6052 for (psi = gsi_start_phis (e->dest),
6053 psi_copy = gsi_start_phis (e_copy->dest);
6054 !gsi_end_p (psi);
6055 gsi_next (&psi), gsi_next (&psi_copy))
6057 phi = psi.phi ();
6058 phi_copy = psi_copy.phi ();
6059 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6060 add_phi_arg (phi_copy, def, e_copy,
6061 gimple_phi_arg_location_from_edge (phi, e));
6066 /* Basic block BB_COPY was created by code duplication. Add phi node
6067 arguments for edges going out of BB_COPY. The blocks that were
6068 duplicated have BB_DUPLICATED set. */
6070 void
6071 add_phi_args_after_copy_bb (basic_block bb_copy)
6073 edge e_copy;
6074 edge_iterator ei;
6076 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6078 add_phi_args_after_copy_edge (e_copy);
6082 /* Blocks in REGION_COPY array of length N_REGION were created by
6083 duplication of basic blocks. Add phi node arguments for edges
6084 going from these blocks. If E_COPY is not NULL, also add
6085 phi node arguments for its destination. */
6087 void
6088 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6089 edge e_copy)
6091 unsigned i;
6093 for (i = 0; i < n_region; i++)
6094 region_copy[i]->flags |= BB_DUPLICATED;
6096 for (i = 0; i < n_region; i++)
6097 add_phi_args_after_copy_bb (region_copy[i]);
6098 if (e_copy)
6099 add_phi_args_after_copy_edge (e_copy);
6101 for (i = 0; i < n_region; i++)
6102 region_copy[i]->flags &= ~BB_DUPLICATED;
6105 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6106 important exit edge EXIT. By important we mean that no SSA name defined
6107 inside region is live over the other exit edges of the region. All entry
6108 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6109 to the duplicate of the region. Dominance and loop information is
6110 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6111 UPDATE_DOMINANCE is false then we assume that the caller will update the
6112 dominance information after calling this function. The new basic
6113 blocks are stored to REGION_COPY in the same order as they appear in REGION,
6114 provided that REGION_COPY is not NULL.
6115 The function returns false if it is unable to copy the region,
6116 true otherwise. */
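/* For instance (roughly), loop header copying passes REGION = { header },
   ENTRY = the preheader edge and EXIT = the edge from the header into
   the loop body; after duplication the copied header guards entry to
   the loop while the original header becomes the exit test at the new
   latch. */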
6118 bool
6119 gimple_duplicate_sese_region (edge entry, edge exit,
6120 basic_block *region, unsigned n_region,
6121 basic_block *region_copy,
6122 bool update_dominance)
6124 unsigned i;
6125 bool free_region_copy = false, copying_header = false;
6126 struct loop *loop = entry->dest->loop_father;
6127 edge exit_copy;
6128 vec<basic_block> doms;
6129 edge redirected;
6130 int total_freq = 0, entry_freq = 0;
6131 gcov_type total_count = 0, entry_count = 0;
6133 if (!can_copy_bbs_p (region, n_region))
6134 return false;
6136 /* Some sanity checking. Note that we do not check for all possible
6137 misuses of the function; if you ask to copy something weird,
6138 it will work, but the state of the structures probably will not
6139 be correct. */
6140 for (i = 0; i < n_region; i++)
6142 /* We do not handle subloops, i.e. all the blocks must belong to the
6143 same loop. */
6144 if (region[i]->loop_father != loop)
6145 return false;
6147 if (region[i] != entry->dest
6148 && region[i] == loop->header)
6149 return false;
6152 /* In case the function is used for loop header copying (which is the primary
6153 use), ensure that EXIT and its copy will be new latch and entry edges. */
6154 if (loop->header == entry->dest)
6156 copying_header = true;
6158 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6159 return false;
6161 for (i = 0; i < n_region; i++)
6162 if (region[i] != exit->src
6163 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6164 return false;
6167 initialize_original_copy_tables ();
6169 if (copying_header)
6170 set_loop_copy (loop, loop_outer (loop));
6171 else
6172 set_loop_copy (loop, loop);
6174 if (!region_copy)
6176 region_copy = XNEWVEC (basic_block, n_region);
6177 free_region_copy = true;
6180 /* Record blocks outside the region that are dominated by something
6181 inside. */
6182 if (update_dominance)
6184 doms.create (0);
6185 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6188 if (entry->dest->count)
6190 total_count = entry->dest->count;
6191 entry_count = entry->count;
6192 /* Fix up corner cases, to avoid division by zero or creation of negative
6193 frequencies. */
6194 if (entry_count > total_count)
6195 entry_count = total_count;
6197 else
6199 total_freq = entry->dest->frequency;
6200 entry_freq = EDGE_FREQUENCY (entry);
6201 /* Fix up corner cases, to avoid division by zero or creation of negative
6202 frequencies. */
6203 if (total_freq == 0)
6204 total_freq = 1;
6205 else if (entry_freq > total_freq)
6206 entry_freq = total_freq;
6209 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6210 split_edge_bb_loc (entry), update_dominance);
6211 if (total_count)
6213 scale_bbs_frequencies_gcov_type (region, n_region,
6214 total_count - entry_count,
6215 total_count);
6216 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6217 total_count);
6219 else
6221 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6222 total_freq);
6223 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6226 if (copying_header)
6228 loop->header = exit->dest;
6229 loop->latch = exit->src;
6232 /* Redirect the entry and add the phi node arguments. */
6233 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6234 gcc_assert (redirected != NULL);
6235 flush_pending_stmts (entry);
6237 /* Concerning updating of dominators: We must recount dominators
6238 for entry block and its copy. Anything that is outside of the
6239 region, but was dominated by something inside needs recounting as
6240 well. */
6241 if (update_dominance)
6243 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6244 doms.safe_push (get_bb_original (entry->dest));
6245 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6246 doms.release ();
6249 /* Add the other PHI node arguments. */
6250 add_phi_args_after_copy (region_copy, n_region, NULL);
6252 if (free_region_copy)
6253 free (region_copy);
6255 free_original_copy_tables ();
6256 return true;
6259 /* Checks if BB is part of the region defined by N_REGION BBS. */
6260 static bool
6261 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6263 unsigned int n;
6265 for (n = 0; n < n_region; n++)
6267 if (bb == bbs[n])
6268 return true;
6270 return false;
6273 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6274 are stored to REGION_COPY in the same order as they appear
6275 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6276 the region, EXIT an exit from it. The condition guarding EXIT
6277 is moved to ENTRY. Returns true if duplication succeeds, false
6278 otherwise.
6280 For example,
6282 some_code;
6283 if (cond)
6284 ;
6285 else
6286 ;
6288 is transformed to
6290 if (cond)
6291 {
6292 some_code;
6293 ;
6294 }
6295 else
6296 {
6297 some_code;
6298 ;
6299 }
6301 */
6302 bool
6303 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6304 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6305 basic_block *region_copy ATTRIBUTE_UNUSED)
6307 unsigned i;
6308 bool free_region_copy = false;
6309 struct loop *loop = exit->dest->loop_father;
6310 struct loop *orig_loop = entry->dest->loop_father;
6311 basic_block switch_bb, entry_bb, nentry_bb;
6312 vec<basic_block> doms;
6313 int total_freq = 0, exit_freq = 0;
6314 gcov_type total_count = 0, exit_count = 0;
6315 edge exits[2], nexits[2], e;
6316 gimple_stmt_iterator gsi;
6317 gimple *cond_stmt;
6318 edge sorig, snew;
6319 basic_block exit_bb;
6320 gphi_iterator psi;
6321 gphi *phi;
6322 tree def;
6323 struct loop *target, *aloop, *cloop;
6325 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6326 exits[0] = exit;
6327 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6329 if (!can_copy_bbs_p (region, n_region))
6330 return false;
6332 initialize_original_copy_tables ();
6333 set_loop_copy (orig_loop, loop);
6335 target = loop;
6336 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6338 if (bb_part_of_region_p (aloop->header, region, n_region))
6340 cloop = duplicate_loop (aloop, target);
6341 duplicate_subloops (aloop, cloop);
6345 if (!region_copy)
6347 region_copy = XNEWVEC (basic_block, n_region);
6348 free_region_copy = true;
6351 gcc_assert (!need_ssa_update_p (cfun));
6353 /* Record blocks outside the region that are dominated by something
6354 inside. */
6355 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6357 if (exit->src->count)
6359 total_count = exit->src->count;
6360 exit_count = exit->count;
6361 /* Fix up corner cases, to avoid division by zero or creation of negative
6362 frequencies. */
6363 if (exit_count > total_count)
6364 exit_count = total_count;
6366 else
6368 total_freq = exit->src->frequency;
6369 exit_freq = EDGE_FREQUENCY (exit);
6370 /* Fix up corner cases, to avoid division by zero or creation of negative
6371 frequencies. */
6372 if (total_freq == 0)
6373 total_freq = 1;
6374 if (exit_freq > total_freq)
6375 exit_freq = total_freq;
6378 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6379 split_edge_bb_loc (exit), true);
6380 if (total_count)
6382 scale_bbs_frequencies_gcov_type (region, n_region,
6383 total_count - exit_count,
6384 total_count);
6385 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6386 total_count);
6388 else
6390 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6391 total_freq);
6392 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6395 /* Create the switch block, and put the exit condition to it. */
6396 entry_bb = entry->dest;
6397 nentry_bb = get_bb_copy (entry_bb);
6398 if (!last_stmt (entry->src)
6399 || !stmt_ends_bb_p (last_stmt (entry->src)))
6400 switch_bb = entry->src;
6401 else
6402 switch_bb = split_edge (entry);
6403 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6405 gsi = gsi_last_bb (switch_bb);
6406 cond_stmt = last_stmt (exit->src);
6407 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6408 cond_stmt = gimple_copy (cond_stmt);
6410 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6412 sorig = single_succ_edge (switch_bb);
6413 sorig->flags = exits[1]->flags;
6414 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6416 /* Register the new edge from SWITCH_BB in loop exit lists. */
6417 rescan_loop_exit (snew, true, false);
6419 /* Add the PHI node arguments. */
6420 add_phi_args_after_copy (region_copy, n_region, snew);
6422 /* Get rid of now superfluous conditions and associated edges (and phi node
6423 arguments). */
6424 exit_bb = exit->dest;
6426 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6427 PENDING_STMT (e) = NULL;
6429 /* The latch of ORIG_LOOP was copied, and so was the backedge
6430 to the original header. We redirect this backedge to EXIT_BB. */
6431 for (i = 0; i < n_region; i++)
6432 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6434 gcc_assert (single_succ_edge (region_copy[i]));
6435 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6436 PENDING_STMT (e) = NULL;
6437 for (psi = gsi_start_phis (exit_bb);
6438 !gsi_end_p (psi);
6439 gsi_next (&psi))
6441 phi = psi.phi ();
6442 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6443 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6446 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6447 PENDING_STMT (e) = NULL;
6449 /* Anything that is outside of the region, but was dominated by something
6450 inside needs to update dominance info. */
6451 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6452 doms.release ();
6453 /* Update the SSA web. */
6454 update_ssa (TODO_update_ssa);
6456 if (free_region_copy)
6457 free (region_copy);
6459 free_original_copy_tables ();
6460 return true;
6463 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6464 adding blocks when the dominator traversal reaches EXIT. This
6465 function silently assumes that ENTRY strictly dominates EXIT. */
6467 void
6468 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6469 vec<basic_block> *bbs_p)
6471 basic_block son;
6473 for (son = first_dom_son (CDI_DOMINATORS, entry);
6474 son;
6475 son = next_dom_son (CDI_DOMINATORS, son))
6477 bbs_p->safe_push (son);
6478 if (son != exit)
6479 gather_blocks_in_sese_region (son, exit, bbs_p);
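/* A minimal usage sketch, mirroring move_sese_region_to_fn below.  The
   traversal only pushes blocks dominated by ENTRY, so the caller must
   push ENTRY itself first:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */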
6483 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6484 The duplicates are recorded in VARS_MAP. */
6486 static void
6487 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6488 tree to_context)
6490 tree t = *tp, new_t;
6491 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6493 if (DECL_CONTEXT (t) == to_context)
6494 return;
6496 bool existed;
6497 tree &loc = vars_map->get_or_insert (t, &existed);
6499 if (!existed)
6501 if (SSA_VAR_P (t))
6503 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6504 add_local_decl (f, new_t);
6506 else
6508 gcc_assert (TREE_CODE (t) == CONST_DECL);
6509 new_t = copy_node (t);
6511 DECL_CONTEXT (new_t) = to_context;
6513 loc = new_t;
6515 else
6516 new_t = loc;
6518 *tp = new_t;
6522 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6523 VARS_MAP maps old ssa names and var_decls to the new ones. */
6525 static tree
6526 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6527 tree to_context)
6529 tree new_name;
6531 gcc_assert (!virtual_operand_p (name));
6533 tree *loc = vars_map->get (name);
6535 if (!loc)
6537 tree decl = SSA_NAME_VAR (name);
6538 if (decl)
6540 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6541 replace_by_duplicate_decl (&decl, vars_map, to_context);
6542 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6543 decl, SSA_NAME_DEF_STMT (name));
6545 else
6546 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6547 name, SSA_NAME_DEF_STMT (name));
6549 /* Now that we've used the def stmt to define new_name, make sure it
6550 doesn't define name anymore. */
6551 SSA_NAME_DEF_STMT (name) = NULL;
6553 vars_map->put (name, new_name);
6555 else
6556 new_name = *loc;
6558 return new_name;
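/* Illustrative property (a sketch; NAME, VARS_MAP and DEST_FN_DECL
   stand for the caller's data): the first call creates the replacement
   and caches it in VARS_MAP, so repeated calls are stable:

     tree n1 = replace_ssa_name (name, &vars_map, dest_fn_decl);
     tree n2 = replace_ssa_name (name, &vars_map, dest_fn_decl);
     gcc_assert (n1 == n2);  */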
6561 struct move_stmt_d
6563 tree orig_block;
6564 tree new_block;
6565 tree from_context;
6566 tree to_context;
6567 hash_map<tree, tree> *vars_map;
6568 htab_t new_label_map;
6569 hash_map<void *, void *> *eh_map;
6570 bool remap_decls_p;
6573 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6574 contained in *TP if it was previously ORIG_BLOCK, and change the
6575 DECL_CONTEXT of every local variable referenced in *TP. */
6577 static tree
6578 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6580 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6581 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6582 tree t = *tp;
6584 if (EXPR_P (t))
6586 tree block = TREE_BLOCK (t);
6587 if (block == p->orig_block
6588 || (p->orig_block == NULL_TREE
6589 && block != NULL_TREE))
6590 TREE_SET_BLOCK (t, p->new_block);
6591 else if (flag_checking && block != NULL_TREE)
6593 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6594 block = BLOCK_SUPERCONTEXT (block);
6595 gcc_assert (block == p->orig_block);
6598 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6600 if (TREE_CODE (t) == SSA_NAME)
6601 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6602 else if (TREE_CODE (t) == PARM_DECL
6603 && gimple_in_ssa_p (cfun))
6604 *tp = *(p->vars_map->get (t));
6605 else if (TREE_CODE (t) == LABEL_DECL)
6607 if (p->new_label_map)
6609 struct tree_map in, *out;
6610 in.base.from = t;
6611 out = (struct tree_map *)
6612 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6613 if (out)
6614 *tp = t = out->to;
6617 DECL_CONTEXT (t) = p->to_context;
6619 else if (p->remap_decls_p)
6621 /* Replace T with its duplicate. T should no longer appear in the
6622 parent function, so this looks wasteful; however, it may appear
6623 in referenced_vars, and more importantly, as virtual operands of
6624 statements, and in alias lists of other variables. It would be
6625 quite difficult to expunge it from all those places. ??? It might
6626 suffice to do this for addressable variables. */
6627 if ((TREE_CODE (t) == VAR_DECL
6628 && !is_global_var (t))
6629 || TREE_CODE (t) == CONST_DECL)
6630 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6632 *walk_subtrees = 0;
6634 else if (TYPE_P (t))
6635 *walk_subtrees = 0;
6637 return NULL_TREE;
6640 /* Helper for move_stmt_r. Given an EH region number for the source
6641 function, map that to the duplicate EH region number in the dest.
6643 static int
6644 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6646 eh_region old_r, new_r;
6648 old_r = get_eh_region_from_number (old_nr);
6649 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6651 return new_r->index;
6654 /* Similar, but operate on INTEGER_CSTs. */
6656 static tree
6657 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6659 int old_nr, new_nr;
6661 old_nr = tree_to_shwi (old_t_nr);
6662 new_nr = move_stmt_eh_region_nr (old_nr, p);
6664 return build_int_cst (integer_type_node, new_nr);
6667 /* Like move_stmt_op, but for gimple statements.
6669 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6670 contained in the current statement in *GSI_P and change the
6671 DECL_CONTEXT of every local variable referenced in the current
6672 statement. */
6674 static tree
6675 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6676 struct walk_stmt_info *wi)
6678 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6679 gimple *stmt = gsi_stmt (*gsi_p);
6680 tree block = gimple_block (stmt);
6682 if (block == p->orig_block
6683 || (p->orig_block == NULL_TREE
6684 && block != NULL_TREE))
6685 gimple_set_block (stmt, p->new_block);
6687 switch (gimple_code (stmt))
6689 case GIMPLE_CALL:
6690 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6692 tree r, fndecl = gimple_call_fndecl (stmt);
6693 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6694 switch (DECL_FUNCTION_CODE (fndecl))
6696 case BUILT_IN_EH_COPY_VALUES:
6697 r = gimple_call_arg (stmt, 1);
6698 r = move_stmt_eh_region_tree_nr (r, p);
6699 gimple_call_set_arg (stmt, 1, r);
6700 /* FALLTHRU */
6702 case BUILT_IN_EH_POINTER:
6703 case BUILT_IN_EH_FILTER:
6704 r = gimple_call_arg (stmt, 0);
6705 r = move_stmt_eh_region_tree_nr (r, p);
6706 gimple_call_set_arg (stmt, 0, r);
6707 break;
6709 default:
6710 break;
6713 break;
6715 case GIMPLE_RESX:
6717 gresx *resx_stmt = as_a <gresx *> (stmt);
6718 int r = gimple_resx_region (resx_stmt);
6719 r = move_stmt_eh_region_nr (r, p);
6720 gimple_resx_set_region (resx_stmt, r);
6722 break;
6724 case GIMPLE_EH_DISPATCH:
6726 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6727 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6728 r = move_stmt_eh_region_nr (r, p);
6729 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6731 break;
6733 case GIMPLE_OMP_RETURN:
6734 case GIMPLE_OMP_CONTINUE:
6735 break;
6736 default:
6737 if (is_gimple_omp (stmt))
6739 /* Do not remap variables inside OMP directives. Variables
6740 referenced in clauses and directive header belong to the
6741 parent function and should not be moved into the child
6742 function. */
6743 bool save_remap_decls_p = p->remap_decls_p;
6744 p->remap_decls_p = false;
6745 *handled_ops_p = true;
6747 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6748 move_stmt_op, wi);
6750 p->remap_decls_p = save_remap_decls_p;
6752 break;
6755 return NULL_TREE;
6758 /* Move basic block BB from function CFUN to function DEST_FN. The
6759 block is moved out of the original linked list and placed after
6760 block AFTER in the new list. Also, the block is removed from the
6761 original array of blocks and placed in DEST_FN's array of blocks.
6762 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6763 updated to reflect the moved edges.
6765 The local variables are remapped to new instances, VARS_MAP is used
6766 to record the mapping. */
6768 static void
6769 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6770 basic_block after, bool update_edge_count_p,
6771 struct move_stmt_d *d)
6773 struct control_flow_graph *cfg;
6774 edge_iterator ei;
6775 edge e;
6776 gimple_stmt_iterator si;
6777 unsigned old_len, new_len;
6779 /* Remove BB from dominance structures. */
6780 delete_from_dominance_info (CDI_DOMINATORS, bb);
6782 /* Move BB from its current loop to the copy in the new function. */
6783 if (current_loops)
6785 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6786 if (new_loop)
6787 bb->loop_father = new_loop;
6790 /* Link BB to the new linked list. */
6791 move_block_after (bb, after);
6793 /* Update the edge count in the corresponding flowgraphs. */
6794 if (update_edge_count_p)
6795 FOR_EACH_EDGE (e, ei, bb->succs)
6797 cfun->cfg->x_n_edges--;
6798 dest_cfun->cfg->x_n_edges++;
6801 /* Remove BB from the original basic block array. */
6802 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6803 cfun->cfg->x_n_basic_blocks--;
6805 /* Grow DEST_CFUN's basic block array if needed. */
6806 cfg = dest_cfun->cfg;
6807 cfg->x_n_basic_blocks++;
6808 if (bb->index >= cfg->x_last_basic_block)
6809 cfg->x_last_basic_block = bb->index + 1;
6811 old_len = vec_safe_length (cfg->x_basic_block_info);
6812 if ((unsigned) cfg->x_last_basic_block >= old_len)
6814 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6815 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6818 (*cfg->x_basic_block_info)[bb->index] = bb;
6820 /* Remap the variables in phi nodes. */
6821 for (gphi_iterator psi = gsi_start_phis (bb);
6822 !gsi_end_p (psi); )
6824 gphi *phi = psi.phi ();
6825 use_operand_p use;
6826 tree op = PHI_RESULT (phi);
6827 ssa_op_iter oi;
6828 unsigned i;
6830 if (virtual_operand_p (op))
6832 /* Remove the phi nodes for virtual operands (alias analysis will be
6833 run for the new function, anyway). */
6834 remove_phi_node (&psi, true);
6835 continue;
6838 SET_PHI_RESULT (phi,
6839 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6840 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6842 op = USE_FROM_PTR (use);
6843 if (TREE_CODE (op) == SSA_NAME)
6844 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6847 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6849 location_t locus = gimple_phi_arg_location (phi, i);
6850 tree block = LOCATION_BLOCK (locus);
6852 if (locus == UNKNOWN_LOCATION)
6853 continue;
6854 if (d->orig_block == NULL_TREE || block == d->orig_block)
6856 locus = set_block (locus, d->new_block);
6857 gimple_phi_arg_set_location (phi, i, locus);
6861 gsi_next (&psi);
6864 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6866 gimple *stmt = gsi_stmt (si);
6867 struct walk_stmt_info wi;
6869 memset (&wi, 0, sizeof (wi));
6870 wi.info = d;
6871 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6873 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6875 tree label = gimple_label_label (label_stmt);
6876 int uid = LABEL_DECL_UID (label);
6878 gcc_assert (uid > -1);
6880 old_len = vec_safe_length (cfg->x_label_to_block_map);
6881 if (old_len <= (unsigned) uid)
6883 new_len = 3 * uid / 2 + 1;
6884 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6887 (*cfg->x_label_to_block_map)[uid] = bb;
6888 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6890 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6892 if (uid >= dest_cfun->cfg->last_label_uid)
6893 dest_cfun->cfg->last_label_uid = uid + 1;
6896 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6897 remove_stmt_from_eh_lp_fn (cfun, stmt);
6899 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6900 gimple_remove_stmt_histograms (cfun, stmt);
6902 /* We cannot leave any operands allocated from the operand caches of
6903 the current function. */
6904 free_stmt_operands (cfun, stmt);
6905 push_cfun (dest_cfun);
6906 update_stmt (stmt);
6907 pop_cfun ();
6910 FOR_EACH_EDGE (e, ei, bb->succs)
6911 if (e->goto_locus != UNKNOWN_LOCATION)
6913 tree block = LOCATION_BLOCK (e->goto_locus);
6914 if (d->orig_block == NULL_TREE
6915 || block == d->orig_block)
6916 e->goto_locus = set_block (e->goto_locus, d->new_block);
6920 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6921 the outermost EH region. Use REGION as the incoming base EH region. */
6923 static eh_region
6924 find_outermost_region_in_block (struct function *src_cfun,
6925 basic_block bb, eh_region region)
6927 gimple_stmt_iterator si;
6929 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6931 gimple *stmt = gsi_stmt (si);
6932 eh_region stmt_region;
6933 int lp_nr;
6935 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6936 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6937 if (stmt_region)
6939 if (region == NULL)
6940 region = stmt_region;
6941 else if (stmt_region != region)
6943 region = eh_region_outermost (src_cfun, stmt_region, region);
6944 gcc_assert (region != NULL);
6949 return region;
6952 static tree
6953 new_label_mapper (tree decl, void *data)
6955 htab_t hash = (htab_t) data;
6956 struct tree_map *m;
6957 void **slot;
6959 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6961 m = XNEW (struct tree_map);
6962 m->hash = DECL_UID (decl);
6963 m->base.from = decl;
6964 m->to = create_artificial_label (UNKNOWN_LOCATION);
6965 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6966 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6967 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6969 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6970 gcc_assert (*slot == NULL);
6972 *slot = m;
6974 return m->to;
6977 /* Tree walker to replace the decls used inside value expressions by
6978 duplicates. */
6980 static tree
6981 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6983 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6985 switch (TREE_CODE (*tp))
6987 case VAR_DECL:
6988 case PARM_DECL:
6989 case RESULT_DECL:
6990 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6991 break;
6992 default:
6993 break;
6996 if (IS_TYPE_OR_DECL_P (*tp))
6997 *walk_subtrees = false;
6999 return NULL;
7002 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7003 subblocks. */
7005 static void
7006 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7007 tree to_context)
7009 tree *tp, t;
7011 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7013 t = *tp;
7014 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
7015 continue;
7016 replace_by_duplicate_decl (&t, vars_map, to_context);
7017 if (t != *tp)
7019 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
7021 tree x = DECL_VALUE_EXPR (*tp);
7022 struct replace_decls_d rd = { vars_map, to_context };
7023 x = unshare_expr (x);
7024 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7025 SET_DECL_VALUE_EXPR (t, x);
7026 DECL_HAS_VALUE_EXPR_P (t) = 1;
7028 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7029 *tp = t;
7033 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7034 replace_block_vars_by_duplicates (block, vars_map, to_context);
7037 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7038 from FN1 to FN2. */
7040 static void
7041 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7042 struct loop *loop)
7044 /* Discard it from the old loop array. */
7045 (*get_loops (fn1))[loop->num] = NULL;
7047 /* Place it in the new loop array, assigning it a new number. */
7048 loop->num = number_of_loops (fn2);
7049 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7051 /* Recurse to children. */
7052 for (loop = loop->inner; loop; loop = loop->next)
7053 fixup_loop_arrays_after_move (fn1, fn2, loop);
7056 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7057 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7059 DEBUG_FUNCTION void
7060 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7062 basic_block bb;
7063 edge_iterator ei;
7064 edge e;
7065 bitmap bbs = BITMAP_ALLOC (NULL);
7066 int i;
7068 gcc_assert (entry != NULL);
7069 gcc_assert (entry != exit);
7070 gcc_assert (bbs_p != NULL);
7072 gcc_assert (bbs_p->length () > 0);
7074 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7075 bitmap_set_bit (bbs, bb->index);
7077 gcc_assert (bitmap_bit_p (bbs, entry->index));
7078 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7080 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7082 if (bb == entry)
7084 gcc_assert (single_pred_p (entry));
7085 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7087 else
7088 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7090 e = ei_edge (ei);
7091 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7094 if (bb == exit)
7096 gcc_assert (single_succ_p (exit));
7097 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7099 else
7100 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7102 e = ei_edge (ei);
7103 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7107 BITMAP_FREE (bbs);
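/* Typical checking-only use, as in move_sese_region_to_fn below:

     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);  */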
7110 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7112 bool
7113 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7115 bitmap release_names = (bitmap)data;
7117 if (TREE_CODE (from) != SSA_NAME)
7118 return true;
7120 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7121 return true;
7124 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7125 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7126 single basic block in the original CFG and the new basic block is
7127 returned. DEST_CFUN must not have a CFG yet.
7129 Note that the region need not be a pure SESE region. Blocks inside
7130 the region may contain calls to abort/exit. The only restriction
7131 is that ENTRY_BB should be the only entry point and it must
7132 dominate EXIT_BB.
7134 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7135 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7136 to the new function.
7138 All local variables referenced in the region are assumed to be in
7139 the corresponding BLOCK_VARS and unexpanded variable lists
7140 associated with DEST_CFUN.
7142 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7143 reimplement move_sese_region_to_fn by duplicating the region rather than
7144 moving it. */
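/* A hedged caller sketch (CHILD_FN, ENTRY_BB, EXIT_BB and BLOCK stand
   for the caller's data; the OMP expansion code drives this routine in
   a similar way when outlining regions):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block stub_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, STUB_BB is the single new block that replaces the region
   in the original CFG, and CHILD_CFUN owns the moved blocks.  */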
7146 basic_block
7147 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7148 basic_block exit_bb, tree orig_block)
7150 vec<basic_block> bbs, dom_bbs;
7151 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7152 basic_block after, bb, *entry_pred, *exit_succ, abb;
7153 struct function *saved_cfun = cfun;
7154 int *entry_flag, *exit_flag;
7155 unsigned *entry_prob, *exit_prob;
7156 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7157 edge e;
7158 edge_iterator ei;
7159 htab_t new_label_map;
7160 hash_map<void *, void *> *eh_map;
7161 struct loop *loop = entry_bb->loop_father;
7162 struct loop *loop0 = get_loop (saved_cfun, 0);
7163 struct move_stmt_d d;
7165 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7166 region. */
7167 gcc_assert (entry_bb != exit_bb
7168 && (!exit_bb
7169 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7171 /* Collect all the blocks in the region. Manually add ENTRY_BB
7172 because gather_blocks_in_sese_region will not add it. */
7173 bbs.create (0);
7174 bbs.safe_push (entry_bb);
7175 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7177 if (flag_checking)
7178 verify_sese (entry_bb, exit_bb, &bbs);
7180 /* The blocks that used to be dominated by something in BBS will now be
7181 dominated by the new block. */
7182 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7183 bbs.address (),
7184 bbs.length ());
7186 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7187 the predecessor edges to ENTRY_BB and the successor edges to
7188 EXIT_BB so that we can re-attach them to the new basic block that
7189 will replace the region. */
7190 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7191 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7192 entry_flag = XNEWVEC (int, num_entry_edges);
7193 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7194 i = 0;
7195 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7197 entry_prob[i] = e->probability;
7198 entry_flag[i] = e->flags;
7199 entry_pred[i++] = e->src;
7200 remove_edge (e);
7203 if (exit_bb)
7205 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7206 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7207 exit_flag = XNEWVEC (int, num_exit_edges);
7208 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7209 i = 0;
7210 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7212 exit_prob[i] = e->probability;
7213 exit_flag[i] = e->flags;
7214 exit_succ[i++] = e->dest;
7215 remove_edge (e);
7218 else
7220 num_exit_edges = 0;
7221 exit_succ = NULL;
7222 exit_flag = NULL;
7223 exit_prob = NULL;
7226 /* Switch context to the child function to initialize DEST_FN's CFG. */
7227 gcc_assert (dest_cfun->cfg == NULL);
7228 push_cfun (dest_cfun);
7230 init_empty_tree_cfg ();
7232 /* Initialize EH information for the new function. */
7233 eh_map = NULL;
7234 new_label_map = NULL;
7235 if (saved_cfun->eh)
7237 eh_region region = NULL;
7239 FOR_EACH_VEC_ELT (bbs, i, bb)
7240 region = find_outermost_region_in_block (saved_cfun, bb, region);
7242 init_eh_for_function ();
7243 if (region != NULL)
7245 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7246 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7247 new_label_mapper, new_label_map);
7251 /* Initialize an empty loop tree. */
7252 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7253 init_loops_structure (dest_cfun, loops, 1);
7254 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7255 set_loops_for_fn (dest_cfun, loops);
7257 /* Move the outlined loop tree part. */
7258 num_nodes = bbs.length ();
7259 FOR_EACH_VEC_ELT (bbs, i, bb)
7261 if (bb->loop_father->header == bb)
7263 struct loop *this_loop = bb->loop_father;
7264 struct loop *outer = loop_outer (this_loop);
7265 if (outer == loop
7266 /* If the SESE region contains some bbs ending with
7267 a noreturn call, those are considered to belong
7268 to the outermost loop in saved_cfun, rather than
7269 the entry_bb's loop_father. */
7270 || outer == loop0)
7272 if (outer != loop)
7273 num_nodes -= this_loop->num_nodes;
7274 flow_loop_tree_node_remove (bb->loop_father);
7275 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7276 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7279 else if (bb->loop_father == loop0 && loop0 != loop)
7280 num_nodes--;
7282 /* Remove loop exits from the outlined region. */
7283 if (loops_for_fn (saved_cfun)->exits)
7284 FOR_EACH_EDGE (e, ei, bb->succs)
7286 struct loops *l = loops_for_fn (saved_cfun);
7287 loop_exit **slot
7288 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7289 NO_INSERT);
7290 if (slot)
7291 l->exits->clear_slot (slot);
7296 /* Adjust the number of blocks in the tree root of the outlined part. */
7297 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7299 /* Setup a mapping to be used by move_block_to_fn. */
7300 loop->aux = current_loops->tree_root;
7301 loop0->aux = current_loops->tree_root;
7303 pop_cfun ();
7305 /* Move blocks from BBS into DEST_CFUN. */
7306 gcc_assert (bbs.length () >= 2);
7307 after = dest_cfun->cfg->x_entry_block_ptr;
7308 hash_map<tree, tree> vars_map;
7310 memset (&d, 0, sizeof (d));
7311 d.orig_block = orig_block;
7312 d.new_block = DECL_INITIAL (dest_cfun->decl);
7313 d.from_context = cfun->decl;
7314 d.to_context = dest_cfun->decl;
7315 d.vars_map = &vars_map;
7316 d.new_label_map = new_label_map;
7317 d.eh_map = eh_map;
7318 d.remap_decls_p = true;
7320 if (gimple_in_ssa_p (cfun))
7321 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7323 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7324 set_ssa_default_def (dest_cfun, arg, narg);
7325 vars_map.put (arg, narg);
7328 FOR_EACH_VEC_ELT (bbs, i, bb)
7330 /* No need to update edge counts on the last block. It has
7331 already been updated earlier when we detached the region from
7332 the original CFG. */
7333 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7334 after = bb;
7337 loop->aux = NULL;
7338 loop0->aux = NULL;
7339 /* Loop sizes are no longer correct, fix them up. */
7340 loop->num_nodes -= num_nodes;
7341 for (struct loop *outer = loop_outer (loop);
7342 outer; outer = loop_outer (outer))
7343 outer->num_nodes -= num_nodes;
7344 loop0->num_nodes -= bbs.length () - num_nodes;
7346 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7348 struct loop *aloop;
7349 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7350 if (aloop != NULL)
7352 if (aloop->simduid)
7354 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7355 d.to_context);
7356 dest_cfun->has_simduid_loops = true;
7358 if (aloop->force_vectorize)
7359 dest_cfun->has_force_vectorize_loops = true;
7363 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7364 if (orig_block)
7366 tree block;
7367 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7368 == NULL_TREE);
7369 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7370 = BLOCK_SUBBLOCKS (orig_block);
7371 for (block = BLOCK_SUBBLOCKS (orig_block);
7372 block; block = BLOCK_CHAIN (block))
7373 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7374 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7377 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7378 &vars_map, dest_cfun->decl);
7380 if (new_label_map)
7381 htab_delete (new_label_map);
7382 if (eh_map)
7383 delete eh_map;
7385 if (gimple_in_ssa_p (cfun))
7387 /* We need to release ssa-names in a defined order, so first find them,
7388 and then iterate in ascending version order. */
7389 bitmap release_names = BITMAP_ALLOC (NULL);
7390 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7391 bitmap_iterator bi;
7392 unsigned i;
7393 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7394 release_ssa_name (ssa_name (i));
7395 BITMAP_FREE (release_names);
7398 /* Rewire the entry and exit blocks. The successor to the entry
7399 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7400 the child function. Similarly, the predecessor of DEST_FN's
7401 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7402 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7403 various CFG manipulation functions get to the right CFG.
7405 FIXME, this is silly. The CFG ought to become a parameter to
7406 these helpers. */
7407 push_cfun (dest_cfun);
7408 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7409 if (exit_bb)
7410 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7411 pop_cfun ();
7413 /* Back in the original function, the SESE region has disappeared,
7414 create a new basic block in its place. */
7415 bb = create_empty_bb (entry_pred[0]);
7416 if (current_loops)
7417 add_bb_to_loop (bb, loop);
7418 for (i = 0; i < num_entry_edges; i++)
7420 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7421 e->probability = entry_prob[i];
7424 for (i = 0; i < num_exit_edges; i++)
7426 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7427 e->probability = exit_prob[i];
7430 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7431 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7432 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7433 dom_bbs.release ();
7435 if (exit_bb)
7437 free (exit_prob);
7438 free (exit_flag);
7439 free (exit_succ);
7441 free (entry_prob);
7442 free (entry_flag);
7443 free (entry_pred);
7444 bbs.release ();
7446 return bb;
7449 /* Dump default def DEF to file FILE using FLAGS and indentation
7450 SPC. */
7452 static void
7453 dump_default_def (FILE *file, tree def, int spc, int flags)
7455 for (int i = 0; i < spc; ++i)
7456 fprintf (file, " ");
7457 dump_ssaname_info_to_file (file, def, spc);
7459 print_generic_expr (file, TREE_TYPE (def), flags);
7460 fprintf (file, " ");
7461 print_generic_expr (file, def, flags);
7462 fprintf (file, " = ");
7463 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7464 fprintf (file, ";\n");
7467 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7468 dumpfile.h). */
7470 void
7471 dump_function_to_file (tree fndecl, FILE *file, int flags)
7473 tree arg, var, old_current_fndecl = current_function_decl;
7474 struct function *dsf;
7475 bool ignore_topmost_bind = false, any_var = false;
7476 basic_block bb;
7477 tree chain;
7478 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7479 && decl_is_tm_clone (fndecl));
7480 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7482 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7484 fprintf (file, "__attribute__((");
7486 bool first = true;
7487 tree chain;
7488 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7489 first = false, chain = TREE_CHAIN (chain))
7491 if (!first)
7492 fprintf (file, ", ");
7494 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7495 if (TREE_VALUE (chain) != NULL_TREE)
7497 fprintf (file, " (");
7498 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7499 fprintf (file, ")");
7503 fprintf (file, "))\n");
7506 current_function_decl = fndecl;
7507 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7509 arg = DECL_ARGUMENTS (fndecl);
7510 while (arg)
7512 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7513 fprintf (file, " ");
7514 print_generic_expr (file, arg, dump_flags);
7515 if (flags & TDF_VERBOSE)
7516 print_node (file, "", arg, 4);
7517 if (DECL_CHAIN (arg))
7518 fprintf (file, ", ");
7519 arg = DECL_CHAIN (arg);
7521 fprintf (file, ")\n");
7523 if (flags & TDF_VERBOSE)
7524 print_node (file, "", fndecl, 2);
7526 dsf = DECL_STRUCT_FUNCTION (fndecl);
7527 if (dsf && (flags & TDF_EH))
7528 dump_eh_tree (file, dsf);
7530 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7532 dump_node (fndecl, TDF_SLIM | flags, file);
7533 current_function_decl = old_current_fndecl;
7534 return;
7537 /* When GIMPLE is lowered, the variables are no longer available in
7538 BIND_EXPRs, so display them separately. */
7539 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7541 unsigned ix;
7542 ignore_topmost_bind = true;
7544 fprintf (file, "{\n");
7545 if (gimple_in_ssa_p (fun)
7546 && (flags & TDF_ALIAS))
7548 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7549 arg = DECL_CHAIN (arg))
7551 tree def = ssa_default_def (fun, arg);
7552 if (def)
7553 dump_default_def (file, def, 2, flags);
7556 tree res = DECL_RESULT (fun->decl);
7557 if (res != NULL_TREE
7558 && DECL_BY_REFERENCE (res))
7560 tree def = ssa_default_def (fun, res);
7561 if (def)
7562 dump_default_def (file, def, 2, flags);
7565 tree static_chain = fun->static_chain_decl;
7566 if (static_chain != NULL_TREE)
7568 tree def = ssa_default_def (fun, static_chain);
7569 if (def)
7570 dump_default_def (file, def, 2, flags);
7574 if (!vec_safe_is_empty (fun->local_decls))
7575 FOR_EACH_LOCAL_DECL (fun, ix, var)
7577 print_generic_decl (file, var, flags);
7578 if (flags & TDF_VERBOSE)
7579 print_node (file, "", var, 4);
7580 fprintf (file, "\n");
7582 any_var = true;
7584 if (gimple_in_ssa_p (cfun))
7585 for (ix = 1; ix < num_ssa_names; ++ix)
7587 tree name = ssa_name (ix);
7588 if (name && !SSA_NAME_VAR (name))
7590 fprintf (file, " ");
7591 print_generic_expr (file, TREE_TYPE (name), flags);
7592 fprintf (file, " ");
7593 print_generic_expr (file, name, flags);
7594 fprintf (file, ";\n");
7596 any_var = true;
7601 if (fun && fun->decl == fndecl
7602 && fun->cfg
7603 && basic_block_info_for_fn (fun))
7605 /* If the CFG has been built, emit a CFG-based dump. */
7606 if (!ignore_topmost_bind)
7607 fprintf (file, "{\n");
7609 if (any_var && n_basic_blocks_for_fn (fun))
7610 fprintf (file, "\n");
7612 FOR_EACH_BB_FN (bb, fun)
7613 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7615 fprintf (file, "}\n");
7617 else if (DECL_SAVED_TREE (fndecl) == NULL)
7619 /* The function is now in GIMPLE form but the CFG has not been
7620 built yet. Emit the single sequence of GIMPLE statements
7621 that make up its body. */
7622 gimple_seq body = gimple_body (fndecl);
7624 if (gimple_seq_first_stmt (body)
7625 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7626 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7627 print_gimple_seq (file, body, 0, flags);
7628 else
7630 if (!ignore_topmost_bind)
7631 fprintf (file, "{\n");
7633 if (any_var)
7634 fprintf (file, "\n");
7636 print_gimple_seq (file, body, 2, flags);
7637 fprintf (file, "}\n");
7640 else
7642 int indent;
7644 /* Make a tree based dump. */
7645 chain = DECL_SAVED_TREE (fndecl);
7646 if (chain && TREE_CODE (chain) == BIND_EXPR)
7648 if (ignore_topmost_bind)
7650 chain = BIND_EXPR_BODY (chain);
7651 indent = 2;
7653 else
7654 indent = 0;
7656 else
7658 if (!ignore_topmost_bind)
7660 fprintf (file, "{\n");
7661 /* No topmost bind, pretend it's ignored for later. */
7662 ignore_topmost_bind = true;
7664 indent = 2;
7667 if (any_var)
7668 fprintf (file, "\n");
7670 print_generic_stmt_indented (file, chain, flags, indent);
7671 if (ignore_topmost_bind)
7672 fprintf (file, "}\n");
7675 if (flags & TDF_ENUMERATE_LOCALS)
7676 dump_enumerated_decls (file, flags);
7677 fprintf (file, "\n\n");
7679 current_function_decl = old_current_fndecl;
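/* Example (illustrative only): dump the current function, including
   virtual operands, to stderr:

     dump_function_to_file (current_function_decl, stderr, TDF_VOPS);  */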
7682 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7684 DEBUG_FUNCTION void
7685 debug_function (tree fn, int flags)
7687 dump_function_to_file (fn, stderr, flags);
7691 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7693 static void
7694 print_pred_bbs (FILE *file, basic_block bb)
7696 edge e;
7697 edge_iterator ei;
7699 FOR_EACH_EDGE (e, ei, bb->preds)
7700 fprintf (file, "bb_%d ", e->src->index);
7704 /* Print on FILE the indexes for the successors of basic_block BB. */
7706 static void
7707 print_succ_bbs (FILE *file, basic_block bb)
7709 edge e;
7710 edge_iterator ei;
7712 FOR_EACH_EDGE (e, ei, bb->succs)
7713 fprintf (file, "bb_%d ", e->dest->index);
7716 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7718 void
7719 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7721 char *s_indent = (char *) alloca ((size_t) indent + 1);
7722 memset ((void *) s_indent, ' ', (size_t) indent);
7723 s_indent[indent] = '\0';
7725 /* Print basic_block's header. */
7726 if (verbosity >= 2)
7728 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7729 print_pred_bbs (file, bb);
7730 fprintf (file, "}, succs = {");
7731 print_succ_bbs (file, bb);
7732 fprintf (file, "})\n");
7735 /* Print basic_block's body. */
7736 if (verbosity >= 3)
7738 fprintf (file, "%s {\n", s_indent);
7739 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7740 fprintf (file, "%s }\n", s_indent);
7744 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7746 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7747 VERBOSITY level, this outputs the contents of the loop, or just its
7748 structure. */
7750 static void
7751 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7753 char *s_indent;
7754 basic_block bb;
7756 if (loop == NULL)
7757 return;
7759 s_indent = (char *) alloca ((size_t) indent + 1);
7760 memset ((void *) s_indent, ' ', (size_t) indent);
7761 s_indent[indent] = '\0';
7763 /* Print loop's header. */
7764 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7765 if (loop->header)
7766 fprintf (file, "header = %d", loop->header->index);
7767 else
7769 fprintf (file, "deleted)\n");
7770 return;
7772 if (loop->latch)
7773 fprintf (file, ", latch = %d", loop->latch->index);
7774 else
7775 fprintf (file, ", multiple latches");
7776 fprintf (file, ", niter = ");
7777 print_generic_expr (file, loop->nb_iterations, 0);
7779 if (loop->any_upper_bound)
7781 fprintf (file, ", upper_bound = ");
7782 print_decu (loop->nb_iterations_upper_bound, file);
7784 if (loop->any_likely_upper_bound)
7786 fprintf (file, ", likely_upper_bound = ");
7787 print_decu (loop->nb_iterations_likely_upper_bound, file);
7790 if (loop->any_estimate)
7792 fprintf (file, ", estimate = ");
7793 print_decu (loop->nb_iterations_estimate, file);
7795 fprintf (file, ")\n");
7797 /* Print loop's body. */
7798 if (verbosity >= 1)
7800 fprintf (file, "%s{\n", s_indent);
7801 FOR_EACH_BB_FN (bb, cfun)
7802 if (bb->loop_father == loop)
7803 print_loops_bb (file, bb, indent, verbosity);
7805 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7806 fprintf (file, "%s}\n", s_indent);
7810 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7811 spaces. Depending on the VERBOSITY level, this outputs the contents
7812 of the loop, or just its structure. */
7814 static void
7815 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7816 int verbosity)
7818 if (loop == NULL)
7819 return;
7821 print_loop (file, loop, indent, verbosity);
7822 print_loop_and_siblings (file, loop->next, indent, verbosity);
7825 /* Follow a CFG edge from the entry point of the program, and on entry
7826 of a loop, pretty print the loop structure on FILE. */
7828 void
7829 print_loops (FILE *file, int verbosity)
7831 basic_block bb;
7833 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7834 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7835 if (bb && bb->loop_father)
7836 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7839 /* Dump a loop. */
7841 DEBUG_FUNCTION void
7842 debug (struct loop &ref)
7844 print_loop (stderr, &ref, 0, /*verbosity*/0);
7847 DEBUG_FUNCTION void
7848 debug (struct loop *ptr)
7850 if (ptr)
7851 debug (*ptr);
7852 else
7853 fprintf (stderr, "<nil>\n");
7856 /* Dump a loop verbosely. */
7858 DEBUG_FUNCTION void
7859 debug_verbose (struct loop &ref)
7861 print_loop (stderr, &ref, 0, /*verbosity*/3);
7864 DEBUG_FUNCTION void
7865 debug_verbose (struct loop *ptr)
7867 if (ptr)
7868 debug_verbose (*ptr);
7869 else
7870 fprintf (stderr, "<nil>\n");
7874 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7876 DEBUG_FUNCTION void
7877 debug_loops (int verbosity)
7879 print_loops (stderr, verbosity);
7882 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7884 DEBUG_FUNCTION void
7885 debug_loop (struct loop *loop, int verbosity)
7887 print_loop (stderr, loop, 0, verbosity);
7890 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7891 level. */
7893 DEBUG_FUNCTION void
7894 debug_loop_num (unsigned num, int verbosity)
7896 debug_loop (get_loop (cfun, num), verbosity);
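/* These entry points are meant for interactive use, e.g. from gdb:

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)  */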
7899 /* Return true if BB ends with a call, possibly followed by some
7900 instructions that must stay with the call. Return false
7901 otherwise. */
7903 static bool
7904 gimple_block_ends_with_call_p (basic_block bb)
7906 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7907 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7911 /* Return true if BB ends with a conditional branch. Return false
7912 otherwise. */
7914 static bool
7915 gimple_block_ends_with_condjump_p (const_basic_block bb)
7917 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7918 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7922 /* Return true if statement T may terminate execution of BB in ways not
7923 explicitly represented in the CFG. */
7925 bool
7926 stmt_can_terminate_bb_p (gimple *t)
7928 tree fndecl = NULL_TREE;
7929 int call_flags = 0;
7931 /* An EH exception not handled internally terminates execution of the whole
7932 function. */
7933 if (stmt_can_throw_external (t))
7934 return true;
7936 /* NORETURN and LONGJMP calls already have an edge to exit.
7937 CONST and PURE calls do not need one.
7938 We don't currently check for CONST and PURE here, although
7939 it would be a good idea, because those attributes are
7940 figured out from the RTL in mark_constant_function, and
7941 the counter incrementation code from -fprofile-arcs
7942 leads to different results from -fbranch-probabilities. */
7943 if (is_gimple_call (t))
7945 fndecl = gimple_call_fndecl (t);
7946 call_flags = gimple_call_flags (t);
7949 if (is_gimple_call (t)
7950 && fndecl
7951 && DECL_BUILT_IN (fndecl)
7952 && (call_flags & ECF_NOTHROW)
7953 && !(call_flags & ECF_RETURNS_TWICE)
7954 /* fork() doesn't really return twice, but the effect of
7955 wrapping it in __gcov_fork() which calls __gcov_flush()
7956 and clears the counters before forking has the same
7957 effect as returning twice. Force a fake edge. */
7958 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7959 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7960 return false;
7962 if (is_gimple_call (t))
7964 edge_iterator ei;
7965 edge e;
7966 basic_block bb;
7968 if (call_flags & (ECF_PURE | ECF_CONST)
7969 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
7970 return false;
7972 /* A function call may do a longjmp, terminate the program, or do other
7973 things. Special-case noreturn calls that have non-abnormal edges out,
7974 as then the fact is sufficiently represented by the lack of edges out of T. */
7975 if (!(call_flags & ECF_NORETURN))
7976 return true;
7978 bb = gimple_bb (t);
7979 FOR_EACH_EDGE (e, ei, bb->succs)
7980 if ((e->flags & EDGE_FAKE) == 0)
7981 return true;
7984 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7985 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7986 return true;
7988 return false;
7992 /* Add fake edges to the function exit for any non-constant and
7993 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7994 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7995 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7996 that were split.
7998 The goal is to expose cases in which entering a basic block does
7999 not imply that all subsequent instructions must be executed. */
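/* A hedged usage sketch: callers reach this through the
   flow_call_edges_add CFG hook (installed via gimple_cfg_hooks below).
   Passing a null bitmap processes the whole CFG:

     int n_split = flow_call_edges_add (NULL);  */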
8001 static int
8002 gimple_flow_call_edges_add (sbitmap blocks)
8004 int i;
8005 int blocks_split = 0;
8006 int last_bb = last_basic_block_for_fn (cfun);
8007 bool check_last_block = false;
8009 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8010 return 0;
8012 if (! blocks)
8013 check_last_block = true;
8014 else
8015 check_last_block = bitmap_bit_p (blocks,
8016 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8018 /* In the last basic block, before epilogue generation, there will be
8019 a fallthru edge to EXIT. Special care is required if the last insn
8020 of the last basic block is a call because make_edge folds duplicate
8021 edges, which would result in the fallthru edge also being marked
8022 fake, which would result in the fallthru edge being removed by
8023 remove_fake_edges, which would result in an invalid CFG.
8025 Moreover, we can't elide the outgoing fake edge, since the block
8026 profiler needs to take this into account in order to solve the minimal
8027 spanning tree in the case that the call doesn't return.
8029 Handle this by adding a dummy instruction in a new last basic block. */
8030 if (check_last_block)
8032 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8033 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8034 gimple *t = NULL;
8036 if (!gsi_end_p (gsi))
8037 t = gsi_stmt (gsi);
8039 if (t && stmt_can_terminate_bb_p (t))
8041 edge e;
8043 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8044 if (e)
8046 gsi_insert_on_edge (e, gimple_build_nop ());
8047 gsi_commit_edge_inserts ();
8052 /* Now add fake edges to the function exit for any non-constant
8053 calls since there is no way that we can determine if they will
8054 return or not... */
8055 for (i = 0; i < last_bb; i++)
8057 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8058 gimple_stmt_iterator gsi;
8059 gimple *stmt, *last_stmt;
8061 if (!bb)
8062 continue;
8064 if (blocks && !bitmap_bit_p (blocks, i))
8065 continue;
8067 gsi = gsi_last_nondebug_bb (bb);
8068 if (!gsi_end_p (gsi))
8070 last_stmt = gsi_stmt (gsi);
8073 stmt = gsi_stmt (gsi);
8074 if (stmt_can_terminate_bb_p (stmt))
8076 edge e;
8078 /* The handling above of the final block before the
8079 epilogue should be enough to verify that there is
8080 no edge to the exit block in CFG already.
8081 Calling make_edge in such case would cause us to
8082 mark that edge as fake and remove it later. */
8083 if (flag_checking && stmt == last_stmt)
8085 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8086 gcc_assert (e == NULL);
8089 /* Note that the following may create a new basic block
8090 and renumber the existing basic blocks. */
8091 if (stmt != last_stmt)
8093 e = split_block (bb, stmt);
8094 if (e)
8095 blocks_split++;
8097 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8099 gsi_prev (&gsi);
8101 while (!gsi_end_p (gsi));
8105 if (blocks_split)
8106 verify_flow_info ();
8108 return blocks_split;
8111 /* Removes edge E and all the blocks dominated by it, and updates dominance
8112 information. The IL in E->src needs to be updated separately.
8113 If dominance info is not available, only the edge E is removed. */
8115 void
8116 remove_edge_and_dominated_blocks (edge e)
8118 vec<basic_block> bbs_to_remove = vNULL;
8119 vec<basic_block> bbs_to_fix_dom = vNULL;
8120 bitmap df, df_idom;
8121 edge f;
8122 edge_iterator ei;
8123 bool none_removed = false;
8124 unsigned i;
8125 basic_block bb, dbb;
8126 bitmap_iterator bi;
8128 /* If we are removing a path inside a non-root loop, that may change the
8129 loop ownership of blocks or remove loops; mark loops for fixup. */
8130 if (current_loops
8131 && loop_outer (e->src->loop_father) != NULL
8132 && e->src->loop_father == e->dest->loop_father)
8133 loops_state_set (LOOPS_NEED_FIXUP);
8135 if (!dom_info_available_p (CDI_DOMINATORS))
8137 remove_edge (e);
8138 return;
8141 /* No updating is needed for edges to exit. */
8142 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8144 if (cfgcleanup_altered_bbs)
8145 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8146 remove_edge (e);
8147 return;
8150 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8151 that is not dominated by E->dest, then this set is empty. Otherwise,
8152 all the basic blocks dominated by E->dest are removed.
8154 Also, to DF_IDOM we store the immediate dominators of the blocks in
8155 the dominance frontier of E (i.e., of the successors of the
8156 removed blocks, if there are any, and of E->dest otherwise). */
8157 FOR_EACH_EDGE (f, ei, e->dest->preds)
8159 if (f == e)
8160 continue;
8162 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8164 none_removed = true;
8165 break;
8169 df = BITMAP_ALLOC (NULL);
8170 df_idom = BITMAP_ALLOC (NULL);
8172 if (none_removed)
8173 bitmap_set_bit (df_idom,
8174 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8175 else
8177 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8178 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8180 FOR_EACH_EDGE (f, ei, bb->succs)
8182 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8183 bitmap_set_bit (df, f->dest->index);
8186 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8187 bitmap_clear_bit (df, bb->index);
8189 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8191 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8192 bitmap_set_bit (df_idom,
8193 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8197 if (cfgcleanup_altered_bbs)
8199 /* Record the set of the altered basic blocks. */
8200 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8201 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8204 /* Remove E and the cancelled blocks. */
8205 if (none_removed)
8206 remove_edge (e);
8207 else
8209 /* Walk backwards so as to get a chance to substitute all
8210 released DEFs into debug stmts. See
8211 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8212 details. */
8213 for (i = bbs_to_remove.length (); i-- > 0; )
8214 delete_basic_block (bbs_to_remove[i]);
8217 /* Update the dominance information. The immediate dominator may change only
8218 for blocks whose immediate dominator belongs to DF_IDOM:
8220 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8221 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8222 Z dominates X after the removal. Before removal, there exists a path P
8223 from Y to X that avoids Z. Let F be the last edge on P that is
8224 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8225 dominates W, and because of P, Z does not dominate W), and W belongs to
8226 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8227 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8229 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8230 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8231 dbb;
8232 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8233 bbs_to_fix_dom.safe_push (dbb);
8236 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8238 BITMAP_FREE (df);
8239 BITMAP_FREE (df_idom);
8240 bbs_to_remove.release ();
8241 bbs_to_fix_dom.release ();
8244 /* Purge dead EH edges from basic block BB. */
8246 bool
8247 gimple_purge_dead_eh_edges (basic_block bb)
8249 bool changed = false;
8250 edge e;
8251 edge_iterator ei;
8252 gimple *stmt = last_stmt (bb);
8254 if (stmt && stmt_can_throw_internal (stmt))
8255 return false;
8257 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8259 if (e->flags & EDGE_EH)
8261 remove_edge_and_dominated_blocks (e);
8262 changed = true;
8264 else
8265 ei_next (&ei);
8268 return changed;
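/* A hedged usage sketch of the pattern passes typically follow after
   simplifying a statement that could throw (maybe_clean_eh_stmt is
   declared in tree-eh.h):

     if (maybe_clean_eh_stmt (stmt)
	 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       cfg_changed = true;  */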
8271 /* Purge dead EH edges from basic blocks listed in BLOCKS. */
8273 bool
8274 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8276 bool changed = false;
8277 unsigned i;
8278 bitmap_iterator bi;
8280 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8282 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8284 /* Earlier gimple_purge_dead_eh_edges could have removed
8285 this basic block already. */
8286 gcc_assert (bb || changed);
8287 if (bb != NULL)
8288 changed |= gimple_purge_dead_eh_edges (bb);
8291 return changed;
8294 /* Purge dead abnormal call edges from basic block BB. */
8296 bool
8297 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8299 bool changed = false;
8300 edge e;
8301 edge_iterator ei;
8302 gimple *stmt = last_stmt (bb);
8304 if (!cfun->has_nonlocal_label
8305 && !cfun->calls_setjmp)
8306 return false;
8308 if (stmt && stmt_can_make_abnormal_goto (stmt))
8309 return false;
8311 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8313 if (e->flags & EDGE_ABNORMAL)
8315 if (e->flags & EDGE_FALLTHRU)
8316 e->flags &= ~EDGE_ABNORMAL;
8317 else
8318 remove_edge_and_dominated_blocks (e);
8319 changed = true;
8321 else
8322 ei_next (&ei);
8325 return changed;
8328 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS. */
8330 bool
8331 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8333 bool changed = false;
8334 unsigned i;
8335 bitmap_iterator bi;
8337 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8339 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8341 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8342 this basic block already. */
8343 gcc_assert (bb || changed);
8344 if (bb != NULL)
8345 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8348 return changed;
8351 /* This function is called whenever a new edge is created or
8352 redirected. */
8354 static void
8355 gimple_execute_on_growing_pred (edge e)
8357 basic_block bb = e->dest;
8359 if (!gimple_seq_empty_p (phi_nodes (bb)))
8360 reserve_phi_args_for_new_edge (bb);
8363 /* This function is called immediately before edge E is removed from
8364 the edge vector E->dest->preds. */
8366 static void
8367 gimple_execute_on_shrinking_pred (edge e)
8369 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8370 remove_phi_args (e);
8373 /*---------------------------------------------------------------------------
8374 Helper functions for Loop versioning
8375 ---------------------------------------------------------------------------*/
8377 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8378 of 'first'. Both of them are dominated by 'new_head' basic block. When
8379 'new_head' was created by splitting 'second's incoming edge, it received
8380 phi arguments on that edge from split_edge (). Later, an additional edge
8381 'e' was created to connect 'new_head' and 'first'. Now this routine adds
8382 to edge 'e' the same phi args that the new_head-to-second edge received
8383 as part of the edge splitting. */
8385 static void
8386 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8387 basic_block new_head, edge e)
8389 gphi *phi1, *phi2;
8390 gphi_iterator psi1, psi2;
8391 tree def;
8392 edge e2 = find_edge (new_head, second);
8394 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8395 edge, we should always have an edge from NEW_HEAD to SECOND. */
8396 gcc_assert (e2 != NULL);
8398 /* Browse all 'second' basic block phi nodes and add phi args to
8399 edge 'e' for 'first' head. PHI args are always in correct order. */
8401 for (psi2 = gsi_start_phis (second),
8402 psi1 = gsi_start_phis (first);
8403 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8404 gsi_next (&psi2), gsi_next (&psi1))
8406 phi1 = psi1.phi ();
8407 phi2 = psi2.phi ();
8408 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8409 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8414 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8415 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8416 the destination of the ELSE part. */
8418 static void
8419 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8420 basic_block second_head ATTRIBUTE_UNUSED,
8421 basic_block cond_bb, void *cond_e)
8423 gimple_stmt_iterator gsi;
8424 gimple *new_cond_expr;
8425 tree cond_expr = (tree) cond_e;
8426 edge e0;
8428 /* Build new conditional expr */
8429 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8430 NULL_TREE, NULL_TREE);
8432 /* Add new cond in cond_bb. */
8433 gsi = gsi_last_bb (cond_bb);
8434 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8436 /* Adjust edges appropriately to connect new head with first head
8437 as well as second head. */
8438 e0 = single_succ_edge (cond_bb);
8439 e0->flags &= ~EDGE_FALLTHRU;
8440 e0->flags |= EDGE_FALSE_VALUE;
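
/* A minimal sketch (hypothetical helper, for illustration only) of
   building the same kind of GIMPLE_COND by hand: the condition tree is
   turned into a gcond with no explicit labels, just as
   gimple_lv_add_condition_to_bb does above.  Assumes A and B are
   GIMPLE values of compatible types.  */

static gcond *
example_build_lt_cond (tree a, tree b)
{
  /* Fold A < B into a boolean condition tree ...  */
  tree cond_expr = build2 (LT_EXPR, boolean_type_node, a, b);
  /* ... and lower it to a GIMPLE_COND; the TRUE/FALSE flags on the
     successor edges, not labels, determine the targets.  */
  return gimple_build_cond_from_tree (cond_expr, NULL_TREE, NULL_TREE);
}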
8444 /* Do book-keeping of basic block BB for the profile consistency checker.
8445 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
8446 post-pass accounting. Store the counts in RECORD. */
8447 static void
8448 gimple_account_profile_record (basic_block bb, int after_pass,
8449 struct profile_record *record)
8451 gimple_stmt_iterator i;
8452 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8454 record->size[after_pass]
8455 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8456 if (profile_status_for_fn (cfun) == PROFILE_READ)
8457 record->time[after_pass]
8458 += estimate_num_insns (gsi_stmt (i),
8459 &eni_time_weights) * bb->count;
8460 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8461 record->time[after_pass]
8462 += estimate_num_insns (gsi_stmt (i),
8463 &eni_time_weights) * bb->frequency;
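
/* Worked example of the accounting above (illustrative numbers only):
   a block whose two statements are estimated at 2 and 3 size units adds
   5 to record->size[after_pass]; under PROFILE_READ with bb->count of
   1000, the same statements add 1000 times their time-weight estimates
   to record->time[after_pass].  Comparing the pre-pass and post-pass
   totals is what lets the checker flag passes that destroy profile
   consistency.  */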
8467 struct cfg_hooks gimple_cfg_hooks = {
8468 "gimple",
8469 gimple_verify_flow_info,
8470 gimple_dump_bb, /* dump_bb */
8471 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8472 create_bb, /* create_basic_block */
8473 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8474 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8475 gimple_can_remove_branch_p, /* can_remove_branch_p */
8476 remove_bb, /* delete_basic_block */
8477 gimple_split_block, /* split_block */
8478 gimple_move_block_after, /* move_block_after */
8479 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8480 gimple_merge_blocks, /* merge_blocks */
8481 gimple_predict_edge, /* predict_edge */
8482 gimple_predicted_by_p, /* predicted_by_p */
8483 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8484 gimple_duplicate_bb, /* duplicate_block */
8485 gimple_split_edge, /* split_edge */
8486 gimple_make_forwarder_block, /* make_forwarder_block */
8487 NULL, /* tidy_fallthru_edge */
8488 NULL, /* force_nonfallthru */
8489 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8490 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8491 gimple_flow_call_edges_add, /* flow_call_edges_add */
8492 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8493 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8494 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8495 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8496 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8497 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8498 flush_pending_stmts, /* flush_pending_stmts */
8499 gimple_empty_block_p, /* block_empty_p */
8500 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8501 gimple_account_profile_record,
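
/* Illustrative note: generic CFG routines dispatch through the table
   above once it is registered for the current IR.  For example
   (sketch only, assuming E is a non-abnormal GIMPLE edge):

     gimple_register_cfg_hooks ();
     basic_block new_bb = split_edge (e);

   lands in gimple_split_edge; see cfghooks.c for the wrappers that
   perform the dispatch.  */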
8505 /* Split all critical edges. */
8507 unsigned int
8508 split_critical_edges (void)
8510 basic_block bb;
8511 edge e;
8512 edge_iterator ei;
8514 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8515 expensive. So we want to enable recording of edge-to-CASE_LABEL_EXPR
8516 mappings around the calls to split_edge. */
8517 start_recording_case_labels ();
8518 FOR_ALL_BB_FN (bb, cfun)
8520 FOR_EACH_EDGE (e, ei, bb->succs)
8522 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8523 split_edge (e);
8524 /* PRE inserts statements on edges and expects that, because
8525 split_critical_edges was run beforehand, committing those edge
8526 insertions will not split any more edges. In addition to critical
8527 edges we must therefore split edges whose destination has multiple
8528 predecessors or PHI nodes (or is the exit block) and whose source
8529 ends in a control flow statement, such as RESX. Go ahead and split
8530 them too. This matches the logic in gimple_find_edge_insert_loc. */
8531 else if ((!single_pred_p (e->dest)
8532 || !gimple_seq_empty_p (phi_nodes (e->dest))
8533 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8534 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8535 && !(e->flags & EDGE_ABNORMAL))
8537 gimple_stmt_iterator gsi;
8539 gsi = gsi_last_bb (e->src);
8540 if (!gsi_end_p (gsi)
8541 && stmt_ends_bb_p (gsi_stmt (gsi))
8542 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8543 && !gimple_call_builtin_p (gsi_stmt (gsi),
8544 BUILT_IN_RETURN)))
8545 split_edge (e);
8549 end_recording_case_labels ();
8550 return 0;
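
/* For reference, an edge is critical when its source has multiple
   successors and its destination has multiple predecessors, so nothing
   can be inserted on the edge without affecting some other path;
   basic-block.h defines the predicate along these lines:

     EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2

   split_edge places a fresh block on such an edge so that later edge
   insertions (e.g. from PRE) have a safe landing spot.  */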
8553 namespace {
8555 const pass_data pass_data_split_crit_edges =
8557 GIMPLE_PASS, /* type */
8558 "crited", /* name */
8559 OPTGROUP_NONE, /* optinfo_flags */
8560 TV_TREE_SPLIT_EDGES, /* tv_id */
8561 PROP_cfg, /* properties_required */
8562 PROP_no_crit_edges, /* properties_provided */
8563 0, /* properties_destroyed */
8564 0, /* todo_flags_start */
8565 0, /* todo_flags_finish */
8568 class pass_split_crit_edges : public gimple_opt_pass
8570 public:
8571 pass_split_crit_edges (gcc::context *ctxt)
8572 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8575 /* opt_pass methods: */
8576 virtual unsigned int execute (function *) { return split_critical_edges (); }
8578 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8579 }; // class pass_split_crit_edges
8581 } // anon namespace
8583 gimple_opt_pass *
8584 make_pass_split_crit_edges (gcc::context *ctxt)
8586 return new pass_split_crit_edges (ctxt);
8590 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8591 in basic block BB, splitting the block as needed and creating a
8592 new conditionally executed basic block.
8593 Return the created basic block. */
8594 basic_block
8595 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8597 edge fall = split_block (bb, stmt);
8598 gimple_stmt_iterator iter = gsi_last_bb (bb);
8599 basic_block new_bb;
8601 /* Insert cond statement. */
8602 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8603 if (gsi_end_p (iter))
8604 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8605 else
8606 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8608 /* Create conditionally executed block. */
8609 new_bb = create_empty_bb (bb);
8610 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8611 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8613 /* Fix edge for split bb. */
8614 fall->flags = EDGE_FALSE_VALUE;
8616 /* Update dominance info. */
8617 if (dom_info_available_p (CDI_DOMINATORS))
8619 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8620 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8623 /* Update loop info. */
8624 if (current_loops)
8625 add_bb_to_loop (new_bb, bb->loop_father);
8627 return new_bb;
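
/* A minimal usage sketch for insert_cond_bb (hypothetical helper and
   names): create a block after STMT that executes only when FLAG_VAL,
   a GIMPLE value of integral type, is nonzero.  */

static basic_block
example_insert_guarded_bb (basic_block bb, gimple *stmt, tree flag_val)
{
  /* Branch to the new block when FLAG_VAL != 0.  */
  gcond *cond = gimple_build_cond (NE_EXPR, flag_val,
				   build_zero_cst (TREE_TYPE (flag_val)),
				   NULL_TREE, NULL_TREE);
  /* The returned block is executed only on the true path; populate it
     via gsi_start_bb on the result if desired.  */
  return insert_cond_bb (bb, stmt, cond);
}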
8630 /* Build a ternary operation and gimplify it. Emit code before GSI.
8631 Return the gimple_val holding the result. */
8633 tree
8634 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8635 tree type, tree a, tree b, tree c)
8637 tree ret;
8638 location_t loc = gimple_location (gsi_stmt (*gsi));
8640 ret = fold_build3_loc (loc, code, type, a, b, c);
8641 STRIP_NOPS (ret);
8643 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8644 GSI_SAME_STMT);
8647 /* Build a binary operation and gimplify it. Emit code before GSI.
8648 Return the gimple_val holding the result. */
8650 tree
8651 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8652 tree type, tree a, tree b)
8654 tree ret;
8656 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8657 STRIP_NOPS (ret);
8659 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8660 GSI_SAME_STMT);
8663 /* Build a unary operation and gimplify it. Emit code before GSI.
8664 Return the gimple_val holding the result. */
8666 tree
8667 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8668 tree a)
8670 tree ret;
8672 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8673 STRIP_NOPS (ret);
8675 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8676 GSI_SAME_STMT);
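
/* Usage sketch for the helpers above (hypothetical): emit GIMPLE for
   (A + B) * C before *GSI; each call gimplifies its folded tree and
   yields a gimple_val that can feed the next one.  */

static tree
example_emit_sum_times_c (gimple_stmt_iterator *gsi, tree type,
			  tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (gsi, MULT_EXPR, type, sum, c);
}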
8681 /* Given a basic block B which ends with a conditional and has
8682 precisely two successors, determine which of the edges is taken if
8683 the conditional is true and which is taken if the conditional is
8684 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8686 void
8687 extract_true_false_edges_from_block (basic_block b,
8688 edge *true_edge,
8689 edge *false_edge)
8691 edge e = EDGE_SUCC (b, 0);
8693 if (e->flags & EDGE_TRUE_VALUE)
8695 *true_edge = e;
8696 *false_edge = EDGE_SUCC (b, 1);
8698 else
8700 *false_edge = e;
8701 *true_edge = EDGE_SUCC (b, 1);
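
/* Usage sketch (hypothetical helper): when the condition ending
   COND_BB is known to evaluate to VALUE, return the out-edge that
   remains taken.  */

static edge
example_select_taken_edge (basic_block cond_bb, bool value)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return value ? true_edge : false_edge;
}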
8706 /* From a controlling predicate in the immediate dominator DOM of
8707 PHIBLOCK, determine the edges into PHIBLOCK that are taken when the
8708 predicate evaluates to true and to false, and store them in
8709 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8710 they are non-NULL. Return true if the edges can be determined,
8711 otherwise return false. */
8713 bool
8714 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8715 edge *true_controlled_edge,
8716 edge *false_controlled_edge)
8718 basic_block bb = phiblock;
8719 edge true_edge, false_edge, tem;
8720 edge e0 = NULL, e1 = NULL;
8722 /* We have to verify that one edge into the PHI node is dominated
8723 by the true edge of the predicate block and the other edge
8724 dominated by the false edge. This ensures that the PHI argument
8725 we are going to take is completely determined by the path we
8726 take from the predicate block.
8727 We can only use BB dominance checks below if the destination of
8728 the true/false edges are dominated by their edge, thus only
8729 have a single predecessor. */
8730 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8731 tem = EDGE_PRED (bb, 0);
8732 if (tem == true_edge
8733 || (single_pred_p (true_edge->dest)
8734 && (tem->src == true_edge->dest
8735 || dominated_by_p (CDI_DOMINATORS,
8736 tem->src, true_edge->dest))))
8737 e0 = tem;
8738 else if (tem == false_edge
8739 || (single_pred_p (false_edge->dest)
8740 && (tem->src == false_edge->dest
8741 || dominated_by_p (CDI_DOMINATORS,
8742 tem->src, false_edge->dest))))
8743 e1 = tem;
8744 else
8745 return false;
8746 tem = EDGE_PRED (bb, 1);
8747 if (tem == true_edge
8748 || (single_pred_p (true_edge->dest)
8749 && (tem->src == true_edge->dest
8750 || dominated_by_p (CDI_DOMINATORS,
8751 tem->src, true_edge->dest))))
8752 e0 = tem;
8753 else if (tem == false_edge
8754 || (single_pred_p (false_edge->dest)
8755 && (tem->src == false_edge->dest
8756 || dominated_by_p (CDI_DOMINATORS,
8757 tem->src, false_edge->dest))))
8758 e1 = tem;
8759 else
8760 return false;
8761 if (!e0 || !e1)
8762 return false;
8764 if (true_controlled_edge)
8765 *true_controlled_edge = e0;
8766 if (false_controlled_edge)
8767 *false_controlled_edge = e1;
8769 return true;
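
/* Usage sketch (hypothetical helper): return the argument of PHI that
   is selected when DOM's controlling predicate evaluates to true, or
   NULL_TREE when the controlled edges cannot be determined.  */

static tree
example_phi_arg_on_true_path (basic_block dom, basic_block phiblock,
			      gphi *phi)
{
  edge true_e;
  if (!extract_true_false_controlled_edges (dom, phiblock, &true_e, NULL))
    return NULL_TREE;
  return PHI_ARG_DEF (phi, true_e->dest_idx);
}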
8774 /* Emit return warnings. */
8776 namespace {
8778 const pass_data pass_data_warn_function_return =
8780 GIMPLE_PASS, /* type */
8781 "*warn_function_return", /* name */
8782 OPTGROUP_NONE, /* optinfo_flags */
8783 TV_NONE, /* tv_id */
8784 PROP_cfg, /* properties_required */
8785 0, /* properties_provided */
8786 0, /* properties_destroyed */
8787 0, /* todo_flags_start */
8788 0, /* todo_flags_finish */
8791 class pass_warn_function_return : public gimple_opt_pass
8793 public:
8794 pass_warn_function_return (gcc::context *ctxt)
8795 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8798 /* opt_pass methods: */
8799 virtual unsigned int execute (function *);
8801 }; // class pass_warn_function_return
8803 unsigned int
8804 pass_warn_function_return::execute (function *fun)
8806 source_location location;
8807 gimple *last;
8808 edge e;
8809 edge_iterator ei;
8811 if (!targetm.warn_func_return (fun->decl))
8812 return 0;
8814 /* If we have a path to EXIT, then we do return. */
8815 if (TREE_THIS_VOLATILE (fun->decl)
8816 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8818 location = UNKNOWN_LOCATION;
8819 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8821 last = last_stmt (e->src);
8822 if ((gimple_code (last) == GIMPLE_RETURN
8823 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8824 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8825 break;
8827 if (location == UNKNOWN_LOCATION)
8828 location = cfun->function_end_locus;
8829 warning_at (location, 0, "%<noreturn%> function does return");
8832 /* If we see "return;" in some basic block, then we do reach the end
8833 without returning a value. */
8834 else if (warn_return_type
8835 && !TREE_NO_WARNING (fun->decl)
8836 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8837 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8839 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8841 gimple *last = last_stmt (e->src);
8842 greturn *return_stmt = dyn_cast <greturn *> (last);
8843 if (return_stmt
8844 && gimple_return_retval (return_stmt) == NULL
8845 && !gimple_no_warning_p (last))
8847 location = gimple_location (last);
8848 if (location == UNKNOWN_LOCATION)
8849 location = fun->function_end_locus;
8850 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8851 TREE_NO_WARNING (fun->decl) = 1;
8852 break;
8856 return 0;
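
/* For illustration, hedged examples of source (hypothetical names)
   that trip the two diagnostics above:

     int f (int x) { if (x) return 1; }
       -> "control reaches end of non-void function"  (-Wreturn-type)

     __attribute__ ((noreturn)) void g (int x) { if (x) return; }
       -> "'noreturn' function does return"  */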
8859 } // anon namespace
8861 gimple_opt_pass *
8862 make_pass_warn_function_return (gcc::context *ctxt)
8864 return new pass_warn_function_return (ctxt);
8867 /* Walk a gimplified function and warn about calls whose return value is
8868 ignored although the callee has attribute((warn_unused_result)) set.
8869 This is done before inlining, so we don't have to worry about that. */
8871 static void
8872 do_warn_unused_result (gimple_seq seq)
8874 tree fdecl, ftype;
8875 gimple_stmt_iterator i;
8877 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8879 gimple *g = gsi_stmt (i);
8881 switch (gimple_code (g))
8883 case GIMPLE_BIND:
8884 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8885 break;
8886 case GIMPLE_TRY:
8887 do_warn_unused_result (gimple_try_eval (g));
8888 do_warn_unused_result (gimple_try_cleanup (g));
8889 break;
8890 case GIMPLE_CATCH:
8891 do_warn_unused_result (gimple_catch_handler (
8892 as_a <gcatch *> (g)));
8893 break;
8894 case GIMPLE_EH_FILTER:
8895 do_warn_unused_result (gimple_eh_filter_failure (g));
8896 break;
8898 case GIMPLE_CALL:
8899 if (gimple_call_lhs (g))
8900 break;
8901 if (gimple_call_internal_p (g))
8902 break;
8904 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8905 LHS. All calls whose value is ignored should be
8906 represented like this. Look for the attribute. */
8907 fdecl = gimple_call_fndecl (g);
8908 ftype = gimple_call_fntype (g);
8910 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8912 location_t loc = gimple_location (g);
8914 if (fdecl)
8915 warning_at (loc, OPT_Wunused_result,
8916 "ignoring return value of %qD, "
8917 "declared with attribute warn_unused_result",
8918 fdecl);
8919 else
8920 warning_at (loc, OPT_Wunused_result,
8921 "ignoring return value of function "
8922 "declared with attribute warn_unused_result");
8924 break;
8926 default:
8927 /* Not a container, not a call, or a call whose value is used. */
8928 break;
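
/* Illustration (hypothetical declarations): the walk above warns for a
   naked call such as

     __attribute__ ((warn_unused_result)) int must_check (void);
     void use_it (void) { must_check (); }   -> warns (-Wunused-result)

   whereas a call whose value is consumed, e.g.
   "int r = must_check ();", has an LHS and is skipped.  */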
8933 namespace {
8935 const pass_data pass_data_warn_unused_result =
8937 GIMPLE_PASS, /* type */
8938 "*warn_unused_result", /* name */
8939 OPTGROUP_NONE, /* optinfo_flags */
8940 TV_NONE, /* tv_id */
8941 PROP_gimple_any, /* properties_required */
8942 0, /* properties_provided */
8943 0, /* properties_destroyed */
8944 0, /* todo_flags_start */
8945 0, /* todo_flags_finish */
8948 class pass_warn_unused_result : public gimple_opt_pass
8950 public:
8951 pass_warn_unused_result (gcc::context *ctxt)
8952 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8955 /* opt_pass methods: */
8956 virtual bool gate (function *) { return flag_warn_unused_result; }
8957 virtual unsigned int execute (function *)
8959 do_warn_unused_result (gimple_body (current_function_decl));
8960 return 0;
8963 }; // class pass_warn_unused_result
8965 } // anon namespace
8967 gimple_opt_pass *
8968 make_pass_warn_unused_result (gcc::context *ctxt)
8970 return new pass_warn_unused_result (ctxt);
8973 /* IPA passes, compilation of earlier functions or inlining
8974 might have changed some properties, such as marking functions nothrow,
8975 pure, const or noreturn.
8976 Remove redundant edges and basic blocks, and create new ones if necessary.
8978 This pass can't be executed as a standalone pass from the pass manager,
8979 because between inlining and this fixup verify_flow_info would fail. */
8981 unsigned int
8982 execute_fixup_cfg (void)
8984 basic_block bb;
8985 gimple_stmt_iterator gsi;
8986 int todo = 0;
8987 gcov_type count_scale;
8988 edge e;
8989 edge_iterator ei;
8990 cgraph_node *node = cgraph_node::get (current_function_decl);
8992 count_scale
8993 = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8995 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
8996 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
8997 = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
8999 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9000 e->count = apply_scale (e->count, count_scale);
9002 FOR_EACH_BB_FN (bb, cfun)
9004 bb->count = apply_scale (bb->count, count_scale);
9005 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9007 gimple *stmt = gsi_stmt (gsi);
9008 tree decl = is_gimple_call (stmt)
9009 ? gimple_call_fndecl (stmt)
9010 : NULL;
9011 if (decl)
9013 int flags = gimple_call_flags (stmt);
9014 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9016 if (gimple_purge_dead_abnormal_call_edges (bb))
9017 todo |= TODO_cleanup_cfg;
9019 if (gimple_in_ssa_p (cfun))
9021 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9022 update_stmt (stmt);
9026 if (flags & ECF_NORETURN
9027 && fixup_noreturn_call (stmt))
9028 todo |= TODO_cleanup_cfg;
9031 /* Remove stores to variables we marked write-only.
9032 Keep the access when the store has side effects, i.e. when the
9033 source is volatile. */
9034 if (gimple_store_p (stmt)
9035 && !gimple_has_side_effects (stmt))
9037 tree lhs = get_base_address (gimple_get_lhs (stmt));
9039 if (TREE_CODE (lhs) == VAR_DECL
9040 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9041 && varpool_node::get (lhs)->writeonly)
9043 unlink_stmt_vdef (stmt);
9044 gsi_remove (&gsi, true);
9045 release_defs (stmt);
9046 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9047 continue;
9050 /* For calls we can simply remove the LHS when it is known
9051 to be write-only. */
9052 if (is_gimple_call (stmt)
9053 && gimple_get_lhs (stmt))
9055 tree lhs = get_base_address (gimple_get_lhs (stmt));
9057 if (TREE_CODE (lhs) == VAR_DECL
9058 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9059 && varpool_node::get (lhs)->writeonly)
9061 gimple_call_set_lhs (stmt, NULL);
9062 update_stmt (stmt);
9063 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9067 if (maybe_clean_eh_stmt (stmt)
9068 && gimple_purge_dead_eh_edges (bb))
9069 todo |= TODO_cleanup_cfg;
9070 gsi_next (&gsi);
9073 FOR_EACH_EDGE (e, ei, bb->succs)
9074 e->count = apply_scale (e->count, count_scale);
9076 /* If we have a basic block with no successors that does not
9077 end with a control statement or a noreturn call, end it with
9078 a call to __builtin_unreachable. This situation can occur
9079 when inlining a noreturn call that does in fact return. */
9080 if (EDGE_COUNT (bb->succs) == 0)
9082 gimple *stmt = last_stmt (bb);
9083 if (!stmt
9084 || (!is_ctrl_stmt (stmt)
9085 && (!is_gimple_call (stmt)
9086 || !gimple_call_noreturn_p (stmt))))
9088 if (stmt && is_gimple_call (stmt))
9089 gimple_call_set_ctrl_altering (stmt, false);
9090 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9091 stmt = gimple_build_call (fndecl, 0);
9092 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9093 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9094 if (!cfun->after_inlining)
9096 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9097 int freq
9098 = compute_call_stmt_bb_frequency (current_function_decl,
9099 bb);
9100 node->create_edge (cgraph_node::get_create (fndecl),
9101 call_stmt, bb->count, freq);
9106 if (count_scale != REG_BR_PROB_BASE)
9107 compute_function_frequency ();
9109 if (current_loops
9110 && (todo & TODO_cleanup_cfg))
9111 loops_state_set (LOOPS_NEED_FIXUP);
9113 return todo;
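
/* Worked example of the profile scaling above (illustrative numbers;
   REG_BR_PROB_BASE is 10000): if the cgraph node's count is 500 while
   the entry block still records 1000, then count_scale
   = GCOV_COMPUTE_SCALE (500, 1000) = 5000, and apply_scale (c, 5000)
   = c * 5000 / 10000 halves every block and edge count, bringing the
   CFG profile back in line with the callgraph node.  */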
9116 namespace {
9118 const pass_data pass_data_fixup_cfg =
9120 GIMPLE_PASS, /* type */
9121 "fixup_cfg", /* name */
9122 OPTGROUP_NONE, /* optinfo_flags */
9123 TV_NONE, /* tv_id */
9124 PROP_cfg, /* properties_required */
9125 0, /* properties_provided */
9126 0, /* properties_destroyed */
9127 0, /* todo_flags_start */
9128 0, /* todo_flags_finish */
9131 class pass_fixup_cfg : public gimple_opt_pass
9133 public:
9134 pass_fixup_cfg (gcc::context *ctxt)
9135 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9138 /* opt_pass methods: */
9139 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9140 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9142 }; // class pass_fixup_cfg
9144 } // anon namespace
9146 gimple_opt_pass *
9147 make_pass_fixup_cfg (gcc::context *ctxt)
9149 return new pass_fixup_cfg (ctxt);
9152 /* Garbage collection support for edge_def. */
9154 extern void gt_ggc_mx (tree&);
9155 extern void gt_ggc_mx (gimple *&);
9156 extern void gt_ggc_mx (rtx&);
9157 extern void gt_ggc_mx (basic_block&);
9159 static void
9160 gt_ggc_mx (rtx_insn *& x)
9162 if (x)
9163 gt_ggc_mx_rtx_def ((void *) x);
9166 void
9167 gt_ggc_mx (edge_def *e)
9169 tree block = LOCATION_BLOCK (e->goto_locus);
9170 gt_ggc_mx (e->src);
9171 gt_ggc_mx (e->dest);
9172 if (current_ir_type () == IR_GIMPLE)
9173 gt_ggc_mx (e->insns.g);
9174 else
9175 gt_ggc_mx (e->insns.r);
9176 gt_ggc_mx (block);
9179 /* PCH support for edge_def. */
9181 extern void gt_pch_nx (tree&);
9182 extern void gt_pch_nx (gimple *&);
9183 extern void gt_pch_nx (rtx&);
9184 extern void gt_pch_nx (basic_block&);
9186 static void
9187 gt_pch_nx (rtx_insn *& x)
9189 if (x)
9190 gt_pch_nx_rtx_def ((void *) x);
9193 void
9194 gt_pch_nx (edge_def *e)
9196 tree block = LOCATION_BLOCK (e->goto_locus);
9197 gt_pch_nx (e->src);
9198 gt_pch_nx (e->dest);
9199 if (current_ir_type () == IR_GIMPLE)
9200 gt_pch_nx (e->insns.g);
9201 else
9202 gt_pch_nx (e->insns.r);
9203 gt_pch_nx (block);
9206 void
9207 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9209 tree block = LOCATION_BLOCK (e->goto_locus);
9210 op (&(e->src), cookie);
9211 op (&(e->dest), cookie);
9212 if (current_ir_type () == IR_GIMPLE)
9213 op (&(e->insns.g), cookie);
9214 else
9215 op (&(e->insns.r), cookie);
9216 op (&(block), cookie);
9219 #if CHECKING_P
9221 namespace selftest {
9223 /* Helper function for CFG selftests: create a dummy function decl
9224 and push it as cfun. */
9226 static tree
9227 push_fndecl (const char *name)
9229 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9230 /* FIXME: this uses input_location: */
9231 tree fndecl = build_fn_decl (name, fn_type);
9232 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9233 NULL_TREE, integer_type_node);
9234 DECL_RESULT (fndecl) = retval;
9235 push_struct_function (fndecl);
9236 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9237 ASSERT_TRUE (fun != NULL);
9238 init_empty_tree_cfg_for_function (fun);
9239 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9240 ASSERT_EQ (0, n_edges_for_fn (fun));
9241 return fndecl;
9244 /* These tests directly create CFGs.
9245 Compare with the static fns within tree-cfg.c:
9246 - build_gimple_cfg
9247 - make_blocks: calls create_basic_block (seq, bb);
9248 - make_edges. */
9250 /* Verify a simple CFG of the form:
9251 ENTRY -> A -> B -> C -> EXIT. */
9253 static void
9254 test_linear_chain ()
9256 gimple_register_cfg_hooks ();
9258 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9259 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9261 /* Create some empty blocks. */
9262 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9263 basic_block bb_b = create_empty_bb (bb_a);
9264 basic_block bb_c = create_empty_bb (bb_b);
9266 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9267 ASSERT_EQ (0, n_edges_for_fn (fun));
9269 /* Create some edges: a simple linear chain of BBs. */
9270 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9271 make_edge (bb_a, bb_b, 0);
9272 make_edge (bb_b, bb_c, 0);
9273 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9275 /* Verify the edges. */
9276 ASSERT_EQ (4, n_edges_for_fn (fun));
9277 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9278 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9279 ASSERT_EQ (1, bb_a->preds->length ());
9280 ASSERT_EQ (1, bb_a->succs->length ());
9281 ASSERT_EQ (1, bb_b->preds->length ());
9282 ASSERT_EQ (1, bb_b->succs->length ());
9283 ASSERT_EQ (1, bb_c->preds->length ());
9284 ASSERT_EQ (1, bb_c->succs->length ());
9285 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9286 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9288 /* Verify the dominance information
9289 Each BB in our simple chain should be dominated by the one before
9290 it. */
9291 calculate_dominance_info (CDI_DOMINATORS);
9292 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9293 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9294 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9295 ASSERT_EQ (1, dom_by_b.length ());
9296 ASSERT_EQ (bb_c, dom_by_b[0]);
9297 free_dominance_info (CDI_DOMINATORS);
9298 dom_by_b.release ();
9300 /* Similarly for post-dominance: each BB in our chain is post-dominated
9301 by the one after it. */
9302 calculate_dominance_info (CDI_POST_DOMINATORS);
9303 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9304 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9305 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9306 ASSERT_EQ (1, postdom_by_b.length ());
9307 ASSERT_EQ (bb_a, postdom_by_b[0]);
9308 free_dominance_info (CDI_POST_DOMINATORS);
9309 postdom_by_b.release ();
9311 pop_cfun ();
9314 /* Verify a simple CFG of the form:
          ENTRY
            |
            A
           / \
          /t  \f
         B     C
          \   /
           \ /
            D
            |
          EXIT.  */
9327 static void
9328 test_diamond ()
9330 gimple_register_cfg_hooks ();
9332 tree fndecl = push_fndecl ("cfg_test_diamond");
9333 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9335 /* Create some empty blocks. */
9336 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9337 basic_block bb_b = create_empty_bb (bb_a);
9338 basic_block bb_c = create_empty_bb (bb_a);
9339 basic_block bb_d = create_empty_bb (bb_b);
9341 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9342 ASSERT_EQ (0, n_edges_for_fn (fun));
9344 /* Create the edges. */
9345 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9346 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9347 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9348 make_edge (bb_b, bb_d, 0);
9349 make_edge (bb_c, bb_d, 0);
9350 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9352 /* Verify the edges. */
9353 ASSERT_EQ (6, n_edges_for_fn (fun));
9354 ASSERT_EQ (1, bb_a->preds->length ());
9355 ASSERT_EQ (2, bb_a->succs->length ());
9356 ASSERT_EQ (1, bb_b->preds->length ());
9357 ASSERT_EQ (1, bb_b->succs->length ());
9358 ASSERT_EQ (1, bb_c->preds->length ());
9359 ASSERT_EQ (1, bb_c->succs->length ());
9360 ASSERT_EQ (2, bb_d->preds->length ());
9361 ASSERT_EQ (1, bb_d->succs->length ());
9363 /* Verify the dominance information. */
9364 calculate_dominance_info (CDI_DOMINATORS);
9365 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9366 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9367 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9368 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9369 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9370 dom_by_a.release ();
9371 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9372 ASSERT_EQ (0, dom_by_b.length ());
9373 dom_by_b.release ();
9374 free_dominance_info (CDI_DOMINATORS);
9376 /* Similarly for post-dominance. */
9377 calculate_dominance_info (CDI_POST_DOMINATORS);
9378 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9379 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9380 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9381 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9382 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9383 postdom_by_d.release ();
9384 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9385 ASSERT_EQ (0, postdom_by_b.length ());
9386 postdom_by_b.release ();
9387 free_dominance_info (CDI_POST_DOMINATORS);
9389 pop_cfun ();
9392 /* Verify that we can handle a CFG containing a "complete" aka
9393 fully-connected subgraph (where each of A B C D below has edges
9394 pointing to every other node, and to itself).
9395 e.g.:
          ENTRY  EXIT
            |     ^
            |     |
            V     |
            A<--->B
            ^^   ^^
            | \ / |
            |  X  |
            | / \ |
            VV   VV
            C<--->D
   */
9411 static void
9412 test_fully_connected ()
9414 gimple_register_cfg_hooks ();
9416 tree fndecl = push_fndecl ("cfg_fully_connected");
9417 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9419 const int n = 4;
9421 /* Create some empty blocks. */
9422 auto_vec <basic_block> subgraph_nodes;
9423 for (int i = 0; i < n; i++)
9424 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9426 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9427 ASSERT_EQ (0, n_edges_for_fn (fun));
9429 /* Create the edges. */
9430 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9431 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9432 for (int i = 0; i < n; i++)
9433 for (int j = 0; j < n; j++)
9434 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9436 /* Verify the edges. */
9437 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9438 /* The first one is linked to ENTRY/EXIT as well as itself and
9439 everything else. */
9440 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9441 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9442 /* The other ones in the subgraph are linked to everything in
9443 the subgraph (including themselves). */
9444 for (int i = 1; i < n; i++)
9446 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9447 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9450 /* Verify the dominance information. */
9451 calculate_dominance_info (CDI_DOMINATORS);
9452 /* The initial block in the subgraph should be dominated by ENTRY. */
9453 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9454 get_immediate_dominator (CDI_DOMINATORS,
9455 subgraph_nodes[0]));
9456 /* Every other block in the subgraph should be dominated by the
9457 initial block. */
9458 for (int i = 1; i < n; i++)
9459 ASSERT_EQ (subgraph_nodes[0],
9460 get_immediate_dominator (CDI_DOMINATORS,
9461 subgraph_nodes[i]));
9462 free_dominance_info (CDI_DOMINATORS);
9464 /* Similarly for post-dominance. */
9465 calculate_dominance_info (CDI_POST_DOMINATORS);
9466 /* The initial block in the subgraph should be postdominated by EXIT. */
9467 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9468 get_immediate_dominator (CDI_POST_DOMINATORS,
9469 subgraph_nodes[0]));
9470 /* Every other block in the subgraph should be postdominated by the
9471 initial block, since that leads to EXIT. */
9472 for (int i = 1; i < n; i++)
9473 ASSERT_EQ (subgraph_nodes[0],
9474 get_immediate_dominator (CDI_POST_DOMINATORS,
9475 subgraph_nodes[i]));
9476 free_dominance_info (CDI_POST_DOMINATORS);
9478 pop_cfun ();
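
/* A sketch of one of the further tests suggested by the TODO at the
   end of this file (hypothetical; not yet wired into tree_cfg_c_tests
   below): a block that jumps to itself.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create a single block with a self-edge: ENTRY -> A, A -> A,
     A -> EXIT.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  pop_cfun ();
}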
9481 /* Run all of the selftests within this file. */
9483 void
9484 tree_cfg_c_tests ()
9486 test_linear_chain ();
9487 test_diamond ();
9488 test_fully_connected ();
9491 } // namespace selftest
9493 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9494 - loop
9495 - nested loops
9496 - switch statement (a block with many out-edges)
9497 - something that jumps to itself
9498 - etc */
9500 #endif /* CHECKING_P */