[ree] PR rtl-optimization/78038: Handle global register dataflow definitions in ree
[official-gcc.git] / gcc / tree-cfg.c
blobdfa82aaef7375eb3529f2e16abf19dae891566d3
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-low.h"
58 #include "tree-cfgcleanup.h"
59 #include "gimplify.h"
60 #include "attribs.h"
61 #include "selftest.h"
63 /* This file contains functions for building the Control Flow Graph (CFG)
64 for a function tree. */
66 /* Local declarations. */
68 /* Initial capacity for the basic block array. */
69 static const int initial_cfg_capacity = 20;
71 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
72 which use a particular edge. The CASE_LABEL_EXPRs are chained together
73 via their CASE_CHAIN field, which we clear after we're done with the
74 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
76 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
77 update the case vector in response to edge redirections.
79 Right now this table is set up and torn down at key points in the
80 compilation process. It would be nice if we could make the table
81 more persistent. The key is getting notification of changes to
82 the CFG (particularly edge removal, creation and redirection). */
84 static hash_map<edge, tree> *edge_to_cases;
86 /* If we record edge_to_cases, this bitmap will hold indexes
87 of basic blocks that end in a GIMPLE_SWITCH which we touched
88 due to edge manipulations. */
90 static bitmap touched_switch_bbs;
92 /* CFG statistics. */
93 struct cfg_stats_d
95 long num_merged_labels;
98 static struct cfg_stats_d cfg_stats;
100 /* Data to pass to replace_block_vars_by_duplicates_1. */
101 struct replace_decls_d
103 hash_map<tree, tree> *vars_map;
104 tree to_context;
107 /* Hash table to store last discriminator assigned for each locus. */
108 struct locus_discrim_map
110 location_t locus;
111 int discriminator;
114 /* Hashtable helpers. */
116 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
118 static inline hashval_t hash (const locus_discrim_map *);
119 static inline bool equal (const locus_discrim_map *,
120 const locus_discrim_map *);
123 /* Trivial hash function for a location_t. ITEM is a pointer to
124 a hash table entry that maps a location_t to a discriminator. */
126 inline hashval_t
127 locus_discrim_hasher::hash (const locus_discrim_map *item)
129 return LOCATION_LINE (item->locus);
132 /* Equality function for the locus-to-discriminator map. A and B
133 point to the two hash table entries to compare. */
135 inline bool
136 locus_discrim_hasher::equal (const locus_discrim_map *a,
137 const locus_discrim_map *b)
139 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
142 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
144 /* Basic blocks and flowgraphs. */
145 static void make_blocks (gimple_seq);
147 /* Edges. */
148 static void make_edges (void);
149 static void assign_discriminators (void);
150 static void make_cond_expr_edges (basic_block);
151 static void make_gimple_switch_edges (gswitch *, basic_block);
152 static bool make_goto_expr_edges (basic_block);
153 static void make_gimple_asm_edges (basic_block);
154 static edge gimple_redirect_edge_and_branch (edge, basic_block);
155 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
157 /* Various helpers. */
158 static inline bool stmt_starts_bb_p (gimple *, gimple *);
159 static int gimple_verify_flow_info (void);
160 static void gimple_make_forwarder_block (edge);
161 static gimple *first_non_label_stmt (basic_block);
162 static bool verify_gimple_transaction (gtransaction *);
163 static bool call_can_make_abnormal_goto (gimple *);
165 /* Flowgraph optimization and cleanup. */
166 static void gimple_merge_blocks (basic_block, basic_block);
167 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
168 static void remove_bb (basic_block);
169 static edge find_taken_edge_computed_goto (basic_block, tree);
170 static edge find_taken_edge_cond_expr (basic_block, tree);
171 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
172 static tree find_case_label_for_value (gswitch *, tree);
174 void
175 init_empty_tree_cfg_for_function (struct function *fn)
177 /* Initialize the basic block array. */
178 init_flow (fn);
179 profile_status_for_fn (fn) = PROFILE_ABSENT;
180 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
181 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
182 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
183 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
184 initial_cfg_capacity);
186 /* Build a mapping of labels to their associated blocks. */
187 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
188 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
189 initial_cfg_capacity);
191 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
192 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
194 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
195 = EXIT_BLOCK_PTR_FOR_FN (fn);
196 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
197 = ENTRY_BLOCK_PTR_FOR_FN (fn);
200 void
201 init_empty_tree_cfg (void)
203 init_empty_tree_cfg_for_function (cfun);
206 /*---------------------------------------------------------------------------
207 Create basic blocks
208 ---------------------------------------------------------------------------*/
210 /* Entry point to the CFG builder for trees. SEQ is the sequence of
211 statements to be added to the flowgraph. */
213 static void
214 build_gimple_cfg (gimple_seq seq)
216 /* Register specific gimple functions. */
217 gimple_register_cfg_hooks ();
219 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
221 init_empty_tree_cfg ();
223 make_blocks (seq);
225 /* Make sure there is always at least one block, even if it's empty. */
226 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
227 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
229 /* Adjust the size of the array. */
230 if (basic_block_info_for_fn (cfun)->length ()
231 < (size_t) n_basic_blocks_for_fn (cfun))
232 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
233 n_basic_blocks_for_fn (cfun));
235 /* To speed up statement iterator walks, we first purge dead labels. */
236 cleanup_dead_labels ();
238 /* Group case nodes to reduce the number of edges.
239 We do this after cleaning up dead labels because otherwise we miss
240 a lot of obvious case merging opportunities. */
241 group_case_labels ();
243 /* Create the edges of the flowgraph. */
244 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
245 make_edges ();
246 assign_discriminators ();
247 cleanup_dead_labels ();
248 delete discriminator_per_locus;
249 discriminator_per_locus = NULL;
252 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
253 them and propagate the information to LOOP. We assume that the annotations
254 come immediately before the condition in BB, if any. */
256 static void
257 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
259 gimple_stmt_iterator gsi = gsi_last_bb (bb);
260 gimple *stmt = gsi_stmt (gsi);
262 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
263 return;
265 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
267 stmt = gsi_stmt (gsi);
268 if (gimple_code (stmt) != GIMPLE_CALL)
269 break;
270 if (!gimple_call_internal_p (stmt)
271 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
272 break;
274 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
276 case annot_expr_ivdep_kind:
277 loop->safelen = INT_MAX;
278 break;
279 case annot_expr_no_vector_kind:
280 loop->dont_vectorize = true;
281 break;
282 case annot_expr_vector_kind:
283 loop->force_vectorize = true;
284 cfun->has_force_vectorize_loops = true;
285 break;
286 default:
287 gcc_unreachable ();
290 stmt = gimple_build_assign (gimple_call_lhs (stmt),
291 gimple_call_arg (stmt, 0));
292 gsi_replace (&gsi, stmt, true);
296 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
297 them and propagate the information to the loop. We assume that the
298 annotations come immediately before the condition of the loop. */
300 static void
301 replace_loop_annotate (void)
303 struct loop *loop;
304 basic_block bb;
305 gimple_stmt_iterator gsi;
306 gimple *stmt;
308 FOR_EACH_LOOP (loop, 0)
310 /* First look into the header. */
311 replace_loop_annotate_in_block (loop->header, loop);
313 /* Then look into the latch, if any. */
314 if (loop->latch)
315 replace_loop_annotate_in_block (loop->latch, loop);
318 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
319 FOR_EACH_BB_FN (bb, cfun)
321 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
323 stmt = gsi_stmt (gsi);
324 if (gimple_code (stmt) != GIMPLE_CALL)
325 continue;
326 if (!gimple_call_internal_p (stmt)
327 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
328 continue;
330 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
332 case annot_expr_ivdep_kind:
333 case annot_expr_no_vector_kind:
334 case annot_expr_vector_kind:
335 break;
336 default:
337 gcc_unreachable ();
340 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
341 stmt = gimple_build_assign (gimple_call_lhs (stmt),
342 gimple_call_arg (stmt, 0));
343 gsi_replace (&gsi, stmt, true);
349 static unsigned int
350 execute_build_cfg (void)
352 gimple_seq body = gimple_body (current_function_decl);
354 build_gimple_cfg (body);
355 gimple_set_body (current_function_decl, NULL);
356 if (dump_file && (dump_flags & TDF_DETAILS))
358 fprintf (dump_file, "Scope blocks:\n");
359 dump_scope_blocks (dump_file, dump_flags);
361 cleanup_tree_cfg ();
362 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
363 replace_loop_annotate ();
364 return 0;
367 namespace {
369 const pass_data pass_data_build_cfg =
371 GIMPLE_PASS, /* type */
372 "cfg", /* name */
373 OPTGROUP_NONE, /* optinfo_flags */
374 TV_TREE_CFG, /* tv_id */
375 PROP_gimple_leh, /* properties_required */
376 ( PROP_cfg | PROP_loops ), /* properties_provided */
377 0, /* properties_destroyed */
378 0, /* todo_flags_start */
379 0, /* todo_flags_finish */
382 class pass_build_cfg : public gimple_opt_pass
384 public:
385 pass_build_cfg (gcc::context *ctxt)
386 : gimple_opt_pass (pass_data_build_cfg, ctxt)
389 /* opt_pass methods: */
390 virtual unsigned int execute (function *) { return execute_build_cfg (); }
392 }; // class pass_build_cfg
394 } // anon namespace
396 gimple_opt_pass *
397 make_pass_build_cfg (gcc::context *ctxt)
399 return new pass_build_cfg (ctxt);
403 /* Return true if T is a computed goto. */
405 bool
406 computed_goto_p (gimple *t)
408 return (gimple_code (t) == GIMPLE_GOTO
409 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
412 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
413 the other edge points to a bb with just __builtin_unreachable ().
414 I.e. return true for C->M edge in:
415 <bb C>:
417 if (something)
418 goto <bb N>;
419 else
420 goto <bb M>;
421 <bb N>:
422 __builtin_unreachable ();
423 <bb M>: */
425 bool
426 assert_unreachable_fallthru_edge_p (edge e)
428 basic_block pred_bb = e->src;
429 gimple *last = last_stmt (pred_bb);
430 if (last && gimple_code (last) == GIMPLE_COND)
432 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
433 if (other_bb == e->dest)
434 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
435 if (EDGE_COUNT (other_bb->succs) == 0)
437 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
438 gimple *stmt;
440 if (gsi_end_p (gsi))
441 return false;
442 stmt = gsi_stmt (gsi);
443 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
445 gsi_next (&gsi);
446 if (gsi_end_p (gsi))
447 return false;
448 stmt = gsi_stmt (gsi);
450 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
453 return false;
457 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
458 could alter control flow except via eh. We initialize the flag at
459 CFG build time and only ever clear it later. */
461 static void
462 gimple_call_initialize_ctrl_altering (gimple *stmt)
464 int flags = gimple_call_flags (stmt);
466 /* A call alters control flow if it can make an abnormal goto. */
467 if (call_can_make_abnormal_goto (stmt)
468 /* A call also alters control flow if it does not return. */
469 || flags & ECF_NORETURN
470 /* TM ending statements have backedges out of the transaction.
471 Return true so we split the basic block containing them.
472 Note that the TM_BUILTIN test is merely an optimization. */
473 || ((flags & ECF_TM_BUILTIN)
474 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
475 /* BUILT_IN_RETURN call is same as return statement. */
476 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
477 /* IFN_UNIQUE should be the last insn, to make checking for it
478 as cheap as possible. */
479 || (gimple_call_internal_p (stmt)
480 && gimple_call_internal_unique_p (stmt)))
481 gimple_call_set_ctrl_altering (stmt, true);
482 else
483 gimple_call_set_ctrl_altering (stmt, false);
487 /* Insert SEQ after BB and build a flowgraph. */
489 static basic_block
490 make_blocks_1 (gimple_seq seq, basic_block bb)
492 gimple_stmt_iterator i = gsi_start (seq);
493 gimple *stmt = NULL;
494 bool start_new_block = true;
495 bool first_stmt_of_seq = true;
497 while (!gsi_end_p (i))
499 gimple *prev_stmt;
501 prev_stmt = stmt;
502 stmt = gsi_stmt (i);
504 if (stmt && is_gimple_call (stmt))
505 gimple_call_initialize_ctrl_altering (stmt);
507 /* If the statement starts a new basic block or if we have determined
508 in a previous pass that we need to create a new block for STMT, do
509 so now. */
510 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
512 if (!first_stmt_of_seq)
513 gsi_split_seq_before (&i, &seq);
514 bb = create_basic_block (seq, bb);
515 start_new_block = false;
518 /* Now add STMT to BB and create the subgraphs for special statement
519 codes. */
520 gimple_set_bb (stmt, bb);
522 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
523 next iteration. */
524 if (stmt_ends_bb_p (stmt))
526 /* If the stmt can make abnormal goto use a new temporary
527 for the assignment to the LHS. This makes sure the old value
528 of the LHS is available on the abnormal edge. Otherwise
529 we will end up with overlapping life-ranges for abnormal
530 SSA names. */
531 if (gimple_has_lhs (stmt)
532 && stmt_can_make_abnormal_goto (stmt)
533 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
535 tree lhs = gimple_get_lhs (stmt);
536 tree tmp = create_tmp_var (TREE_TYPE (lhs));
537 gimple *s = gimple_build_assign (lhs, tmp);
538 gimple_set_location (s, gimple_location (stmt));
539 gimple_set_block (s, gimple_block (stmt));
540 gimple_set_lhs (stmt, tmp);
541 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
543 DECL_GIMPLE_REG_P (tmp) = 1;
544 gsi_insert_after (&i, s, GSI_SAME_STMT);
546 start_new_block = true;
549 gsi_next (&i);
550 first_stmt_of_seq = false;
552 return bb;
555 /* Build a flowgraph for the sequence of stmts SEQ. */
557 static void
558 make_blocks (gimple_seq seq)
560 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
563 /* Create and return a new empty basic block after bb AFTER. */
565 static basic_block
566 create_bb (void *h, void *e, basic_block after)
568 basic_block bb;
570 gcc_assert (!e);
572 /* Create and initialize a new basic block. Since alloc_block uses
573 GC allocation that clears memory to allocate a basic block, we do
574 not have to clear the newly allocated basic block here. */
575 bb = alloc_block ();
577 bb->index = last_basic_block_for_fn (cfun);
578 bb->flags = BB_NEW;
579 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
581 /* Add the new block to the linked list of blocks. */
582 link_block (bb, after);
584 /* Grow the basic block array if needed. */
585 if ((size_t) last_basic_block_for_fn (cfun)
586 == basic_block_info_for_fn (cfun)->length ())
588 size_t new_size =
589 (last_basic_block_for_fn (cfun)
590 + (last_basic_block_for_fn (cfun) + 3) / 4);
591 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
594 /* Add the newly created block to the array. */
595 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
597 n_basic_blocks_for_fn (cfun)++;
598 last_basic_block_for_fn (cfun)++;
600 return bb;
604 /*---------------------------------------------------------------------------
605 Edge creation
606 ---------------------------------------------------------------------------*/
608 /* If basic block BB has an abnormal edge to a basic block
609 containing IFN_ABNORMAL_DISPATCHER internal call, return
610 that the dispatcher's basic block, otherwise return NULL. */
612 basic_block
613 get_abnormal_succ_dispatcher (basic_block bb)
615 edge e;
616 edge_iterator ei;
618 FOR_EACH_EDGE (e, ei, bb->succs)
619 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
621 gimple_stmt_iterator gsi
622 = gsi_start_nondebug_after_labels_bb (e->dest);
623 gimple *g = gsi_stmt (gsi);
624 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
625 return e->dest;
627 return NULL;
630 /* Helper function for make_edges. Create a basic block with
631 with ABNORMAL_DISPATCHER internal call in it if needed, and
632 create abnormal edges from BBS to it and from it to FOR_BB
633 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
635 static void
636 handle_abnormal_edges (basic_block *dispatcher_bbs,
637 basic_block for_bb, int *bb_to_omp_idx,
638 auto_vec<basic_block> *bbs, bool computed_goto)
640 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
641 unsigned int idx = 0;
642 basic_block bb;
643 bool inner = false;
645 if (bb_to_omp_idx)
647 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
648 if (bb_to_omp_idx[for_bb->index] != 0)
649 inner = true;
652 /* If the dispatcher has been created already, then there are basic
653 blocks with abnormal edges to it, so just make a new edge to
654 for_bb. */
655 if (*dispatcher == NULL)
657 /* Check if there are any basic blocks that need to have
658 abnormal edges to this dispatcher. If there are none, return
659 early. */
660 if (bb_to_omp_idx == NULL)
662 if (bbs->is_empty ())
663 return;
665 else
667 FOR_EACH_VEC_ELT (*bbs, idx, bb)
668 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
669 break;
670 if (bb == NULL)
671 return;
674 /* Create the dispatcher bb. */
675 *dispatcher = create_basic_block (NULL, for_bb);
676 if (computed_goto)
678 /* Factor computed gotos into a common computed goto site. Also
679 record the location of that site so that we can un-factor the
680 gotos after we have converted back to normal form. */
681 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
683 /* Create the destination of the factored goto. Each original
684 computed goto will put its desired destination into this
685 variable and jump to the label we create immediately below. */
686 tree var = create_tmp_var (ptr_type_node, "gotovar");
688 /* Build a label for the new block which will contain the
689 factored computed goto. */
690 tree factored_label_decl
691 = create_artificial_label (UNKNOWN_LOCATION);
692 gimple *factored_computed_goto_label
693 = gimple_build_label (factored_label_decl);
694 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
696 /* Build our new computed goto. */
697 gimple *factored_computed_goto = gimple_build_goto (var);
698 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
700 FOR_EACH_VEC_ELT (*bbs, idx, bb)
702 if (bb_to_omp_idx
703 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
704 continue;
706 gsi = gsi_last_bb (bb);
707 gimple *last = gsi_stmt (gsi);
709 gcc_assert (computed_goto_p (last));
711 /* Copy the original computed goto's destination into VAR. */
712 gimple *assignment
713 = gimple_build_assign (var, gimple_goto_dest (last));
714 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
716 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
717 e->goto_locus = gimple_location (last);
718 gsi_remove (&gsi, true);
721 else
723 tree arg = inner ? boolean_true_node : boolean_false_node;
724 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
725 1, arg);
726 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
727 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
729 /* Create predecessor edges of the dispatcher. */
730 FOR_EACH_VEC_ELT (*bbs, idx, bb)
732 if (bb_to_omp_idx
733 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
734 continue;
735 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
740 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
743 /* Creates outgoing edges for BB. Returns 1 when it ends with an
744 computed goto, returns 2 when it ends with a statement that
745 might return to this function via an nonlocal goto, otherwise
746 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
748 static int
749 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
751 gimple *last = last_stmt (bb);
752 bool fallthru = false;
753 int ret = 0;
755 if (!last)
756 return ret;
758 switch (gimple_code (last))
760 case GIMPLE_GOTO:
761 if (make_goto_expr_edges (bb))
762 ret = 1;
763 fallthru = false;
764 break;
765 case GIMPLE_RETURN:
767 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
768 e->goto_locus = gimple_location (last);
769 fallthru = false;
771 break;
772 case GIMPLE_COND:
773 make_cond_expr_edges (bb);
774 fallthru = false;
775 break;
776 case GIMPLE_SWITCH:
777 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
778 fallthru = false;
779 break;
780 case GIMPLE_RESX:
781 make_eh_edges (last);
782 fallthru = false;
783 break;
784 case GIMPLE_EH_DISPATCH:
785 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
786 break;
788 case GIMPLE_CALL:
789 /* If this function receives a nonlocal goto, then we need to
790 make edges from this call site to all the nonlocal goto
791 handlers. */
792 if (stmt_can_make_abnormal_goto (last))
793 ret = 2;
795 /* If this statement has reachable exception handlers, then
796 create abnormal edges to them. */
797 make_eh_edges (last);
799 /* BUILTIN_RETURN is really a return statement. */
800 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
802 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
803 fallthru = false;
805 /* Some calls are known not to return. */
806 else
807 fallthru = !gimple_call_noreturn_p (last);
808 break;
810 case GIMPLE_ASSIGN:
811 /* A GIMPLE_ASSIGN may throw internally and thus be considered
812 control-altering. */
813 if (is_ctrl_altering_stmt (last))
814 make_eh_edges (last);
815 fallthru = true;
816 break;
818 case GIMPLE_ASM:
819 make_gimple_asm_edges (bb);
820 fallthru = true;
821 break;
823 CASE_GIMPLE_OMP:
824 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
825 break;
827 case GIMPLE_TRANSACTION:
829 gtransaction *txn = as_a <gtransaction *> (last);
830 tree label1 = gimple_transaction_label_norm (txn);
831 tree label2 = gimple_transaction_label_uninst (txn);
833 if (label1)
834 make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
835 if (label2)
836 make_edge (bb, label_to_block (label2),
837 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
839 tree label3 = gimple_transaction_label_over (txn);
840 if (gimple_transaction_subcode (txn)
841 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
842 make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);
844 fallthru = false;
846 break;
848 default:
849 gcc_assert (!stmt_ends_bb_p (last));
850 fallthru = true;
851 break;
854 if (fallthru)
855 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
857 return ret;
860 /* Join all the blocks in the flowgraph. */
862 static void
863 make_edges (void)
865 basic_block bb;
866 struct omp_region *cur_region = NULL;
867 auto_vec<basic_block> ab_edge_goto;
868 auto_vec<basic_block> ab_edge_call;
869 int *bb_to_omp_idx = NULL;
870 int cur_omp_region_idx = 0;
872 /* Create an edge from entry to the first block with executable
873 statements in it. */
874 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
875 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
876 EDGE_FALLTHRU);
878 /* Traverse the basic block array placing edges. */
879 FOR_EACH_BB_FN (bb, cfun)
881 int mer;
883 if (bb_to_omp_idx)
884 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
886 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
887 if (mer == 1)
888 ab_edge_goto.safe_push (bb);
889 else if (mer == 2)
890 ab_edge_call.safe_push (bb);
892 if (cur_region && bb_to_omp_idx == NULL)
893 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
896 /* Computed gotos are hell to deal with, especially if there are
897 lots of them with a large number of destinations. So we factor
898 them to a common computed goto location before we build the
899 edge list. After we convert back to normal form, we will un-factor
900 the computed gotos since factoring introduces an unwanted jump.
901 For non-local gotos and abnormal edges from calls to calls that return
902 twice or forced labels, factor the abnormal edges too, by having all
903 abnormal edges from the calls go to a common artificial basic block
904 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
905 basic block to all forced labels and calls returning twice.
906 We do this per-OpenMP structured block, because those regions
907 are guaranteed to be single entry single exit by the standard,
908 so it is not allowed to enter or exit such regions abnormally this way,
909 thus all computed gotos, non-local gotos and setjmp/longjmp calls
910 must not transfer control across SESE region boundaries. */
911 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
913 gimple_stmt_iterator gsi;
914 basic_block dispatcher_bb_array[2] = { NULL, NULL };
915 basic_block *dispatcher_bbs = dispatcher_bb_array;
916 int count = n_basic_blocks_for_fn (cfun);
918 if (bb_to_omp_idx)
919 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
921 FOR_EACH_BB_FN (bb, cfun)
923 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
925 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
926 tree target;
928 if (!label_stmt)
929 break;
931 target = gimple_label_label (label_stmt);
933 /* Make an edge to every label block that has been marked as a
934 potential target for a computed goto or a non-local goto. */
935 if (FORCED_LABEL (target))
936 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
937 &ab_edge_goto, true);
938 if (DECL_NONLOCAL (target))
940 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
941 &ab_edge_call, false);
942 break;
946 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
947 gsi_next_nondebug (&gsi);
948 if (!gsi_end_p (gsi))
950 /* Make an edge to every setjmp-like call. */
951 gimple *call_stmt = gsi_stmt (gsi);
952 if (is_gimple_call (call_stmt)
953 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
954 || gimple_call_builtin_p (call_stmt,
955 BUILT_IN_SETJMP_RECEIVER)))
956 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
957 &ab_edge_call, false);
961 if (bb_to_omp_idx)
962 XDELETE (dispatcher_bbs);
965 XDELETE (bb_to_omp_idx);
967 free_omp_regions ();
970 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
971 needed. Returns true if new bbs were created.
972 Note: This is transitional code, and should not be used for new code. We
973 should be able to get rid of this by rewriting all target va-arg
974 gimplification hooks to use an interface gimple_build_cond_value as described
975 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
977 bool
978 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
980 gimple *stmt = gsi_stmt (*gsi);
981 basic_block bb = gimple_bb (stmt);
982 basic_block lastbb, afterbb;
983 int old_num_bbs = n_basic_blocks_for_fn (cfun);
984 edge e;
985 lastbb = make_blocks_1 (seq, bb);
986 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
987 return false;
988 e = split_block (bb, stmt);
989 /* Move e->dest to come after the new basic blocks. */
990 afterbb = e->dest;
991 unlink_block (afterbb);
992 link_block (afterbb, lastbb);
993 redirect_edge_succ (e, bb->next_bb);
994 bb = bb->next_bb;
995 while (bb != afterbb)
997 struct omp_region *cur_region = NULL;
998 int cur_omp_region_idx = 0;
999 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1000 gcc_assert (!mer && !cur_region);
1001 add_bb_to_loop (bb, afterbb->loop_father);
1002 bb = bb->next_bb;
1004 return true;
1007 /* Find the next available discriminator value for LOCUS. The
1008 discriminator distinguishes among several basic blocks that
1009 share a common locus, allowing for more accurate sample-based
1010 profiling. */
1012 static int
1013 next_discriminator_for_locus (location_t locus)
1015 struct locus_discrim_map item;
1016 struct locus_discrim_map **slot;
1018 item.locus = locus;
1019 item.discriminator = 0;
1020 slot = discriminator_per_locus->find_slot_with_hash (
1021 &item, LOCATION_LINE (locus), INSERT);
1022 gcc_assert (slot);
1023 if (*slot == HTAB_EMPTY_ENTRY)
1025 *slot = XNEW (struct locus_discrim_map);
1026 gcc_assert (*slot);
1027 (*slot)->locus = locus;
1028 (*slot)->discriminator = 0;
1030 (*slot)->discriminator++;
1031 return (*slot)->discriminator;
1034 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1036 static bool
1037 same_line_p (location_t locus1, location_t locus2)
1039 expanded_location from, to;
1041 if (locus1 == locus2)
1042 return true;
1044 from = expand_location (locus1);
1045 to = expand_location (locus2);
1047 if (from.line != to.line)
1048 return false;
1049 if (from.file == to.file)
1050 return true;
1051 return (from.file != NULL
1052 && to.file != NULL
1053 && filename_cmp (from.file, to.file) == 0);
1056 /* Assign discriminators to each basic block. */
1058 static void
1059 assign_discriminators (void)
1061 basic_block bb;
1063 FOR_EACH_BB_FN (bb, cfun)
1065 edge e;
1066 edge_iterator ei;
1067 gimple *last = last_stmt (bb);
1068 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1070 if (locus == UNKNOWN_LOCATION)
1071 continue;
1073 FOR_EACH_EDGE (e, ei, bb->succs)
1075 gimple *first = first_non_label_stmt (e->dest);
1076 gimple *last = last_stmt (e->dest);
1077 if ((first && same_line_p (locus, gimple_location (first)))
1078 || (last && same_line_p (locus, gimple_location (last))))
1080 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1081 bb->discriminator = next_discriminator_for_locus (locus);
1082 else
1083 e->dest->discriminator = next_discriminator_for_locus (locus);
1089 /* Create the edges for a GIMPLE_COND starting at block BB. */
1091 static void
1092 make_cond_expr_edges (basic_block bb)
1094 gcond *entry = as_a <gcond *> (last_stmt (bb));
1095 gimple *then_stmt, *else_stmt;
1096 basic_block then_bb, else_bb;
1097 tree then_label, else_label;
1098 edge e;
1100 gcc_assert (entry);
1101 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1103 /* Entry basic blocks for each component. */
1104 then_label = gimple_cond_true_label (entry);
1105 else_label = gimple_cond_false_label (entry);
1106 then_bb = label_to_block (then_label);
1107 else_bb = label_to_block (else_label);
1108 then_stmt = first_stmt (then_bb);
1109 else_stmt = first_stmt (else_bb);
1111 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1112 e->goto_locus = gimple_location (then_stmt);
1113 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1114 if (e)
1115 e->goto_locus = gimple_location (else_stmt);
1117 /* We do not need the labels anymore. */
1118 gimple_cond_set_true_label (entry, NULL_TREE);
1119 gimple_cond_set_false_label (entry, NULL_TREE);
1123 /* Called for each element in the hash table (P) as we delete the
1124 edge to cases hash table.
1126 Clear all the CASE_CHAINs to prevent problems with copying of
1127 SWITCH_EXPRs and structure sharing rules, then free the hash table
1128 element. */
1130 bool
1131 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1133 tree t, next;
1135 for (t = value; t; t = next)
1137 next = CASE_CHAIN (t);
1138 CASE_CHAIN (t) = NULL;
1141 return true;
1144 /* Start recording information mapping edges to case labels. */
1146 void
1147 start_recording_case_labels (void)
1149 gcc_assert (edge_to_cases == NULL);
1150 edge_to_cases = new hash_map<edge, tree>;
1151 touched_switch_bbs = BITMAP_ALLOC (NULL);
1154 /* Return nonzero if we are recording information for case labels. */
1156 static bool
1157 recording_case_labels_p (void)
1159 return (edge_to_cases != NULL);
1162 /* Stop recording information mapping edges to case labels and
1163 remove any information we have recorded. */
1164 void
1165 end_recording_case_labels (void)
1167 bitmap_iterator bi;
1168 unsigned i;
1169 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1170 delete edge_to_cases;
1171 edge_to_cases = NULL;
1172 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1174 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1175 if (bb)
1177 gimple *stmt = last_stmt (bb);
1178 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1179 group_case_labels_stmt (as_a <gswitch *> (stmt));
1182 BITMAP_FREE (touched_switch_bbs);
1185 /* If we are inside a {start,end}_recording_cases block, then return
1186 a chain of CASE_LABEL_EXPRs from T which reference E.
1188 Otherwise return NULL. */
1190 static tree
1191 get_cases_for_edge (edge e, gswitch *t)
1193 tree *slot;
1194 size_t i, n;
1196 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1197 chains available. Return NULL so the caller can detect this case. */
1198 if (!recording_case_labels_p ())
1199 return NULL;
1201 slot = edge_to_cases->get (e);
1202 if (slot)
1203 return *slot;
1205 /* If we did not find E in the hash table, then this must be the first
1206 time we have been queried for information about E & T. Add all the
1207 elements from T to the hash table then perform the query again. */
1209 n = gimple_switch_num_labels (t);
1210 for (i = 0; i < n; i++)
1212 tree elt = gimple_switch_label (t, i);
1213 tree lab = CASE_LABEL (elt);
1214 basic_block label_bb = label_to_block (lab);
1215 edge this_edge = find_edge (e->src, label_bb);
1217 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1218 a new chain. */
1219 tree &s = edge_to_cases->get_or_insert (this_edge);
1220 CASE_CHAIN (elt) = s;
1221 s = elt;
1224 return *edge_to_cases->get (e);
1227 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1229 static void
1230 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1232 size_t i, n;
1234 n = gimple_switch_num_labels (entry);
1236 for (i = 0; i < n; ++i)
1238 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1239 basic_block label_bb = label_to_block (lab);
1240 make_edge (bb, label_bb, 0);
1245 /* Return the basic block holding label DEST. */
1247 basic_block
1248 label_to_block_fn (struct function *ifun, tree dest)
1250 int uid = LABEL_DECL_UID (dest);
1252 /* We would die hard when faced by an undefined label. Emit a label to
1253 the very first basic block. This will hopefully make even the dataflow
1254 and undefined variable warnings quite right. */
1255 if (seen_error () && uid < 0)
1257 gimple_stmt_iterator gsi =
1258 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1259 gimple *stmt;
1261 stmt = gimple_build_label (dest);
1262 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1263 uid = LABEL_DECL_UID (dest);
1265 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1266 return NULL;
1267 return (*ifun->cfg->x_label_to_block_map)[uid];
1270 /* Create edges for a goto statement at block BB. Returns true
1271 if abnormal edges should be created. */
1273 static bool
1274 make_goto_expr_edges (basic_block bb)
1276 gimple_stmt_iterator last = gsi_last_bb (bb);
1277 gimple *goto_t = gsi_stmt (last);
1279 /* A simple GOTO creates normal edges. */
1280 if (simple_goto_p (goto_t))
1282 tree dest = gimple_goto_dest (goto_t);
1283 basic_block label_bb = label_to_block (dest);
1284 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1285 e->goto_locus = gimple_location (goto_t);
1286 gsi_remove (&last, true);
1287 return false;
1290 /* A computed GOTO creates abnormal edges. */
1291 return true;
1294 /* Create edges for an asm statement with labels at block BB. */
1296 static void
1297 make_gimple_asm_edges (basic_block bb)
1299 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1300 int i, n = gimple_asm_nlabels (stmt);
1302 for (i = 0; i < n; ++i)
1304 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1305 basic_block label_bb = label_to_block (label);
1306 make_edge (bb, label_bb, 0);
1310 /*---------------------------------------------------------------------------
1311 Flowgraph analysis
1312 ---------------------------------------------------------------------------*/
1314 /* Cleanup useless labels in basic blocks. This is something we wish
1315 to do early because it allows us to group case labels before creating
1316 the edges for the CFG, and it speeds up block statement iterators in
1317 all passes later on.
1318 We rerun this pass after CFG is created, to get rid of the labels that
1319 are no longer referenced. After then we do not run it any more, since
1320 (almost) no new labels should be created. */
1322 /* A map from basic block index to the leading label of that block. */
1323 static struct label_record
1325 /* The label. */
1326 tree label;
1328 /* True if the label is referenced from somewhere. */
1329 bool used;
1330 } *label_for_bb;
1332 /* Given LABEL return the first label in the same basic block. */
1334 static tree
1335 main_block_label (tree label)
1337 basic_block bb = label_to_block (label);
1338 tree main_label = label_for_bb[bb->index].label;
1340 /* label_to_block possibly inserted undefined label into the chain. */
1341 if (!main_label)
1343 label_for_bb[bb->index].label = label;
1344 main_label = label;
1347 label_for_bb[bb->index].used = true;
1348 return main_label;
1351 /* Clean up redundant labels within the exception tree. */
1353 static void
1354 cleanup_dead_labels_eh (void)
1356 eh_landing_pad lp;
1357 eh_region r;
1358 tree lab;
1359 int i;
1361 if (cfun->eh == NULL)
1362 return;
1364 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1365 if (lp && lp->post_landing_pad)
1367 lab = main_block_label (lp->post_landing_pad);
1368 if (lab != lp->post_landing_pad)
1370 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1371 EH_LANDING_PAD_NR (lab) = lp->index;
1375 FOR_ALL_EH_REGION (r)
1376 switch (r->type)
1378 case ERT_CLEANUP:
1379 case ERT_MUST_NOT_THROW:
1380 break;
1382 case ERT_TRY:
1384 eh_catch c;
1385 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1387 lab = c->label;
1388 if (lab)
1389 c->label = main_block_label (lab);
1392 break;
1394 case ERT_ALLOWED_EXCEPTIONS:
1395 lab = r->u.allowed.label;
1396 if (lab)
1397 r->u.allowed.label = main_block_label (lab);
1398 break;
1403 /* Cleanup redundant labels. This is a three-step process:
1404 1) Find the leading label for each block.
1405 2) Redirect all references to labels to the leading labels.
1406 3) Cleanup all useless labels. */
1408 void
1409 cleanup_dead_labels (void)
1411 basic_block bb;
1412 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1414 /* Find a suitable label for each block. We use the first user-defined
1415 label if there is one, or otherwise just the first label we see. */
1416 FOR_EACH_BB_FN (bb, cfun)
1418 gimple_stmt_iterator i;
1420 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1422 tree label;
1423 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1425 if (!label_stmt)
1426 break;
1428 label = gimple_label_label (label_stmt);
1430 /* If we have not yet seen a label for the current block,
1431 remember this one and see if there are more labels. */
1432 if (!label_for_bb[bb->index].label)
1434 label_for_bb[bb->index].label = label;
1435 continue;
1438 /* If we did see a label for the current block already, but it
1439 is an artificially created label, replace it if the current
1440 label is a user defined label. */
1441 if (!DECL_ARTIFICIAL (label)
1442 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1444 label_for_bb[bb->index].label = label;
1445 break;
1450 /* Now redirect all jumps/branches to the selected label.
1451 First do so for each block ending in a control statement. */
1452 FOR_EACH_BB_FN (bb, cfun)
1454 gimple *stmt = last_stmt (bb);
1455 tree label, new_label;
1457 if (!stmt)
1458 continue;
1460 switch (gimple_code (stmt))
1462 case GIMPLE_COND:
1464 gcond *cond_stmt = as_a <gcond *> (stmt);
1465 label = gimple_cond_true_label (cond_stmt);
1466 if (label)
1468 new_label = main_block_label (label);
1469 if (new_label != label)
1470 gimple_cond_set_true_label (cond_stmt, new_label);
1473 label = gimple_cond_false_label (cond_stmt);
1474 if (label)
1476 new_label = main_block_label (label);
1477 if (new_label != label)
1478 gimple_cond_set_false_label (cond_stmt, new_label);
1481 break;
1483 case GIMPLE_SWITCH:
1485 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1486 size_t i, n = gimple_switch_num_labels (switch_stmt);
1488 /* Replace all destination labels. */
1489 for (i = 0; i < n; ++i)
1491 tree case_label = gimple_switch_label (switch_stmt, i);
1492 label = CASE_LABEL (case_label);
1493 new_label = main_block_label (label);
1494 if (new_label != label)
1495 CASE_LABEL (case_label) = new_label;
1497 break;
1500 case GIMPLE_ASM:
1502 gasm *asm_stmt = as_a <gasm *> (stmt);
1503 int i, n = gimple_asm_nlabels (asm_stmt);
1505 for (i = 0; i < n; ++i)
1507 tree cons = gimple_asm_label_op (asm_stmt, i);
1508 tree label = main_block_label (TREE_VALUE (cons));
1509 TREE_VALUE (cons) = label;
1511 break;
1514 /* We have to handle gotos until they're removed, and we don't
1515 remove them until after we've created the CFG edges. */
1516 case GIMPLE_GOTO:
1517 if (!computed_goto_p (stmt))
1519 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1520 label = gimple_goto_dest (goto_stmt);
1521 new_label = main_block_label (label);
1522 if (new_label != label)
1523 gimple_goto_set_dest (goto_stmt, new_label);
1525 break;
1527 case GIMPLE_TRANSACTION:
1529 gtransaction *txn = as_a <gtransaction *> (stmt);
1531 label = gimple_transaction_label_norm (txn);
1532 if (label)
1534 new_label = main_block_label (label);
1535 if (new_label != label)
1536 gimple_transaction_set_label_norm (txn, new_label);
1539 label = gimple_transaction_label_uninst (txn);
1540 if (label)
1542 new_label = main_block_label (label);
1543 if (new_label != label)
1544 gimple_transaction_set_label_uninst (txn, new_label);
1547 label = gimple_transaction_label_over (txn);
1548 if (label)
1550 new_label = main_block_label (label);
1551 if (new_label != label)
1552 gimple_transaction_set_label_over (txn, new_label);
1555 break;
1557 default:
1558 break;
1562 /* Do the same for the exception region tree labels. */
1563 cleanup_dead_labels_eh ();
1565 /* Finally, purge dead labels. All user-defined labels and labels that
1566 can be the target of non-local gotos and labels which have their
1567 address taken are preserved. */
1568 FOR_EACH_BB_FN (bb, cfun)
1570 gimple_stmt_iterator i;
1571 tree label_for_this_bb = label_for_bb[bb->index].label;
1573 if (!label_for_this_bb)
1574 continue;
1576 /* If the main label of the block is unused, we may still remove it. */
1577 if (!label_for_bb[bb->index].used)
1578 label_for_this_bb = NULL;
1580 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1582 tree label;
1583 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1585 if (!label_stmt)
1586 break;
1588 label = gimple_label_label (label_stmt);
1590 if (label == label_for_this_bb
1591 || !DECL_ARTIFICIAL (label)
1592 || DECL_NONLOCAL (label)
1593 || FORCED_LABEL (label))
1594 gsi_next (&i);
1595 else
1596 gsi_remove (&i, true);
1600 free (label_for_bb);
1603 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1604 the ones jumping to the same label.
1605 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1607 void
1608 group_case_labels_stmt (gswitch *stmt)
1610 int old_size = gimple_switch_num_labels (stmt);
1611 int i, j, new_size = old_size;
1612 basic_block default_bb = NULL;
1614 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1616 /* Look for possible opportunities to merge cases. */
1617 i = 1;
1618 while (i < old_size)
1620 tree base_case, base_high;
1621 basic_block base_bb;
1623 base_case = gimple_switch_label (stmt, i);
1625 gcc_assert (base_case);
1626 base_bb = label_to_block (CASE_LABEL (base_case));
1628 /* Discard cases that have the same destination as the
1629 default case. */
1630 if (base_bb == default_bb)
1632 gimple_switch_set_label (stmt, i, NULL_TREE);
1633 i++;
1634 new_size--;
1635 continue;
1638 base_high = CASE_HIGH (base_case)
1639 ? CASE_HIGH (base_case)
1640 : CASE_LOW (base_case);
1641 i++;
1643 /* Try to merge case labels. Break out when we reach the end
1644 of the label vector or when we cannot merge the next case
1645 label with the current one. */
1646 while (i < old_size)
1648 tree merge_case = gimple_switch_label (stmt, i);
1649 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1650 wide_int bhp1 = wi::add (base_high, 1);
1652 /* Merge the cases if they jump to the same place,
1653 and their ranges are consecutive. */
1654 if (merge_bb == base_bb
1655 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1657 base_high = CASE_HIGH (merge_case) ?
1658 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1659 CASE_HIGH (base_case) = base_high;
1660 gimple_switch_set_label (stmt, i, NULL_TREE);
1661 new_size--;
1662 i++;
1664 else
1665 break;
1669 /* Compress the case labels in the label vector, and adjust the
1670 length of the vector. */
1671 for (i = 0, j = 0; i < new_size; i++)
1673 while (! gimple_switch_label (stmt, j))
1674 j++;
1675 gimple_switch_set_label (stmt, i,
1676 gimple_switch_label (stmt, j++));
1679 gcc_assert (new_size <= old_size);
1680 gimple_switch_set_num_labels (stmt, new_size);
1683 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1684 and scan the sorted vector of cases. Combine the ones jumping to the
1685 same label. */
1687 void
1688 group_case_labels (void)
1690 basic_block bb;
1692 FOR_EACH_BB_FN (bb, cfun)
1694 gimple *stmt = last_stmt (bb);
1695 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1696 group_case_labels_stmt (as_a <gswitch *> (stmt));
1700 /* Checks whether we can merge block B into block A. */
1702 static bool
1703 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1705 gimple *stmt;
1707 if (!single_succ_p (a))
1708 return false;
1710 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1711 return false;
1713 if (single_succ (a) != b)
1714 return false;
1716 if (!single_pred_p (b))
1717 return false;
1719 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1720 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1721 return false;
1723 /* If A ends by a statement causing exceptions or something similar, we
1724 cannot merge the blocks. */
1725 stmt = last_stmt (a);
1726 if (stmt && stmt_ends_bb_p (stmt))
1727 return false;
1729 /* Do not allow a block with only a non-local label to be merged. */
1730 if (stmt)
1731 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1732 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1733 return false;
1735 /* Examine the labels at the beginning of B. */
1736 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1737 gsi_next (&gsi))
1739 tree lab;
1740 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1741 if (!label_stmt)
1742 break;
1743 lab = gimple_label_label (label_stmt);
1745 /* Do not remove user forced labels or for -O0 any user labels. */
1746 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1747 return false;
1750 /* Protect simple loop latches. We only want to avoid merging
1751 the latch with the loop header or with a block in another
1752 loop in this case. */
1753 if (current_loops
1754 && b->loop_father->latch == b
1755 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1756 && (b->loop_father->header == a
1757 || b->loop_father != a->loop_father))
1758 return false;
1760 /* It must be possible to eliminate all phi nodes in B. If ssa form
1761 is not up-to-date and a name-mapping is registered, we cannot eliminate
1762 any phis. Symbols marked for renaming are never a problem though. */
1763 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1764 gsi_next (&gsi))
1766 gphi *phi = gsi.phi ();
1767 /* Technically only new names matter. */
1768 if (name_registered_for_update_p (PHI_RESULT (phi)))
1769 return false;
1772 /* When not optimizing, don't merge if we'd lose goto_locus. */
1773 if (!optimize
1774 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1776 location_t goto_locus = single_succ_edge (a)->goto_locus;
1777 gimple_stmt_iterator prev, next;
1778 prev = gsi_last_nondebug_bb (a);
1779 next = gsi_after_labels (b);
1780 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1781 gsi_next_nondebug (&next);
1782 if ((gsi_end_p (prev)
1783 || gimple_location (gsi_stmt (prev)) != goto_locus)
1784 && (gsi_end_p (next)
1785 || gimple_location (gsi_stmt (next)) != goto_locus))
1786 return false;
1789 return true;
1792 /* Replaces all uses of NAME by VAL. */
1794 void
1795 replace_uses_by (tree name, tree val)
1797 imm_use_iterator imm_iter;
1798 use_operand_p use;
1799 gimple *stmt;
1800 edge e;
1802 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1804 /* Mark the block if we change the last stmt in it. */
1805 if (cfgcleanup_altered_bbs
1806 && stmt_ends_bb_p (stmt))
1807 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1809 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1811 replace_exp (use, val);
1813 if (gimple_code (stmt) == GIMPLE_PHI)
1815 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1816 PHI_ARG_INDEX_FROM_USE (use));
1817 if (e->flags & EDGE_ABNORMAL
1818 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1820 /* This can only occur for virtual operands, since
1821 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1822 would prevent replacement. */
1823 gcc_checking_assert (virtual_operand_p (name));
1824 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1829 if (gimple_code (stmt) != GIMPLE_PHI)
1831 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1832 gimple *orig_stmt = stmt;
1833 size_t i;
1835 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1836 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1837 only change sth from non-invariant to invariant, and only
1838 when propagating constants. */
1839 if (is_gimple_min_invariant (val))
1840 for (i = 0; i < gimple_num_ops (stmt); i++)
1842 tree op = gimple_op (stmt, i);
1843 /* Operands may be empty here. For example, the labels
1844 of a GIMPLE_COND are nulled out following the creation
1845 of the corresponding CFG edges. */
1846 if (op && TREE_CODE (op) == ADDR_EXPR)
1847 recompute_tree_invariant_for_addr_expr (op);
1850 if (fold_stmt (&gsi))
1851 stmt = gsi_stmt (gsi);
1853 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1854 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1856 update_stmt (stmt);
1860 gcc_checking_assert (has_zero_uses (name));
1862 /* Also update the trees stored in loop structures. */
1863 if (current_loops)
1865 struct loop *loop;
1867 FOR_EACH_LOOP (loop, 0)
1869 substitute_in_loop_info (loop, name, val);
1874 /* Merge block B into block A. */
1876 static void
1877 gimple_merge_blocks (basic_block a, basic_block b)
1879 gimple_stmt_iterator last, gsi;
1880 gphi_iterator psi;
1882 if (dump_file)
1883 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1885 /* Remove all single-valued PHI nodes from block B of the form
1886 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1887 gsi = gsi_last_bb (a);
1888 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1890 gimple *phi = gsi_stmt (psi);
1891 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1892 gimple *copy;
1893 bool may_replace_uses = (virtual_operand_p (def)
1894 || may_propagate_copy (def, use));
1896 /* In case we maintain loop closed ssa form, do not propagate arguments
1897 of loop exit phi nodes. */
1898 if (current_loops
1899 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1900 && !virtual_operand_p (def)
1901 && TREE_CODE (use) == SSA_NAME
1902 && a->loop_father != b->loop_father)
1903 may_replace_uses = false;
1905 if (!may_replace_uses)
1907 gcc_assert (!virtual_operand_p (def));
1909 /* Note that just emitting the copies is fine -- there is no problem
1910 with ordering of phi nodes. This is because A is the single
1911 predecessor of B, therefore results of the phi nodes cannot
1912 appear as arguments of the phi nodes. */
1913 copy = gimple_build_assign (def, use);
1914 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1915 remove_phi_node (&psi, false);
1917 else
1919 /* If we deal with a PHI for virtual operands, we can simply
1920 propagate these without fussing with folding or updating
1921 the stmt. */
1922 if (virtual_operand_p (def))
1924 imm_use_iterator iter;
1925 use_operand_p use_p;
1926 gimple *stmt;
1928 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1929 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1930 SET_USE (use_p, use);
1932 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1933 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1935 else
1936 replace_uses_by (def, use);
1938 remove_phi_node (&psi, true);
1942 /* Ensure that B follows A. */
1943 move_block_after (b, a);
1945 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1946 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1948 /* Remove labels from B and set gimple_bb to A for other statements. */
1949 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1951 gimple *stmt = gsi_stmt (gsi);
1952 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1954 tree label = gimple_label_label (label_stmt);
1955 int lp_nr;
1957 gsi_remove (&gsi, false);
1959 /* Now that we can thread computed gotos, we might have
1960 a situation where we have a forced label in block B
1961 However, the label at the start of block B might still be
1962 used in other ways (think about the runtime checking for
1963 Fortran assigned gotos). So we can not just delete the
1964 label. Instead we move the label to the start of block A. */
1965 if (FORCED_LABEL (label))
1967 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1968 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1970 /* Other user labels keep around in a form of a debug stmt. */
1971 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1973 gimple *dbg = gimple_build_debug_bind (label,
1974 integer_zero_node,
1975 stmt);
1976 gimple_debug_bind_reset_value (dbg);
1977 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1980 lp_nr = EH_LANDING_PAD_NR (label);
1981 if (lp_nr)
1983 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1984 lp->post_landing_pad = NULL;
1987 else
1989 gimple_set_bb (stmt, a);
1990 gsi_next (&gsi);
1994 /* When merging two BBs, if their counts are different, the larger count
1995 is selected as the new bb count. This is to handle inconsistent
1996 profiles. */
1997 if (a->loop_father == b->loop_father)
1999 a->count = MAX (a->count, b->count);
2000 a->frequency = MAX (a->frequency, b->frequency);
2003 /* Merge the sequences. */
2004 last = gsi_last_bb (a);
2005 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2006 set_bb_seq (b, NULL);
2008 if (cfgcleanup_altered_bbs)
2009 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2013 /* Return the one of BB's two successors that is not reached via a
2014 complex edge, if there is one. Else, return BB. We use
2015 this in optimizations that use post-dominators for their heuristics,
2016 to catch the cases in C++ where function calls are involved. */
2018 basic_block
2019 single_noncomplex_succ (basic_block bb)
2021 edge e0, e1;
2022 if (EDGE_COUNT (bb->succs) != 2)
2023 return bb;
2025 e0 = EDGE_SUCC (bb, 0);
2026 e1 = EDGE_SUCC (bb, 1);
2027 if (e0->flags & EDGE_COMPLEX)
2028 return e1->dest;
2029 if (e1->flags & EDGE_COMPLEX)
2030 return e0->dest;
2032 return bb;
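/* E.g. (illustrative): if BB ends in a call statement that can throw,
   one successor is reached only via an EH (complex) edge; the other,
   normal successor is the one returned here. */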
2035 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
2037 void
2038 notice_special_calls (gcall *call)
2040 int flags = gimple_call_flags (call);
2042 if (flags & ECF_MAY_BE_ALLOCA)
2043 cfun->calls_alloca = true;
2044 if (flags & ECF_RETURNS_TWICE)
2045 cfun->calls_setjmp = true;
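/* For example (illustrative), a call to a returns_twice function such
   as setjmp sets cfun->calls_setjmp here, which later makes calls in
   this function candidates for abnormal edges (see
   call_can_make_abnormal_goto below). */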
2049 /* Clear flags set by notice_special_calls. Used by dead code removal
2050 to update the flags. */
2052 void
2053 clear_special_calls (void)
2055 cfun->calls_alloca = false;
2056 cfun->calls_setjmp = false;
2059 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2061 static void
2062 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2064 /* Since this block is no longer reachable, we can just delete all
2065 of its PHI nodes. */
2066 remove_phi_nodes (bb);
2068 /* Remove edges to BB's successors. */
2069 while (EDGE_COUNT (bb->succs) > 0)
2070 remove_edge (EDGE_SUCC (bb, 0));
2074 /* Remove statements of basic block BB. */
2076 static void
2077 remove_bb (basic_block bb)
2079 gimple_stmt_iterator i;
2081 if (dump_file)
2083 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2084 if (dump_flags & TDF_DETAILS)
2086 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2087 fprintf (dump_file, "\n");
2091 if (current_loops)
2093 struct loop *loop = bb->loop_father;
2095 /* If a loop gets removed, clean up the information associated
2096 with it. */
2097 if (loop->latch == bb
2098 || loop->header == bb)
2099 free_numbers_of_iterations_estimates_loop (loop);
2102 /* Remove all the instructions in the block. */
2103 if (bb_seq (bb) != NULL)
2105 /* Walk backwards so as to get a chance to substitute all
2106 released DEFs into debug stmts. See
2107 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2108 details. */
2109 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2111 gimple *stmt = gsi_stmt (i);
2112 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2113 if (label_stmt
2114 && (FORCED_LABEL (gimple_label_label (label_stmt))
2115 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2117 basic_block new_bb;
2118 gimple_stmt_iterator new_gsi;
2120 /* A non-reachable non-local label may still be referenced.
2121 But it no longer needs to carry the extra semantics of
2122 non-locality. */
2123 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2125 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2126 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2129 new_bb = bb->prev_bb;
2130 new_gsi = gsi_start_bb (new_bb);
2131 gsi_remove (&i, false);
2132 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2134 else
2136 /* Release SSA definitions. */
2137 release_defs (stmt);
2138 gsi_remove (&i, true);
2141 if (gsi_end_p (i))
2142 i = gsi_last_bb (bb);
2143 else
2144 gsi_prev (&i);
2148 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2149 bb->il.gimple.seq = NULL;
2150 bb->il.gimple.phi_nodes = NULL;
2154 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2155 predicate VAL, return the edge that will be taken out of the block.
2156 If VAL does not match a unique edge, NULL is returned. */
2158 edge
2159 find_taken_edge (basic_block bb, tree val)
2161 gimple *stmt;
2163 stmt = last_stmt (bb);
2165 gcc_assert (stmt);
2166 gcc_assert (is_ctrl_stmt (stmt));
2168 if (val == NULL)
2169 return NULL;
2171 if (!is_gimple_min_invariant (val))
2172 return NULL;
2174 if (gimple_code (stmt) == GIMPLE_COND)
2175 return find_taken_edge_cond_expr (bb, val);
2177 if (gimple_code (stmt) == GIMPLE_SWITCH)
2178 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2180 if (computed_goto_p (stmt))
2182 /* Only optimize if the argument is a label; if the argument is
2183 not a label then we cannot construct a proper CFG.
2185 It may be the case that we only need to allow the LABEL_REF to
2186 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2187 appear inside a LABEL_EXPR just to be safe. */
2188 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2189 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2190 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2191 return NULL;
2194 gcc_unreachable ();
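/* A typical (hypothetical) use by a caller such as CFG cleanup: once
   the predicate of BB is known to be constant,

     edge taken = find_taken_edge (bb, integer_zero_node);

   yields the false edge of a GIMPLE_COND, after which the other
   outgoing edges of BB can be removed. */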
2197 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2198 statement, determine which of the outgoing edges will be taken out of the
2199 block. Return NULL if either edge may be taken. */
2201 static edge
2202 find_taken_edge_computed_goto (basic_block bb, tree val)
2204 basic_block dest;
2205 edge e = NULL;
2207 dest = label_to_block (val);
2208 if (dest)
2210 e = find_edge (bb, dest);
2211 gcc_assert (e != NULL);
2214 return e;
2217 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2218 statement, determine which of the two edges will be taken out of the
2219 block. Return NULL if either edge may be taken. */
2221 static edge
2222 find_taken_edge_cond_expr (basic_block bb, tree val)
2224 edge true_edge, false_edge;
2226 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2228 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2229 return (integer_zerop (val) ? false_edge : true_edge);
2232 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2233 statement, determine which edge will be taken out of the block. Return
2234 NULL if any edge may be taken. */
2236 static edge
2237 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2238 tree val)
2240 basic_block dest_bb;
2241 edge e;
2242 tree taken_case;
2244 taken_case = find_case_label_for_value (switch_stmt, val);
2245 dest_bb = label_to_block (CASE_LABEL (taken_case));
2247 e = find_edge (bb, dest_bb);
2248 gcc_assert (e);
2249 return e;
2253 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2254 We can make optimal use here of the fact that the case labels are
2255 sorted: We can do a binary search for a case matching VAL. */
2257 static tree
2258 find_case_label_for_value (gswitch *switch_stmt, tree val)
2260 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2261 tree default_case = gimple_switch_default_label (switch_stmt);
2263 for (low = 0, high = n; high - low > 1; )
2265 size_t i = (high + low) / 2;
2266 tree t = gimple_switch_label (switch_stmt, i);
2267 int cmp;
2269 /* Cache the result of comparing CASE_LOW and val. */
2270 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2272 if (cmp > 0)
2273 high = i;
2274 else
2275 low = i;
2277 if (CASE_HIGH (t) == NULL)
2279 /* A single-valued case label. */
2280 if (cmp == 0)
2281 return t;
2283 else
2285 /* A case range. We can only handle integer ranges. */
2286 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2287 return t;
2291 return default_case;
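/* Illustrative example (hand-written): for

     switch (x) { case 1: ...; case 5 ... 9: ...; default: ...; }

   the label vector is [default, 1, 5...9], with the non-default labels
   sorted by CASE_LOW. Looking up VAL == 7 narrows [low, high) until
   the range label 5...9 matches; VAL == 3 matches no label and the
   default case is returned. */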
2295 /* Dump a basic block on stderr. */
2297 void
2298 gimple_debug_bb (basic_block bb)
2300 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2304 /* Dump basic block with index N on stderr. */
2306 basic_block
2307 gimple_debug_bb_n (int n)
2309 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2310 return BASIC_BLOCK_FOR_FN (cfun, n);
2314 /* Dump the CFG on stderr.
2316 FLAGS are the same used by the tree dumping functions
2317 (see TDF_* in dumpfile.h). */
2319 void
2320 gimple_debug_cfg (int flags)
2322 gimple_dump_cfg (stderr, flags);
2326 /* Dump the program showing basic block boundaries on the given FILE.
2328 FLAGS are the same used by the tree dumping functions (see TDF_* in
2329 tree.h). */
2331 void
2332 gimple_dump_cfg (FILE *file, int flags)
2334 if (flags & TDF_DETAILS)
2336 dump_function_header (file, current_function_decl, flags);
2337 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2338 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2339 last_basic_block_for_fn (cfun));
2341 brief_dump_cfg (file, flags | TDF_COMMENT);
2342 fprintf (file, "\n");
2345 if (flags & TDF_STATS)
2346 dump_cfg_stats (file);
2348 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2352 /* Dump CFG statistics on FILE. */
2354 void
2355 dump_cfg_stats (FILE *file)
2357 static long max_num_merged_labels = 0;
2358 unsigned long size, total = 0;
2359 long num_edges;
2360 basic_block bb;
2361 const char * const fmt_str = "%-30s%-13s%12s\n";
2362 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2363 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2364 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2365 const char *funcname = current_function_name ();
2367 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2369 fprintf (file, "---------------------------------------------------------\n");
2370 fprintf (file, fmt_str, "", " Number of ", "Memory");
2371 fprintf (file, fmt_str, "", " instances ", "used ");
2372 fprintf (file, "---------------------------------------------------------\n");
2374 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2375 total += size;
2376 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2377 SCALE (size), LABEL (size));
2379 num_edges = 0;
2380 FOR_EACH_BB_FN (bb, cfun)
2381 num_edges += EDGE_COUNT (bb->succs);
2382 size = num_edges * sizeof (struct edge_def);
2383 total += size;
2384 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2386 fprintf (file, "---------------------------------------------------------\n");
2387 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2388 LABEL (total));
2389 fprintf (file, "---------------------------------------------------------\n");
2390 fprintf (file, "\n");
2392 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2393 max_num_merged_labels = cfg_stats.num_merged_labels;
2395 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2396 cfg_stats.num_merged_labels, max_num_merged_labels);
2398 fprintf (file, "\n");
2402 /* Dump CFG statistics on stderr. Keep extern so that it's always
2403 linked in the final executable. */
2405 DEBUG_FUNCTION void
2406 debug_cfg_stats (void)
2408 dump_cfg_stats (stderr);
2411 /*---------------------------------------------------------------------------
2412 Miscellaneous helpers
2413 ---------------------------------------------------------------------------*/
2415 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2416 flow. Transfers of control flow associated with EH are excluded. */
2418 static bool
2419 call_can_make_abnormal_goto (gimple *t)
2421 /* If the function has no non-local labels, then a call cannot make an
2422 abnormal transfer of control. */
2423 if (!cfun->has_nonlocal_label
2424 && !cfun->calls_setjmp)
2425 return false;
2427 /* Likewise if the call has no side effects. */
2428 if (!gimple_has_side_effects (t))
2429 return false;
2431 /* Likewise if the called function is leaf. */
2432 if (gimple_call_flags (t) & ECF_LEAF)
2433 return false;
2435 return true;
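/* For instance (illustrative), in a function that calls setjmp, the
   call "foo (&x);" may return abnormally to the setjmp receiver and
   therefore needs an abnormal edge -- unless foo has no side effects
   or is declared with the "leaf" attribute, in which case the checks
   above filter it out. */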
2439 /* Return true if T can make an abnormal transfer of control flow.
2440 Transfers of control flow associated with EH are excluded. */
2442 bool
2443 stmt_can_make_abnormal_goto (gimple *t)
2445 if (computed_goto_p (t))
2446 return true;
2447 if (is_gimple_call (t))
2448 return call_can_make_abnormal_goto (t);
2449 return false;
2453 /* Return true if T represents a stmt that always transfers control. */
2455 bool
2456 is_ctrl_stmt (gimple *t)
2458 switch (gimple_code (t))
2460 case GIMPLE_COND:
2461 case GIMPLE_SWITCH:
2462 case GIMPLE_GOTO:
2463 case GIMPLE_RETURN:
2464 case GIMPLE_RESX:
2465 return true;
2466 default:
2467 return false;
2472 /* Return true if T is a statement that may alter the flow of control
2473 (e.g., a call to a non-returning function). */
2475 bool
2476 is_ctrl_altering_stmt (gimple *t)
2478 gcc_assert (t);
2480 switch (gimple_code (t))
2482 case GIMPLE_CALL:
2483 /* The per-stmt call flag indicates whether the call could alter
2484 control flow. */
2485 if (gimple_call_ctrl_altering_p (t))
2486 return true;
2487 break;
2489 case GIMPLE_EH_DISPATCH:
2490 /* EH_DISPATCH branches to the individual catch handlers at
2491 this level of a try or allowed-exceptions region. It can
2492 fallthru to the next statement as well. */
2493 return true;
2495 case GIMPLE_ASM:
2496 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2497 return true;
2498 break;
2500 CASE_GIMPLE_OMP:
2501 /* OpenMP directives alter control flow. */
2502 return true;
2504 case GIMPLE_TRANSACTION:
2505 /* A transaction start alters control flow. */
2506 return true;
2508 default:
2509 break;
2512 /* If a statement can throw, it alters control flow. */
2513 return stmt_can_throw_internal (t);
2517 /* Return true if T is a simple local goto. */
2519 bool
2520 simple_goto_p (gimple *t)
2522 return (gimple_code (t) == GIMPLE_GOTO
2523 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2527 /* Return true if STMT should start a new basic block. PREV_STMT is
2528 the statement preceding STMT. It is used when STMT is a label or a
2529 case label. Labels should only start a new basic block if their
2530 previous statement wasn't a label. Otherwise, a sequence of labels
2531 would generate unnecessary basic blocks that contain only a single
2532 label. */
2534 static inline bool
2535 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2537 if (stmt == NULL)
2538 return false;
2540 /* Labels start a new basic block only if the preceding statement
2541 wasn't a label of the same type. This prevents the creation of
2542 consecutive blocks that have nothing but a single label. */
2543 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2545 /* Nonlocal and computed GOTO targets always start a new block. */
2546 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2547 || FORCED_LABEL (gimple_label_label (label_stmt)))
2548 return true;
2550 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2552 if (DECL_NONLOCAL (gimple_label_label (
2553 as_a <glabel *> (prev_stmt))))
2554 return true;
2556 cfg_stats.num_merged_labels++;
2557 return false;
2559 else
2560 return true;
2562 else if (gimple_code (stmt) == GIMPLE_CALL
2563 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2564 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2565 start a new block. */
2566 return true;
2568 return false;
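/* Illustrative example: in

     L1:
     L2:
       x_1 = y_2 + 1;

   only L1 starts a new basic block; L2 is coalesced into the same
   block and counted in cfg_stats.num_merged_labels (assuming neither
   label is nonlocal or forced). */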
2572 /* Return true if T should end a basic block. */
2574 bool
2575 stmt_ends_bb_p (gimple *t)
2577 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2580 /* Remove block annotations and other data structures. */
2582 void
2583 delete_tree_cfg_annotations (struct function *fn)
2585 vec_free (label_to_block_map_for_fn (fn));
2588 /* Return the virtual phi in BB. */
2590 gphi *
2591 get_virtual_phi (basic_block bb)
2593 for (gphi_iterator gsi = gsi_start_phis (bb);
2594 !gsi_end_p (gsi);
2595 gsi_next (&gsi))
2597 gphi *phi = gsi.phi ();
2599 if (virtual_operand_p (PHI_RESULT (phi)))
2600 return phi;
2603 return NULL;
2606 /* Return the first statement in basic block BB. */
2608 gimple *
2609 first_stmt (basic_block bb)
2611 gimple_stmt_iterator i = gsi_start_bb (bb);
2612 gimple *stmt = NULL;
2614 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2616 gsi_next (&i);
2617 stmt = NULL;
2619 return stmt;
2622 /* Return the first non-label statement in basic block BB. */
2624 static gimple *
2625 first_non_label_stmt (basic_block bb)
2627 gimple_stmt_iterator i = gsi_start_bb (bb);
2628 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2629 gsi_next (&i);
2630 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2633 /* Return the last statement in basic block BB. */
2635 gimple *
2636 last_stmt (basic_block bb)
2638 gimple_stmt_iterator i = gsi_last_bb (bb);
2639 gimple *stmt = NULL;
2641 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2643 gsi_prev (&i);
2644 stmt = NULL;
2646 return stmt;
2649 /* Return the last statement of an otherwise empty block. Return NULL
2650 if the block is totally empty, or if it contains more than one
2651 statement. */
2653 gimple *
2654 last_and_only_stmt (basic_block bb)
2656 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2657 gimple *last, *prev;
2659 if (gsi_end_p (i))
2660 return NULL;
2662 last = gsi_stmt (i);
2663 gsi_prev_nondebug (&i);
2664 if (gsi_end_p (i))
2665 return last;
2667 /* Empty statements should no longer appear in the instruction stream.
2668 Everything that might have appeared before should be deleted by
2669 remove_useless_stmts, and the optimizers should just gsi_remove
2670 instead of smashing with build_empty_stmt.
2672 Thus the only thing that should appear here in a block containing
2673 one executable statement is a label. */
2674 prev = gsi_stmt (i);
2675 if (gimple_code (prev) == GIMPLE_LABEL)
2676 return last;
2677 else
2678 return NULL;
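/* For example (a sketch): a block containing only

     L1:
       return x_1;

   yields the GIMPLE_RETURN, while a block with two executable
   statements yields NULL. */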
2681 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2683 static void
2684 reinstall_phi_args (edge new_edge, edge old_edge)
2686 edge_var_map *vm;
2687 int i;
2688 gphi_iterator phis;
2690 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2691 if (!v)
2692 return;
2694 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2695 v->iterate (i, &vm) && !gsi_end_p (phis);
2696 i++, gsi_next (&phis))
2698 gphi *phi = phis.phi ();
2699 tree result = redirect_edge_var_map_result (vm);
2700 tree arg = redirect_edge_var_map_def (vm);
2702 gcc_assert (result == gimple_phi_result (phi));
2704 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2707 redirect_edge_var_map_clear (old_edge);
2710 /* Returns the basic block after which the new basic block created
2711 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2712 near its "logical" location. This is of most help to humans looking
2713 at debugging dumps. */
2715 basic_block
2716 split_edge_bb_loc (edge edge_in)
2718 basic_block dest = edge_in->dest;
2719 basic_block dest_prev = dest->prev_bb;
2721 if (dest_prev)
2723 edge e = find_edge (dest_prev, dest);
2724 if (e && !(e->flags & EDGE_COMPLEX))
2725 return edge_in->src;
2727 return dest_prev;
2730 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2731 Abort on abnormal edges. */
2733 static basic_block
2734 gimple_split_edge (edge edge_in)
2736 basic_block new_bb, after_bb, dest;
2737 edge new_edge, e;
2739 /* Abnormal edges cannot be split. */
2740 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2742 dest = edge_in->dest;
2744 after_bb = split_edge_bb_loc (edge_in);
2746 new_bb = create_empty_bb (after_bb);
2747 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2748 new_bb->count = edge_in->count;
2749 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2750 new_edge->probability = REG_BR_PROB_BASE;
2751 new_edge->count = edge_in->count;
2753 e = redirect_edge_and_branch (edge_in, new_bb);
2754 gcc_assert (e == edge_in);
2755 reinstall_phi_args (new_edge, e);
2757 return new_bb;
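/* Illustration (hand-drawn, not from a dump):

     before:  src ---E---> dest
     after:   src ---> new_bb ---fallthru---> dest

   PHI arguments queued on E during the redirection are reinstalled on
   the new fallthru edge by reinstall_phi_args above. */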
2761 /* Verify properties of the address expression T with base object BASE. */
2763 static tree
2764 verify_address (tree t, tree base)
2766 bool old_constant;
2767 bool old_side_effects;
2768 bool new_constant;
2769 bool new_side_effects;
2771 old_constant = TREE_CONSTANT (t);
2772 old_side_effects = TREE_SIDE_EFFECTS (t);
2774 recompute_tree_invariant_for_addr_expr (t);
2775 new_side_effects = TREE_SIDE_EFFECTS (t);
2776 new_constant = TREE_CONSTANT (t);
2778 if (old_constant != new_constant)
2780 error ("constant not recomputed when ADDR_EXPR changed");
2781 return t;
2783 if (old_side_effects != new_side_effects)
2785 error ("side effects not recomputed when ADDR_EXPR changed");
2786 return t;
2789 if (!(VAR_P (base)
2790 || TREE_CODE (base) == PARM_DECL
2791 || TREE_CODE (base) == RESULT_DECL))
2792 return NULL_TREE;
2794 if (DECL_GIMPLE_REG_P (base))
2796 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2797 return base;
2800 return NULL_TREE;
2803 /* Callback for walk_tree; check that all elements with their address taken are
2804 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2805 inside a PHI node. */
2807 static tree
2808 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2810 tree t = *tp, x;
2812 if (TYPE_P (t))
2813 *walk_subtrees = 0;
2815 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2816 #define CHECK_OP(N, MSG) \
2817 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2818 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2820 switch (TREE_CODE (t))
2822 case SSA_NAME:
2823 if (SSA_NAME_IN_FREE_LIST (t))
2825 error ("SSA name in freelist but still referenced");
2826 return *tp;
2828 break;
2830 case PARM_DECL:
2831 case VAR_DECL:
2832 case RESULT_DECL:
2834 tree context = decl_function_context (t);
2835 if (context != cfun->decl
2836 && !SCOPE_FILE_SCOPE_P (context)
2837 && !TREE_STATIC (t)
2838 && !DECL_EXTERNAL (t))
2840 error ("Local declaration from a different function");
2841 return t;
2844 break;
2846 case INDIRECT_REF:
2847 error ("INDIRECT_REF in gimple IL");
2848 return t;
2850 case MEM_REF:
2851 x = TREE_OPERAND (t, 0);
2852 if (!POINTER_TYPE_P (TREE_TYPE (x))
2853 || !is_gimple_mem_ref_addr (x))
2855 error ("invalid first operand of MEM_REF");
2856 return x;
2858 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2859 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2861 error ("invalid offset operand of MEM_REF");
2862 return TREE_OPERAND (t, 1);
2864 if (TREE_CODE (x) == ADDR_EXPR)
2866 tree va = verify_address (x, TREE_OPERAND (x, 0));
2867 if (va)
2868 return va;
2869 x = TREE_OPERAND (x, 0);
2871 walk_tree (&x, verify_expr, data, NULL);
2872 *walk_subtrees = 0;
2873 break;
2875 case ASSERT_EXPR:
2876 x = fold (ASSERT_EXPR_COND (t));
2877 if (x == boolean_false_node)
2879 error ("ASSERT_EXPR with an always-false condition");
2880 return *tp;
2882 break;
2884 case MODIFY_EXPR:
2885 error ("MODIFY_EXPR not expected while having tuples");
2886 return *tp;
2888 case ADDR_EXPR:
2890 tree tem;
2892 gcc_assert (is_gimple_address (t));
2894 /* Skip any references (they will be checked when we recurse down the
2895 tree) and ensure that any variable used as a prefix is marked
2896 addressable. */
2897 for (x = TREE_OPERAND (t, 0);
2898 handled_component_p (x);
2899 x = TREE_OPERAND (x, 0))
2902 if ((tem = verify_address (t, x)))
2903 return tem;
2905 if (!(VAR_P (x)
2906 || TREE_CODE (x) == PARM_DECL
2907 || TREE_CODE (x) == RESULT_DECL))
2908 return NULL;
2910 if (!TREE_ADDRESSABLE (x))
2912 error ("address taken, but ADDRESSABLE bit not set");
2913 return x;
2916 break;
2919 case COND_EXPR:
2920 x = COND_EXPR_COND (t);
2921 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2923 error ("non-integral used in condition");
2924 return x;
2926 if (!is_gimple_condexpr (x))
2928 error ("invalid conditional operand");
2929 return x;
2931 break;
2933 case NON_LVALUE_EXPR:
2934 case TRUTH_NOT_EXPR:
2935 gcc_unreachable ();
2937 CASE_CONVERT:
2938 case FIX_TRUNC_EXPR:
2939 case FLOAT_EXPR:
2940 case NEGATE_EXPR:
2941 case ABS_EXPR:
2942 case BIT_NOT_EXPR:
2943 CHECK_OP (0, "invalid operand to unary operator");
2944 break;
2946 case REALPART_EXPR:
2947 case IMAGPART_EXPR:
2948 case BIT_FIELD_REF:
2949 if (!is_gimple_reg_type (TREE_TYPE (t)))
2951 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2952 return t;
2955 if (TREE_CODE (t) == BIT_FIELD_REF)
2957 tree t0 = TREE_OPERAND (t, 0);
2958 tree t1 = TREE_OPERAND (t, 1);
2959 tree t2 = TREE_OPERAND (t, 2);
2960 if (!tree_fits_uhwi_p (t1)
2961 || !tree_fits_uhwi_p (t2))
2963 error ("invalid position or size operand to BIT_FIELD_REF");
2964 return t;
2966 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2967 && (TYPE_PRECISION (TREE_TYPE (t))
2968 != tree_to_uhwi (t1)))
2970 error ("integral result type precision does not match "
2971 "field size of BIT_FIELD_REF");
2972 return t;
2974 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2975 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2976 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2977 != tree_to_uhwi (t1)))
2979 error ("mode size of non-integral result does not "
2980 "match field size of BIT_FIELD_REF");
2981 return t;
2983 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2984 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2985 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2987 error ("position plus size exceeds size of referenced object in "
2988 "BIT_FIELD_REF");
2989 return t;
2992 t = TREE_OPERAND (t, 0);
2994 /* Fall-through. */
2995 case COMPONENT_REF:
2996 case ARRAY_REF:
2997 case ARRAY_RANGE_REF:
2998 case VIEW_CONVERT_EXPR:
2999 /* We have a nest of references. Verify that each of the operands
3000 that determine where to reference is either a constant or a variable,
3001 verify that the base is valid, and then show we've already checked
3002 the subtrees. */
3003 while (handled_component_p (t))
3005 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3006 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3007 else if (TREE_CODE (t) == ARRAY_REF
3008 || TREE_CODE (t) == ARRAY_RANGE_REF)
3010 CHECK_OP (1, "invalid array index");
3011 if (TREE_OPERAND (t, 2))
3012 CHECK_OP (2, "invalid array lower bound");
3013 if (TREE_OPERAND (t, 3))
3014 CHECK_OP (3, "invalid array stride");
3016 else if (TREE_CODE (t) == BIT_FIELD_REF
3017 || TREE_CODE (t) == REALPART_EXPR
3018 || TREE_CODE (t) == IMAGPART_EXPR)
3020 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3021 "REALPART_EXPR");
3022 return t;
3025 t = TREE_OPERAND (t, 0);
3028 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3030 error ("invalid reference prefix");
3031 return t;
3033 walk_tree (&t, verify_expr, data, NULL);
3034 *walk_subtrees = 0;
3035 break;
3036 case PLUS_EXPR:
3037 case MINUS_EXPR:
3038 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3039 should be done using POINTER_PLUS_EXPR. */
3040 if (POINTER_TYPE_P (TREE_TYPE (t)))
3042 error ("invalid operand to plus/minus, type is a pointer");
3043 return t;
3045 CHECK_OP (0, "invalid operand to binary operator");
3046 CHECK_OP (1, "invalid operand to binary operator");
3047 break;
3049 case POINTER_PLUS_EXPR:
3050 /* Check to make sure the first operand is a pointer or reference type. */
3051 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3053 error ("invalid operand to pointer plus, first operand is not a pointer");
3054 return t;
3056 /* Check to make sure the second operand is a ptrofftype. */
3057 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3059 error ("invalid operand to pointer plus, second operand is not an "
3060 "integer type of appropriate width");
3061 return t;
3063 /* FALLTHROUGH */
3064 case LT_EXPR:
3065 case LE_EXPR:
3066 case GT_EXPR:
3067 case GE_EXPR:
3068 case EQ_EXPR:
3069 case NE_EXPR:
3070 case UNORDERED_EXPR:
3071 case ORDERED_EXPR:
3072 case UNLT_EXPR:
3073 case UNLE_EXPR:
3074 case UNGT_EXPR:
3075 case UNGE_EXPR:
3076 case UNEQ_EXPR:
3077 case LTGT_EXPR:
3078 case MULT_EXPR:
3079 case TRUNC_DIV_EXPR:
3080 case CEIL_DIV_EXPR:
3081 case FLOOR_DIV_EXPR:
3082 case ROUND_DIV_EXPR:
3083 case TRUNC_MOD_EXPR:
3084 case CEIL_MOD_EXPR:
3085 case FLOOR_MOD_EXPR:
3086 case ROUND_MOD_EXPR:
3087 case RDIV_EXPR:
3088 case EXACT_DIV_EXPR:
3089 case MIN_EXPR:
3090 case MAX_EXPR:
3091 case LSHIFT_EXPR:
3092 case RSHIFT_EXPR:
3093 case LROTATE_EXPR:
3094 case RROTATE_EXPR:
3095 case BIT_IOR_EXPR:
3096 case BIT_XOR_EXPR:
3097 case BIT_AND_EXPR:
3098 CHECK_OP (0, "invalid operand to binary operator");
3099 CHECK_OP (1, "invalid operand to binary operator");
3100 break;
3102 case CONSTRUCTOR:
3103 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3104 *walk_subtrees = 0;
3105 break;
3107 case CASE_LABEL_EXPR:
3108 if (CASE_CHAIN (t))
3110 error ("invalid CASE_CHAIN");
3111 return t;
3113 break;
3115 default:
3116 break;
3118 return NULL;
3120 #undef CHECK_OP
3124 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3125 Returns true if there is an error, otherwise false. */
3127 static bool
3128 verify_types_in_gimple_min_lval (tree expr)
3130 tree op;
3132 if (is_gimple_id (expr))
3133 return false;
3135 if (TREE_CODE (expr) != TARGET_MEM_REF
3136 && TREE_CODE (expr) != MEM_REF)
3138 error ("invalid expression for min lvalue");
3139 return true;
3142 /* TARGET_MEM_REFs are strange beasts. */
3143 if (TREE_CODE (expr) == TARGET_MEM_REF)
3144 return false;
3146 op = TREE_OPERAND (expr, 0);
3147 if (!is_gimple_val (op))
3149 error ("invalid operand in indirect reference");
3150 debug_generic_stmt (op);
3151 return true;
3153 /* Memory references now generally can involve a value conversion. */
3155 return false;
3158 /* Verify if EXPR is a valid GIMPLE reference expression. If
3159 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3160 if there is an error, otherwise false. */
3162 static bool
3163 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3165 while (handled_component_p (expr))
3167 tree op = TREE_OPERAND (expr, 0);
3169 if (TREE_CODE (expr) == ARRAY_REF
3170 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3172 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3173 || (TREE_OPERAND (expr, 2)
3174 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3175 || (TREE_OPERAND (expr, 3)
3176 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3178 error ("invalid operands to array reference");
3179 debug_generic_stmt (expr);
3180 return true;
3184 /* Verify if the reference array element types are compatible. */
3185 if (TREE_CODE (expr) == ARRAY_REF
3186 && !useless_type_conversion_p (TREE_TYPE (expr),
3187 TREE_TYPE (TREE_TYPE (op))))
3189 error ("type mismatch in array reference");
3190 debug_generic_stmt (TREE_TYPE (expr));
3191 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3192 return true;
3194 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3195 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3196 TREE_TYPE (TREE_TYPE (op))))
3198 error ("type mismatch in array range reference");
3199 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3200 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3201 return true;
3204 if ((TREE_CODE (expr) == REALPART_EXPR
3205 || TREE_CODE (expr) == IMAGPART_EXPR)
3206 && !useless_type_conversion_p (TREE_TYPE (expr),
3207 TREE_TYPE (TREE_TYPE (op))))
3209 error ("type mismatch in real/imagpart reference");
3210 debug_generic_stmt (TREE_TYPE (expr));
3211 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3212 return true;
3215 if (TREE_CODE (expr) == COMPONENT_REF
3216 && !useless_type_conversion_p (TREE_TYPE (expr),
3217 TREE_TYPE (TREE_OPERAND (expr, 1))))
3219 error ("type mismatch in component reference");
3220 debug_generic_stmt (TREE_TYPE (expr));
3221 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3222 return true;
3225 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3227 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3228 that their operand is not an SSA name or an invariant when
3229 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3230 bug). Otherwise there is nothing to verify; gross mismatches at
3231 most invoke undefined behavior. */
3232 if (require_lvalue
3233 && (TREE_CODE (op) == SSA_NAME
3234 || is_gimple_min_invariant (op)))
3236 error ("conversion of an SSA_NAME on the left hand side");
3237 debug_generic_stmt (expr);
3238 return true;
3240 else if (TREE_CODE (op) == SSA_NAME
3241 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3243 error ("conversion of register to a different size");
3244 debug_generic_stmt (expr);
3245 return true;
3247 else if (!handled_component_p (op))
3248 return false;
3251 expr = op;
3254 if (TREE_CODE (expr) == MEM_REF)
3256 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3258 error ("invalid address operand in MEM_REF");
3259 debug_generic_stmt (expr);
3260 return true;
3262 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3263 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3265 error ("invalid offset operand in MEM_REF");
3266 debug_generic_stmt (expr);
3267 return true;
3270 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3272 if (!TMR_BASE (expr)
3273 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3275 error ("invalid address operand in TARGET_MEM_REF");
3276 return true;
3278 if (!TMR_OFFSET (expr)
3279 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3280 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3282 error ("invalid offset operand in TARGET_MEM_REF");
3283 debug_generic_stmt (expr);
3284 return true;
3288 return ((require_lvalue || !is_gimple_min_invariant (expr))
3289 && verify_types_in_gimple_min_lval (expr));
3292 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3293 list of pointer-to types that is trivially convertible to DEST. */
3295 static bool
3296 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3298 tree src;
3300 if (!TYPE_POINTER_TO (src_obj))
3301 return true;
3303 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3304 if (useless_type_conversion_p (dest, src))
3305 return true;
3307 return false;
3310 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3311 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3313 static bool
3314 valid_fixed_convert_types_p (tree type1, tree type2)
3316 return (FIXED_POINT_TYPE_P (type1)
3317 && (INTEGRAL_TYPE_P (type2)
3318 || SCALAR_FLOAT_TYPE_P (type2)
3319 || FIXED_POINT_TYPE_P (type2)));
3322 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3323 is a problem, otherwise false. */
3325 static bool
3326 verify_gimple_call (gcall *stmt)
3328 tree fn = gimple_call_fn (stmt);
3329 tree fntype, fndecl;
3330 unsigned i;
3332 if (gimple_call_internal_p (stmt))
3334 if (fn)
3336 error ("gimple call has two targets");
3337 debug_generic_stmt (fn);
3338 return true;
3341 else
3343 if (!fn)
3345 error ("gimple call has no target");
3346 return true;
3350 if (fn && !is_gimple_call_addr (fn))
3352 error ("invalid function in gimple call");
3353 debug_generic_stmt (fn);
3354 return true;
3357 if (fn
3358 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3359 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3360 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3362 error ("non-function in gimple call");
3363 return true;
3366 fndecl = gimple_call_fndecl (stmt);
3367 if (fndecl
3368 && TREE_CODE (fndecl) == FUNCTION_DECL
3369 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3370 && !DECL_PURE_P (fndecl)
3371 && !TREE_READONLY (fndecl))
3373 error ("invalid pure const state for function");
3374 return true;
3377 tree lhs = gimple_call_lhs (stmt);
3378 if (lhs
3379 && (!is_gimple_lvalue (lhs)
3380 || verify_types_in_gimple_reference (lhs, true)))
3382 error ("invalid LHS in gimple call");
3383 return true;
3386 if (gimple_call_ctrl_altering_p (stmt)
3387 && gimple_call_noreturn_p (stmt)
3388 && should_remove_lhs_p (lhs))
3390 error ("LHS in noreturn call");
3391 return true;
3394 fntype = gimple_call_fntype (stmt);
3395 if (fntype
3396 && lhs
3397 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3398 /* ??? At least C++ misses conversions at assignments from
3399 void * call results.
3400 ??? Java is completely off. Especially with functions
3401 returning java.lang.Object.
3402 For now simply allow arbitrary pointer type conversions. */
3403 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3404 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3406 error ("invalid conversion in gimple call");
3407 debug_generic_stmt (TREE_TYPE (lhs));
3408 debug_generic_stmt (TREE_TYPE (fntype));
3409 return true;
3412 if (gimple_call_chain (stmt)
3413 && !is_gimple_val (gimple_call_chain (stmt)))
3415 error ("invalid static chain in gimple call");
3416 debug_generic_stmt (gimple_call_chain (stmt));
3417 return true;
3420 /* If there is a static chain argument, the call should either be
3421 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3422 if (gimple_call_chain (stmt)
3423 && fndecl
3424 && !DECL_STATIC_CHAIN (fndecl))
3426 error ("static chain with function that doesn%'t use one");
3427 return true;
3430 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3432 switch (DECL_FUNCTION_CODE (fndecl))
3434 case BUILT_IN_UNREACHABLE:
3435 case BUILT_IN_TRAP:
3436 if (gimple_call_num_args (stmt) > 0)
3438 /* Built-in unreachable with parameters might not be caught by
3439 undefined behavior sanitizer. Front-ends do check that users do not
3440 call them that way, but we also produce calls to
3441 __builtin_unreachable internally, for example when IPA figures
3442 out a call cannot happen in a legal program. In such cases,
3443 we must make sure arguments are stripped off. */
3444 error ("__builtin_unreachable or __builtin_trap call with "
3445 "arguments");
3446 return true;
3448 break;
3449 default:
3450 break;
3454 /* ??? The C frontend passes unpromoted arguments in case it
3455 didn't see a function declaration before the call. So for now
3456 leave the call arguments mostly unverified. Once we gimplify
3457 unit-at-a-time we have a chance to fix this. */
3459 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3461 tree arg = gimple_call_arg (stmt, i);
3462 if ((is_gimple_reg_type (TREE_TYPE (arg))
3463 && !is_gimple_val (arg))
3464 || (!is_gimple_reg_type (TREE_TYPE (arg))
3465 && !is_gimple_lvalue (arg)))
3467 error ("invalid argument to gimple call");
3468 debug_generic_expr (arg);
3469 return true;
3473 return false;
3476 /* Verifies the gimple comparison with the result type TYPE and
3477 the operands OP0 and OP1, comparison code is CODE. */
3479 static bool
3480 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3482 tree op0_type = TREE_TYPE (op0);
3483 tree op1_type = TREE_TYPE (op1);
3485 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3487 error ("invalid operands in gimple comparison");
3488 return true;
3491 /* For comparisons we do not have the operation's type as the
3492 effective type the comparison is carried out in. Instead
3493 we require that either the first operand is trivially
3494 convertible into the second, or the other way around.
3495 Because we special-case pointers to void we allow
3496 comparisons of pointers with the same mode as well. */
3497 if (!useless_type_conversion_p (op0_type, op1_type)
3498 && !useless_type_conversion_p (op1_type, op0_type)
3499 && (!POINTER_TYPE_P (op0_type)
3500 || !POINTER_TYPE_P (op1_type)
3501 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3503 error ("mismatching comparison operand types");
3504 debug_generic_expr (op0_type);
3505 debug_generic_expr (op1_type);
3506 return true;
3509 /* The resulting type of a comparison may be an effective boolean type. */
3510 if (INTEGRAL_TYPE_P (type)
3511 && (TREE_CODE (type) == BOOLEAN_TYPE
3512 || TYPE_PRECISION (type) == 1))
3514 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3515 || TREE_CODE (op1_type) == VECTOR_TYPE)
3516 && code != EQ_EXPR && code != NE_EXPR
3517 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3518 && !VECTOR_INTEGER_TYPE_P (op0_type))
3520 error ("unsupported operation or type for vector comparison"
3521 " returning a boolean");
3522 debug_generic_expr (op0_type);
3523 debug_generic_expr (op1_type);
3524 return true;
3527 /* Or a boolean vector type with the same element count
3528 as the comparison operand types. */
3529 else if (TREE_CODE (type) == VECTOR_TYPE
3530 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3532 if (TREE_CODE (op0_type) != VECTOR_TYPE
3533 || TREE_CODE (op1_type) != VECTOR_TYPE)
3535 error ("non-vector operands in vector comparison");
3536 debug_generic_expr (op0_type);
3537 debug_generic_expr (op1_type);
3538 return true;
3541 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3543 error ("invalid vector comparison resulting type");
3544 debug_generic_expr (type);
3545 return true;
3548 else
3550 error ("bogus comparison result type");
3551 debug_generic_expr (type);
3552 return true;
3555 return false;
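/* Hypothetical examples: "_1 = a_2 < b_3" with a boolean LHS and
   trivially convertible operand types verifies cleanly, whereas a
   vector comparison like "mask_5 = v_6 < w_7" may instead produce a
   boolean vector with as many elements as the operands. */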
3558 /* Verify a gimple assignment statement STMT with an unary rhs.
3559 Returns true if anything is wrong. */
3561 static bool
3562 verify_gimple_assign_unary (gassign *stmt)
3564 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3565 tree lhs = gimple_assign_lhs (stmt);
3566 tree lhs_type = TREE_TYPE (lhs);
3567 tree rhs1 = gimple_assign_rhs1 (stmt);
3568 tree rhs1_type = TREE_TYPE (rhs1);
3570 if (!is_gimple_reg (lhs))
3572 error ("non-register as LHS of unary operation");
3573 return true;
3576 if (!is_gimple_val (rhs1))
3578 error ("invalid operand in unary operation");
3579 return true;
3582 /* First handle conversions. */
3583 switch (rhs_code)
3585 CASE_CONVERT:
3587 /* Allow conversions from pointer type to integral type only if
3588 there is no sign or zero extension involved.
3589 For targets where the precision of ptrofftype doesn't match that
3590 of pointers we need to allow arbitrary conversions to ptrofftype. */
3591 if ((POINTER_TYPE_P (lhs_type)
3592 && INTEGRAL_TYPE_P (rhs1_type))
3593 || (POINTER_TYPE_P (rhs1_type)
3594 && INTEGRAL_TYPE_P (lhs_type)
3595 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3596 || ptrofftype_p (sizetype))))
3597 return false;
3599 /* Allow conversion from integral to offset type and vice versa. */
3600 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3601 && INTEGRAL_TYPE_P (rhs1_type))
3602 || (INTEGRAL_TYPE_P (lhs_type)
3603 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3604 return false;
3606 /* Otherwise assert we are converting between types of the
3607 same kind. */
3608 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3610 error ("invalid types in nop conversion");
3611 debug_generic_expr (lhs_type);
3612 debug_generic_expr (rhs1_type);
3613 return true;
3616 return false;
3619 case ADDR_SPACE_CONVERT_EXPR:
3621 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3622 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3623 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3625 error ("invalid types in address space conversion");
3626 debug_generic_expr (lhs_type);
3627 debug_generic_expr (rhs1_type);
3628 return true;
3631 return false;
3634 case FIXED_CONVERT_EXPR:
3636 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3637 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3639 error ("invalid types in fixed-point conversion");
3640 debug_generic_expr (lhs_type);
3641 debug_generic_expr (rhs1_type);
3642 return true;
3645 return false;
3648 case FLOAT_EXPR:
3650 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3651 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3652 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3654 error ("invalid types in conversion to floating point");
3655 debug_generic_expr (lhs_type);
3656 debug_generic_expr (rhs1_type);
3657 return true;
3660 return false;
3663 case FIX_TRUNC_EXPR:
3665 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3666 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3667 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3669 error ("invalid types in conversion to integer");
3670 debug_generic_expr (lhs_type);
3671 debug_generic_expr (rhs1_type);
3672 return true;
3675 return false;
3677 case REDUC_MAX_EXPR:
3678 case REDUC_MIN_EXPR:
3679 case REDUC_PLUS_EXPR:
3680 if (!VECTOR_TYPE_P (rhs1_type)
3681 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3683 error ("reduction should convert from vector to element type");
3684 debug_generic_expr (lhs_type);
3685 debug_generic_expr (rhs1_type);
3686 return true;
3688 return false;
3690 case VEC_UNPACK_HI_EXPR:
3691 case VEC_UNPACK_LO_EXPR:
3692 case VEC_UNPACK_FLOAT_HI_EXPR:
3693 case VEC_UNPACK_FLOAT_LO_EXPR:
3694 /* FIXME. */
3695 return false;
3697 case NEGATE_EXPR:
3698 case ABS_EXPR:
3699 case BIT_NOT_EXPR:
3700 case PAREN_EXPR:
3701 case CONJ_EXPR:
3702 break;
3704 default:
3705 gcc_unreachable ();
3708 /* For the remaining codes assert there is no conversion involved. */
3709 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3711 error ("non-trivial conversion in unary operation");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 return true;
3717 return false;
3720 /* Verify a gimple assignment statement STMT with a binary rhs.
3721 Returns true if anything is wrong. */
3723 static bool
3724 verify_gimple_assign_binary (gassign *stmt)
3726 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3727 tree lhs = gimple_assign_lhs (stmt);
3728 tree lhs_type = TREE_TYPE (lhs);
3729 tree rhs1 = gimple_assign_rhs1 (stmt);
3730 tree rhs1_type = TREE_TYPE (rhs1);
3731 tree rhs2 = gimple_assign_rhs2 (stmt);
3732 tree rhs2_type = TREE_TYPE (rhs2);
3734 if (!is_gimple_reg (lhs))
3736 error ("non-register as LHS of binary operation");
3737 return true;
3740 if (!is_gimple_val (rhs1)
3741 || !is_gimple_val (rhs2))
3743 error ("invalid operands in binary operation");
3744 return true;
3747 /* First handle operations that involve different types. */
3748 switch (rhs_code)
3750 case COMPLEX_EXPR:
3752 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3753 || !(INTEGRAL_TYPE_P (rhs1_type)
3754 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3755 || !(INTEGRAL_TYPE_P (rhs2_type)
3756 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3758 error ("type mismatch in complex expression");
3759 debug_generic_expr (lhs_type);
3760 debug_generic_expr (rhs1_type);
3761 debug_generic_expr (rhs2_type);
3762 return true;
3765 return false;
3768 case LSHIFT_EXPR:
3769 case RSHIFT_EXPR:
3770 case LROTATE_EXPR:
3771 case RROTATE_EXPR:
3773 /* Shifts and rotates are ok on integral types, fixed point
3774 types and integer vector types. */
3775 if ((!INTEGRAL_TYPE_P (rhs1_type)
3776 && !FIXED_POINT_TYPE_P (rhs1_type)
3777 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3778 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3779 || (!INTEGRAL_TYPE_P (rhs2_type)
3780 /* Vector shifts of vectors are also ok. */
3781 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3782 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3783 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3784 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3785 || !useless_type_conversion_p (lhs_type, rhs1_type))
3787 error ("type mismatch in shift expression");
3788 debug_generic_expr (lhs_type);
3789 debug_generic_expr (rhs1_type);
3790 debug_generic_expr (rhs2_type);
3791 return true;
3794 return false;
3797 case WIDEN_LSHIFT_EXPR:
3799 if (!INTEGRAL_TYPE_P (lhs_type)
3800 || !INTEGRAL_TYPE_P (rhs1_type)
3801 || TREE_CODE (rhs2) != INTEGER_CST
3802 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3804 error ("type mismatch in widening vector shift expression");
3805 debug_generic_expr (lhs_type);
3806 debug_generic_expr (rhs1_type);
3807 debug_generic_expr (rhs2_type);
3808 return true;
3811 return false;
3814 case VEC_WIDEN_LSHIFT_HI_EXPR:
3815 case VEC_WIDEN_LSHIFT_LO_EXPR:
3817 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3818 || TREE_CODE (lhs_type) != VECTOR_TYPE
3819 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3820 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3821 || TREE_CODE (rhs2) != INTEGER_CST
3822 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3823 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3825 error ("type mismatch in widening vector shift expression");
3826 debug_generic_expr (lhs_type);
3827 debug_generic_expr (rhs1_type);
3828 debug_generic_expr (rhs2_type);
3829 return true;
3832 return false;
3835 case PLUS_EXPR:
3836 case MINUS_EXPR:
3838 tree lhs_etype = lhs_type;
3839 tree rhs1_etype = rhs1_type;
3840 tree rhs2_etype = rhs2_type;
3841 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3843 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3844 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3846 error ("invalid non-vector operands to vector valued plus");
3847 return true;
3849 lhs_etype = TREE_TYPE (lhs_type);
3850 rhs1_etype = TREE_TYPE (rhs1_type);
3851 rhs2_etype = TREE_TYPE (rhs2_type);
3853 if (POINTER_TYPE_P (lhs_etype)
3854 || POINTER_TYPE_P (rhs1_etype)
3855 || POINTER_TYPE_P (rhs2_etype))
3857 error ("invalid (pointer) operands to plus/minus");
3858 return true;
3861 /* Continue with generic binary expression handling. */
3862 break;
3865 case POINTER_PLUS_EXPR:
3867 if (!POINTER_TYPE_P (rhs1_type)
3868 || !useless_type_conversion_p (lhs_type, rhs1_type)
3869 || !ptrofftype_p (rhs2_type))
3871 error ("type mismatch in pointer plus expression");
3872 debug_generic_stmt (lhs_type);
3873 debug_generic_stmt (rhs1_type);
3874 debug_generic_stmt (rhs2_type);
3875 return true;
3878 return false;
3881 case TRUTH_ANDIF_EXPR:
3882 case TRUTH_ORIF_EXPR:
3883 case TRUTH_AND_EXPR:
3884 case TRUTH_OR_EXPR:
3885 case TRUTH_XOR_EXPR:
3887 gcc_unreachable ();
3889 case LT_EXPR:
3890 case LE_EXPR:
3891 case GT_EXPR:
3892 case GE_EXPR:
3893 case EQ_EXPR:
3894 case NE_EXPR:
3895 case UNORDERED_EXPR:
3896 case ORDERED_EXPR:
3897 case UNLT_EXPR:
3898 case UNLE_EXPR:
3899 case UNGT_EXPR:
3900 case UNGE_EXPR:
3901 case UNEQ_EXPR:
3902 case LTGT_EXPR:
3903 /* Comparisons are also binary, but the result type is not
3904 connected to the operand types. */
3905 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3907 case WIDEN_MULT_EXPR:
3908 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3909 return true;
3910 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3911 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3913 case WIDEN_SUM_EXPR:
3914 case VEC_WIDEN_MULT_HI_EXPR:
3915 case VEC_WIDEN_MULT_LO_EXPR:
3916 case VEC_WIDEN_MULT_EVEN_EXPR:
3917 case VEC_WIDEN_MULT_ODD_EXPR:
3918 case VEC_PACK_TRUNC_EXPR:
3919 case VEC_PACK_SAT_EXPR:
3920 case VEC_PACK_FIX_TRUNC_EXPR:
3921 /* FIXME. */
3922 return false;
3924 case MULT_EXPR:
3925 case MULT_HIGHPART_EXPR:
3926 case TRUNC_DIV_EXPR:
3927 case CEIL_DIV_EXPR:
3928 case FLOOR_DIV_EXPR:
3929 case ROUND_DIV_EXPR:
3930 case TRUNC_MOD_EXPR:
3931 case CEIL_MOD_EXPR:
3932 case FLOOR_MOD_EXPR:
3933 case ROUND_MOD_EXPR:
3934 case RDIV_EXPR:
3935 case EXACT_DIV_EXPR:
3936 case MIN_EXPR:
3937 case MAX_EXPR:
3938 case BIT_IOR_EXPR:
3939 case BIT_XOR_EXPR:
3940 case BIT_AND_EXPR:
3941 /* Continue with generic binary expression handling. */
3942 break;
3944 default:
3945 gcc_unreachable ();
3948 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3949 || !useless_type_conversion_p (lhs_type, rhs2_type))
3951 error ("type mismatch in binary expression");
3952 debug_generic_stmt (lhs_type);
3953 debug_generic_stmt (rhs1_type);
3954 debug_generic_stmt (rhs2_type);
3955 return true;
3958 return false;
3961 /* Verify a gimple assignment statement STMT with a ternary rhs.
3962 Returns true if anything is wrong. */
3964 static bool
3965 verify_gimple_assign_ternary (gassign *stmt)
3967 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3968 tree lhs = gimple_assign_lhs (stmt);
3969 tree lhs_type = TREE_TYPE (lhs);
3970 tree rhs1 = gimple_assign_rhs1 (stmt);
3971 tree rhs1_type = TREE_TYPE (rhs1);
3972 tree rhs2 = gimple_assign_rhs2 (stmt);
3973 tree rhs2_type = TREE_TYPE (rhs2);
3974 tree rhs3 = gimple_assign_rhs3 (stmt);
3975 tree rhs3_type = TREE_TYPE (rhs3);
3977 if (!is_gimple_reg (lhs))
3979 error ("non-register as LHS of ternary operation");
3980 return true;
3983 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3984 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3985 || !is_gimple_val (rhs2)
3986 || !is_gimple_val (rhs3))
3988 error ("invalid operands in ternary operation");
3989 return true;
3992 /* First handle operations that involve different types. */
3993 switch (rhs_code)
3995 case WIDEN_MULT_PLUS_EXPR:
3996 case WIDEN_MULT_MINUS_EXPR:
3997 if ((!INTEGRAL_TYPE_P (rhs1_type)
3998 && !FIXED_POINT_TYPE_P (rhs1_type))
3999 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4000 || !useless_type_conversion_p (lhs_type, rhs3_type)
4001 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4002 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4004 error ("type mismatch in widening multiply-accumulate expression");
4005 debug_generic_expr (lhs_type);
4006 debug_generic_expr (rhs1_type);
4007 debug_generic_expr (rhs2_type);
4008 debug_generic_expr (rhs3_type);
4009 return true;
4011 break;
4013 case FMA_EXPR:
4014 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4015 || !useless_type_conversion_p (lhs_type, rhs2_type)
4016 || !useless_type_conversion_p (lhs_type, rhs3_type))
4018 error ("type mismatch in fused multiply-add expression");
4019 debug_generic_expr (lhs_type);
4020 debug_generic_expr (rhs1_type);
4021 debug_generic_expr (rhs2_type);
4022 debug_generic_expr (rhs3_type);
4023 return true;
4025 break;
4027 case VEC_COND_EXPR:
4028 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4029 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4030 != TYPE_VECTOR_SUBPARTS (lhs_type))
4032 error ("the first argument of a VEC_COND_EXPR must be of a "
4033 "boolean vector type of the same number of elements "
4034 "as the result");
4035 debug_generic_expr (lhs_type);
4036 debug_generic_expr (rhs1_type);
4037 return true;
4039 /* Fallthrough. */
4040 case COND_EXPR:
4041 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4042 || !useless_type_conversion_p (lhs_type, rhs3_type))
4044 error ("type mismatch in conditional expression");
4045 debug_generic_expr (lhs_type);
4046 debug_generic_expr (rhs2_type);
4047 debug_generic_expr (rhs3_type);
4048 return true;
4050 break;
4052 case VEC_PERM_EXPR:
4053 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4054 || !useless_type_conversion_p (lhs_type, rhs2_type))
4056 error ("type mismatch in vector permute expression");
4057 debug_generic_expr (lhs_type);
4058 debug_generic_expr (rhs1_type);
4059 debug_generic_expr (rhs2_type);
4060 debug_generic_expr (rhs3_type);
4061 return true;
4064 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4065 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4066 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4068 error ("vector types expected in vector permute expression");
4069 debug_generic_expr (lhs_type);
4070 debug_generic_expr (rhs1_type);
4071 debug_generic_expr (rhs2_type);
4072 debug_generic_expr (rhs3_type);
4073 return true;
4076 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4077 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4078 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4079 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4080 != TYPE_VECTOR_SUBPARTS (lhs_type))
4082 error ("vectors with different element number found "
4083 "in vector permute expression");
4084 debug_generic_expr (lhs_type);
4085 debug_generic_expr (rhs1_type);
4086 debug_generic_expr (rhs2_type);
4087 debug_generic_expr (rhs3_type);
4088 return true;
4091 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4092 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4093 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4095 error ("invalid mask type in vector permute expression");
4096 debug_generic_expr (lhs_type);
4097 debug_generic_expr (rhs1_type);
4098 debug_generic_expr (rhs2_type);
4099 debug_generic_expr (rhs3_type);
4100 return true;
4103 return false;
4105 case SAD_EXPR:
4106 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4107 || !useless_type_conversion_p (lhs_type, rhs3_type)
4108 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4109 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4111 error ("type mismatch in sad expression");
4112 debug_generic_expr (lhs_type);
4113 debug_generic_expr (rhs1_type);
4114 debug_generic_expr (rhs2_type);
4115 debug_generic_expr (rhs3_type);
4116 return true;
4119 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4120 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4121 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4123 error ("vector types expected in sad expression");
4124 debug_generic_expr (lhs_type);
4125 debug_generic_expr (rhs1_type);
4126 debug_generic_expr (rhs2_type);
4127 debug_generic_expr (rhs3_type);
4128 return true;
4131 return false;
4133 case BIT_INSERT_EXPR:
4134 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4136 error ("type mismatch in BIT_INSERT_EXPR");
4137 debug_generic_expr (lhs_type);
4138 debug_generic_expr (rhs1_type);
4139 return true;
4141 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4142 && INTEGRAL_TYPE_P (rhs2_type))
4143 || (VECTOR_TYPE_P (rhs1_type)
4144 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4146 error ("not allowed type combination in BIT_INSERT_EXPR");
4147 debug_generic_expr (rhs1_type);
4148 debug_generic_expr (rhs2_type);
4149 return true;
4151 if (! tree_fits_uhwi_p (rhs3)
4152 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4154 error ("invalid position or size in BIT_INSERT_EXPR");
4155 return true;
4157 if (INTEGRAL_TYPE_P (rhs1_type))
4159 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4160 if (bitpos >= TYPE_PRECISION (rhs1_type)
4161 || (bitpos + TYPE_PRECISION (rhs2_type)
4162 > TYPE_PRECISION (rhs1_type)))
4164 error ("insertion out of range in BIT_INSERT_EXPR");
4165 return true;
4168 else if (VECTOR_TYPE_P (rhs1_type))
4170 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4171 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4172 if (bitpos % bitsize != 0)
4174 error ("vector insertion not at element boundary");
4175 return true;
4178 return false;
4180 case DOT_PROD_EXPR:
4181 case REALIGN_LOAD_EXPR:
4182 /* FIXME. */
4183 return false;
4185 default:
4186 gcc_unreachable ();
4188 return false;
4191 /* Verify a gimple assignment statement STMT with a single rhs.
4192 Returns true if anything is wrong. */
4194 static bool
4195 verify_gimple_assign_single (gassign *stmt)
4197 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4198 tree lhs = gimple_assign_lhs (stmt);
4199 tree lhs_type = TREE_TYPE (lhs);
4200 tree rhs1 = gimple_assign_rhs1 (stmt);
4201 tree rhs1_type = TREE_TYPE (rhs1);
4202 bool res = false;
4204 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4206 error ("non-trivial conversion at assignment");
4207 debug_generic_expr (lhs_type);
4208 debug_generic_expr (rhs1_type);
4209 return true;
4212 if (gimple_clobber_p (stmt)
4213 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4215 error ("non-decl/MEM_REF LHS in clobber statement");
4216 debug_generic_expr (lhs);
4217 return true;
4220 if (handled_component_p (lhs)
4221 || TREE_CODE (lhs) == MEM_REF
4222 || TREE_CODE (lhs) == TARGET_MEM_REF)
4223 res |= verify_types_in_gimple_reference (lhs, true);
4225 /* Special codes we cannot handle via their class. */
4226 switch (rhs_code)
4228 case ADDR_EXPR:
4230 tree op = TREE_OPERAND (rhs1, 0);
4231 if (!is_gimple_addressable (op))
4233 error ("invalid operand in unary expression");
4234 return true;
4237 /* Technically there is no longer a need for matching types, but
4238 gimple hygiene asks for this check. In LTO we can end up
4239 combining incompatible units and thus end up with addresses
4240 of globals that change their type to a common one. */
4241 if (!in_lto_p
4242 && !types_compatible_p (TREE_TYPE (op),
4243 TREE_TYPE (TREE_TYPE (rhs1)))
4244 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4245 TREE_TYPE (op)))
4247 error ("type mismatch in address expression");
4248 debug_generic_stmt (TREE_TYPE (rhs1));
4249 debug_generic_stmt (TREE_TYPE (op));
4250 return true;
4253 return verify_types_in_gimple_reference (op, true);
4256 /* tcc_reference */
4257 case INDIRECT_REF:
4258 error ("INDIRECT_REF in gimple IL");
4259 return true;
4261 case COMPONENT_REF:
4262 case BIT_FIELD_REF:
4263 case ARRAY_REF:
4264 case ARRAY_RANGE_REF:
4265 case VIEW_CONVERT_EXPR:
4266 case REALPART_EXPR:
4267 case IMAGPART_EXPR:
4268 case TARGET_MEM_REF:
4269 case MEM_REF:
4270 if (!is_gimple_reg (lhs)
4271 && is_gimple_reg_type (TREE_TYPE (lhs)))
4273 error ("invalid rhs for gimple memory store");
4274 debug_generic_stmt (lhs);
4275 debug_generic_stmt (rhs1);
4276 return true;
4278 return res || verify_types_in_gimple_reference (rhs1, false);
4280 /* tcc_constant */
4281 case SSA_NAME:
4282 case INTEGER_CST:
4283 case REAL_CST:
4284 case FIXED_CST:
4285 case COMPLEX_CST:
4286 case VECTOR_CST:
4287 case STRING_CST:
4288 return res;
4290 /* tcc_declaration */
4291 case CONST_DECL:
4292 return res;
4293 case VAR_DECL:
4294 case PARM_DECL:
4295 if (!is_gimple_reg (lhs)
4296 && !is_gimple_reg (rhs1)
4297 && is_gimple_reg_type (TREE_TYPE (lhs)))
4299 error ("invalid rhs for gimple memory store");
4300 debug_generic_stmt (lhs);
4301 debug_generic_stmt (rhs1);
4302 return true;
4304 return res;
4306 case CONSTRUCTOR:
4307 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4309 unsigned int i;
4310 tree elt_i, elt_v, elt_t = NULL_TREE;
4312 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4313 return res;
4314 /* For vector CONSTRUCTORs we require that either the CONSTRUCTOR
4315 is empty, or that it is a CONSTRUCTOR of smaller vector elements
4316 (in which case the element count must be correct to cover the
4317 whole outer vector and the index must be NULL on all elements),
4318 or that it is a CONSTRUCTOR of scalar elements, where as an
4319 exception we allow a smaller number of elements (assuming zero
4320 filling) and consecutive indexes as compared to NULL indexes
4321 (such CONSTRUCTORs can appear in the IL from FEs). */
4322 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4324 if (elt_t == NULL_TREE)
4326 elt_t = TREE_TYPE (elt_v);
4327 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4329 tree elt_t = TREE_TYPE (elt_v);
4330 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4331 TREE_TYPE (elt_t)))
4333 error ("incorrect type of vector CONSTRUCTOR"
4334 " elements");
4335 debug_generic_stmt (rhs1);
4336 return true;
4338 else if (CONSTRUCTOR_NELTS (rhs1)
4339 * TYPE_VECTOR_SUBPARTS (elt_t)
4340 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4342 error ("incorrect number of vector CONSTRUCTOR"
4343 " elements");
4344 debug_generic_stmt (rhs1);
4345 return true;
4348 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4349 elt_t))
4351 error ("incorrect type of vector CONSTRUCTOR elements");
4352 debug_generic_stmt (rhs1);
4353 return true;
4355 else if (CONSTRUCTOR_NELTS (rhs1)
4356 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4358 error ("incorrect number of vector CONSTRUCTOR elements");
4359 debug_generic_stmt (rhs1);
4360 return true;
4363 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4365 error ("incorrect type of vector CONSTRUCTOR elements");
4366 debug_generic_stmt (rhs1);
4367 return true;
4369 if (elt_i != NULL_TREE
4370 && (TREE_CODE (elt_t) == VECTOR_TYPE
4371 || TREE_CODE (elt_i) != INTEGER_CST
4372 || compare_tree_int (elt_i, i) != 0))
4374 error ("vector CONSTRUCTOR with non-NULL element index");
4375 debug_generic_stmt (rhs1);
4376 return true;
4378 if (!is_gimple_val (elt_v))
4380 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4381 debug_generic_stmt (rhs1);
4382 return true;
4386 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4388 error ("non-vector CONSTRUCTOR with elements");
4389 debug_generic_stmt (rhs1);
4390 return true;
4392 return res;
4393 case OBJ_TYPE_REF:
4394 case ASSERT_EXPR:
4395 case WITH_SIZE_EXPR:
4396 /* FIXME. */
4397 return res;
4399 default:;
4402 return res;
4405 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4406 is a problem, otherwise false. */
4408 static bool
4409 verify_gimple_assign (gassign *stmt)
4411 switch (gimple_assign_rhs_class (stmt))
4413 case GIMPLE_SINGLE_RHS:
4414 return verify_gimple_assign_single (stmt);
4416 case GIMPLE_UNARY_RHS:
4417 return verify_gimple_assign_unary (stmt);
4419 case GIMPLE_BINARY_RHS:
4420 return verify_gimple_assign_binary (stmt);
4422 case GIMPLE_TERNARY_RHS:
4423 return verify_gimple_assign_ternary (stmt);
4425 default:
4426 gcc_unreachable ();
4430 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4431 is a problem, otherwise false. */
4433 static bool
4434 verify_gimple_return (greturn *stmt)
4436 tree op = gimple_return_retval (stmt);
4437 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4439 /* We cannot test for present return values as we do not fix up missing
4440 return values from the original source. */
4441 if (op == NULL)
4442 return false;
4444 if (!is_gimple_val (op)
4445 && TREE_CODE (op) != RESULT_DECL)
4447 error ("invalid operand in return statement");
4448 debug_generic_stmt (op);
4449 return true;
4452 if ((TREE_CODE (op) == RESULT_DECL
4453 && DECL_BY_REFERENCE (op))
4454 || (TREE_CODE (op) == SSA_NAME
4455 && SSA_NAME_VAR (op)
4456 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4457 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4458 op = TREE_TYPE (op);
4460 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4462 error ("invalid conversion in return statement");
4463 debug_generic_stmt (restype);
4464 debug_generic_stmt (TREE_TYPE (op));
4465 return true;
4468 return false;
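/* Editor's note (not part of the original source): for a function
   returning an aggregate by invisible reference, e.g. a hypothetical

     struct big f (void);

   the RESULT_DECL has DECL_BY_REFERENCE set and its type is a
   reference to the declared return type, which is why one level of
   the operand's type is stripped above before the comparison.  */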
4472 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4473 is a problem, otherwise false. */
4475 static bool
4476 verify_gimple_goto (ggoto *stmt)
4478 tree dest = gimple_goto_dest (stmt);
4480 /* ??? We have two canonical forms of direct goto destinations, a
4481 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4482 if (TREE_CODE (dest) != LABEL_DECL
4483 && (!is_gimple_val (dest)
4484 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4486 error ("goto destination is neither a label nor a pointer");
4487 return true;
4490 return false;
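/* Editor's illustration (not part of the original source): the two
   accepted destination forms correspond to

     goto lab;      <- DEST is the bare LABEL_DECL
     goto *ptr_1;   <- computed goto, DEST a pointer-typed GIMPLE value

   anything else is diagnosed above.  */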
4493 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4494 is a problem, otherwise false. */
4496 static bool
4497 verify_gimple_switch (gswitch *stmt)
4499 unsigned int i, n;
4500 tree elt, prev_upper_bound = NULL_TREE;
4501 tree index_type, elt_type = NULL_TREE;
4503 if (!is_gimple_val (gimple_switch_index (stmt)))
4505 error ("invalid operand to switch statement");
4506 debug_generic_stmt (gimple_switch_index (stmt));
4507 return true;
4510 index_type = TREE_TYPE (gimple_switch_index (stmt));
4511 if (! INTEGRAL_TYPE_P (index_type))
4513 error ("non-integral type switch statement");
4514 debug_generic_expr (index_type);
4515 return true;
4518 elt = gimple_switch_label (stmt, 0);
4519 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4521 error ("invalid default case label in switch statement");
4522 debug_generic_expr (elt);
4523 return true;
4526 n = gimple_switch_num_labels (stmt);
4527 for (i = 1; i < n; i++)
4529 elt = gimple_switch_label (stmt, i);
4531 if (! CASE_LOW (elt))
4533 error ("invalid case label in switch statement");
4534 debug_generic_expr (elt);
4535 return true;
4537 if (CASE_HIGH (elt)
4538 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4540 error ("invalid case range in switch statement");
4541 debug_generic_expr (elt);
4542 return true;
4545 if (elt_type)
4547 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4548 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4550 error ("type mismatch for case label in switch statement");
4551 debug_generic_expr (elt);
4552 return true;
4555 else
4557 elt_type = TREE_TYPE (CASE_LOW (elt));
4558 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4560 error ("type precision mismatch in switch statement");
4561 return true;
4565 if (prev_upper_bound)
4567 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4569 error ("case labels not sorted in switch statement");
4570 return true;
4574 prev_upper_bound = CASE_HIGH (elt);
4575 if (! prev_upper_bound)
4576 prev_upper_bound = CASE_LOW (elt);
4579 return false;
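/* Editor's illustration (not part of the original source): a gswitch
   accepted by the checks above looks like

     switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>

   the default label (no CASE_LOW) first, CASE_LOW set on all other
   labels, CASE_LOW < CASE_HIGH for ranges, one common label type, and
   the cases sorted in increasing order.  */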
4582 /* Verify a gimple debug statement STMT.
4583 Returns true if anything is wrong. */
4585 static bool
4586 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4588 /* There isn't much that could be wrong in a gimple debug stmt. A
4589 gimple debug bind stmt, for example, maps a tree that is usually
4590 a VAR_DECL or a PARM_DECL, but may also be some scalarized
4591 component or member of an aggregate type, to another tree that
4592 can be an arbitrary expression. These stmts expand into debug
4593 insns, and are converted to debug notes by var-tracking.c. */
4594 return false;
4597 /* Verify a gimple label statement STMT.
4598 Returns true if anything is wrong. */
4600 static bool
4601 verify_gimple_label (glabel *stmt)
4603 tree decl = gimple_label_label (stmt);
4604 int uid;
4605 bool err = false;
4607 if (TREE_CODE (decl) != LABEL_DECL)
4608 return true;
4609 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4610 && DECL_CONTEXT (decl) != current_function_decl)
4612 error ("label's context is not the current function decl");
4613 err |= true;
4616 uid = LABEL_DECL_UID (decl);
4617 if (cfun->cfg
4618 && (uid == -1
4619 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4621 error ("incorrect entry in label_to_block_map");
4622 err |= true;
4625 uid = EH_LANDING_PAD_NR (decl);
4626 if (uid)
4628 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4629 if (decl != lp->post_landing_pad)
4631 error ("incorrect setting of landing pad number");
4632 err |= true;
4636 return err;
4639 /* Verify a gimple cond statement STMT.
4640 Returns true if anything is wrong. */
4642 static bool
4643 verify_gimple_cond (gcond *stmt)
4645 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4647 error ("invalid comparison code in gimple cond");
4648 return true;
4650 if (!(!gimple_cond_true_label (stmt)
4651 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4652 || !(!gimple_cond_false_label (stmt)
4653 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4655 error ("invalid labels in gimple cond");
4656 return true;
4659 return verify_gimple_comparison (boolean_type_node,
4660 gimple_cond_lhs (stmt),
4661 gimple_cond_rhs (stmt),
4662 gimple_cond_code (stmt));
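/* Editor's illustration (not part of the original source): a gcond
   accepted above is e.g.

     if (a_2 < b_3) goto <L1>; else goto <L2>;

   with a tcc_comparison code (here LT_EXPR), LABEL_DECL or NULL
   labels, and operands checked by verify_gimple_comparison against
   boolean_type_node.  */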
4665 /* Verify the GIMPLE statement STMT. Returns true if there is an
4666 error, otherwise false. */
4668 static bool
4669 verify_gimple_stmt (gimple *stmt)
4671 switch (gimple_code (stmt))
4673 case GIMPLE_ASSIGN:
4674 return verify_gimple_assign (as_a <gassign *> (stmt));
4676 case GIMPLE_LABEL:
4677 return verify_gimple_label (as_a <glabel *> (stmt));
4679 case GIMPLE_CALL:
4680 return verify_gimple_call (as_a <gcall *> (stmt));
4682 case GIMPLE_COND:
4683 return verify_gimple_cond (as_a <gcond *> (stmt));
4685 case GIMPLE_GOTO:
4686 return verify_gimple_goto (as_a <ggoto *> (stmt));
4688 case GIMPLE_SWITCH:
4689 return verify_gimple_switch (as_a <gswitch *> (stmt));
4691 case GIMPLE_RETURN:
4692 return verify_gimple_return (as_a <greturn *> (stmt));
4694 case GIMPLE_ASM:
4695 return false;
4697 case GIMPLE_TRANSACTION:
4698 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4700 /* Tuples that do not have tree operands. */
4701 case GIMPLE_NOP:
4702 case GIMPLE_PREDICT:
4703 case GIMPLE_RESX:
4704 case GIMPLE_EH_DISPATCH:
4705 case GIMPLE_EH_MUST_NOT_THROW:
4706 return false;
4708 CASE_GIMPLE_OMP:
4709 /* OpenMP directives are validated by the FE and never operated
4710 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4711 non-gimple expressions when the main index variable has had
4712 its address taken. This does not affect the loop itself
4713 because the header of a GIMPLE_OMP_FOR is merely used to determine
4714 how to set up the parallel iteration. */
4715 return false;
4717 case GIMPLE_DEBUG:
4718 return verify_gimple_debug (stmt);
4720 default:
4721 gcc_unreachable ();
4725 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4726 and false otherwise. */
4728 static bool
4729 verify_gimple_phi (gimple *phi)
4731 bool err = false;
4732 unsigned i;
4733 tree phi_result = gimple_phi_result (phi);
4734 bool virtual_p;
4736 if (!phi_result)
4738 error ("invalid PHI result");
4739 return true;
4742 virtual_p = virtual_operand_p (phi_result);
4743 if (TREE_CODE (phi_result) != SSA_NAME
4744 || (virtual_p
4745 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4747 error ("invalid PHI result");
4748 err = true;
4751 for (i = 0; i < gimple_phi_num_args (phi); i++)
4753 tree t = gimple_phi_arg_def (phi, i);
4755 if (!t)
4757 error ("missing PHI def");
4758 err |= true;
4759 continue;
4761 /* Addressable variables do have SSA_NAMEs but they
4762 are not considered gimple values. */
4763 else if ((TREE_CODE (t) == SSA_NAME
4764 && virtual_p != virtual_operand_p (t))
4765 || (virtual_p
4766 && (TREE_CODE (t) != SSA_NAME
4767 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4768 || (!virtual_p
4769 && !is_gimple_val (t)))
4771 error ("invalid PHI argument");
4772 debug_generic_expr (t);
4773 err |= true;
4775 #ifdef ENABLE_TYPES_CHECKING
4776 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4778 error ("incompatible types in PHI argument %u", i);
4779 debug_generic_stmt (TREE_TYPE (phi_result));
4780 debug_generic_stmt (TREE_TYPE (t));
4781 err |= true;
4783 #endif
4786 return err;
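/* Editor's illustration (not part of the original source): a scalar
   PHI such as

     # x_3 = PHI <x_1(2), x_2(4)>

   needs an SSA name result and GIMPLE-value arguments of compatible
   type, while a virtual PHI

     # .MEM_5 = PHI <.MEM_4(2), .MEM_1(4)>

   must use SSA names of the single virtual operand throughout.  */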
4789 /* Verify the GIMPLE statements inside the sequence STMTS. */
4791 static bool
4792 verify_gimple_in_seq_2 (gimple_seq stmts)
4794 gimple_stmt_iterator ittr;
4795 bool err = false;
4797 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4799 gimple *stmt = gsi_stmt (ittr);
4801 switch (gimple_code (stmt))
4803 case GIMPLE_BIND:
4804 err |= verify_gimple_in_seq_2 (
4805 gimple_bind_body (as_a <gbind *> (stmt)));
4806 break;
4808 case GIMPLE_TRY:
4809 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4810 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4811 break;
4813 case GIMPLE_EH_FILTER:
4814 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4815 break;
4817 case GIMPLE_EH_ELSE:
4819 geh_else *eh_else = as_a <geh_else *> (stmt);
4820 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4821 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4823 break;
4825 case GIMPLE_CATCH:
4826 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4827 as_a <gcatch *> (stmt)));
4828 break;
4830 case GIMPLE_TRANSACTION:
4831 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4832 break;
4834 default:
4836 bool err2 = verify_gimple_stmt (stmt);
4837 if (err2)
4838 debug_gimple_stmt (stmt);
4839 err |= err2;
4844 return err;
4847 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4848 is a problem, otherwise false. */
4850 static bool
4851 verify_gimple_transaction (gtransaction *stmt)
4853 tree lab;
4855 lab = gimple_transaction_label_norm (stmt);
4856 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4857 return true;
4858 lab = gimple_transaction_label_uninst (stmt);
4859 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4860 return true;
4861 lab = gimple_transaction_label_over (stmt);
4862 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4863 return true;
4865 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4869 /* Verify the GIMPLE statements inside the statement list STMTS. */
4871 DEBUG_FUNCTION void
4872 verify_gimple_in_seq (gimple_seq stmts)
4874 timevar_push (TV_TREE_STMT_VERIFY);
4875 if (verify_gimple_in_seq_2 (stmts))
4876 internal_error ("verify_gimple failed");
4877 timevar_pop (TV_TREE_STMT_VERIFY);
4880 /* Return true when T can be shared. */
4882 static bool
4883 tree_node_can_be_shared (tree t)
4885 if (IS_TYPE_OR_DECL_P (t)
4886 || is_gimple_min_invariant (t)
4887 || TREE_CODE (t) == SSA_NAME
4888 || t == error_mark_node
4889 || TREE_CODE (t) == IDENTIFIER_NODE)
4890 return true;
4892 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4893 return true;
4895 if (DECL_P (t))
4896 return true;
4898 return false;
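/* Editor's note (not part of the original source): sharable nodes are
   those that may legitimately appear as operands of many stmts at
   once: types, decls, SSA names, invariants.  A tree such as
   ARRAY_REF <a, i_1>, by contrast, must be unshared before reuse,
   which is what verify_node_sharing_1 below enforces.  */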
4901 /* Called via walk_tree. Verify tree sharing. */
4903 static tree
4904 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4906 hash_set<void *> *visited = (hash_set<void *> *) data;
4908 if (tree_node_can_be_shared (*tp))
4910 *walk_subtrees = false;
4911 return NULL;
4914 if (visited->add (*tp))
4915 return *tp;
4917 return NULL;
4920 /* Called via walk_gimple_stmt. Verify tree sharing. */
4922 static tree
4923 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4925 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4926 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4929 static bool eh_error_found;
4930 bool
4931 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4932 hash_set<gimple *> *visited)
4934 if (!visited->contains (stmt))
4936 error ("dead STMT in EH table");
4937 debug_gimple_stmt (stmt);
4938 eh_error_found = true;
4940 return true;
4943 /* Verify that the block of location LOC is in BLOCKS. */
4945 static bool
4946 verify_location (hash_set<tree> *blocks, location_t loc)
4948 tree block = LOCATION_BLOCK (loc);
4949 if (block != NULL_TREE
4950 && !blocks->contains (block))
4952 error ("location references block not in block tree");
4953 return true;
4955 if (block != NULL_TREE)
4956 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4957 return false;
4960 /* Called via walk_tree. Verify that expressions have no blocks. */
4962 static tree
4963 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4965 if (!EXPR_P (*tp))
4967 *walk_subtrees = false;
4968 return NULL;
4971 location_t loc = EXPR_LOCATION (*tp);
4972 if (LOCATION_BLOCK (loc) != NULL)
4973 return *tp;
4975 return NULL;
4978 /* Called via walk_tree. Verify locations of expressions. */
4980 static tree
4981 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4983 hash_set<tree> *blocks = (hash_set<tree> *) data;
4985 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
4987 tree t = DECL_DEBUG_EXPR (*tp);
4988 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4989 if (addr)
4990 return addr;
4992 if ((VAR_P (*tp)
4993 || TREE_CODE (*tp) == PARM_DECL
4994 || TREE_CODE (*tp) == RESULT_DECL)
4995 && DECL_HAS_VALUE_EXPR_P (*tp))
4997 tree t = DECL_VALUE_EXPR (*tp);
4998 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4999 if (addr)
5000 return addr;
5003 if (!EXPR_P (*tp))
5005 *walk_subtrees = false;
5006 return NULL;
5009 location_t loc = EXPR_LOCATION (*tp);
5010 if (verify_location (blocks, loc))
5011 return *tp;
5013 return NULL;
5016 /* Called via walk_gimple_op. Verify locations of expressions. */
5018 static tree
5019 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5021 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5022 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5025 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5027 static void
5028 collect_subblocks (hash_set<tree> *blocks, tree block)
5030 tree t;
5031 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5033 blocks->add (t);
5034 collect_subblocks (blocks, t);
5038 /* Verify the GIMPLE statements in the CFG of FN. */
5040 DEBUG_FUNCTION void
5041 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5043 basic_block bb;
5044 bool err = false;
5046 timevar_push (TV_TREE_STMT_VERIFY);
5047 hash_set<void *> visited;
5048 hash_set<gimple *> visited_stmts;
5050 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5051 hash_set<tree> blocks;
5052 if (DECL_INITIAL (fn->decl))
5054 blocks.add (DECL_INITIAL (fn->decl));
5055 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5058 FOR_EACH_BB_FN (bb, fn)
5060 gimple_stmt_iterator gsi;
5062 for (gphi_iterator gpi = gsi_start_phis (bb);
5063 !gsi_end_p (gpi);
5064 gsi_next (&gpi))
5066 gphi *phi = gpi.phi ();
5067 bool err2 = false;
5068 unsigned i;
5070 visited_stmts.add (phi);
5072 if (gimple_bb (phi) != bb)
5074 error ("gimple_bb (phi) is set to a wrong basic block");
5075 err2 = true;
5078 err2 |= verify_gimple_phi (phi);
5080 /* Only PHI arguments have locations. */
5081 if (gimple_location (phi) != UNKNOWN_LOCATION)
5083 error ("PHI node with location");
5084 err2 = true;
5087 for (i = 0; i < gimple_phi_num_args (phi); i++)
5089 tree arg = gimple_phi_arg_def (phi, i);
5090 tree addr = walk_tree (&arg, verify_node_sharing_1,
5091 &visited, NULL);
5092 if (addr)
5094 error ("incorrect sharing of tree nodes");
5095 debug_generic_expr (addr);
5096 err2 |= true;
5098 location_t loc = gimple_phi_arg_location (phi, i);
5099 if (virtual_operand_p (gimple_phi_result (phi))
5100 && loc != UNKNOWN_LOCATION)
5102 error ("virtual PHI with argument locations");
5103 err2 = true;
5105 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5106 if (addr)
5108 debug_generic_expr (addr);
5109 err2 = true;
5111 err2 |= verify_location (&blocks, loc);
5114 if (err2)
5115 debug_gimple_stmt (phi);
5116 err |= err2;
5119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5121 gimple *stmt = gsi_stmt (gsi);
5122 bool err2 = false;
5123 struct walk_stmt_info wi;
5124 tree addr;
5125 int lp_nr;
5127 visited_stmts.add (stmt);
5129 if (gimple_bb (stmt) != bb)
5131 error ("gimple_bb (stmt) is set to a wrong basic block");
5132 err2 = true;
5135 err2 |= verify_gimple_stmt (stmt);
5136 err2 |= verify_location (&blocks, gimple_location (stmt));
5138 memset (&wi, 0, sizeof (wi));
5139 wi.info = (void *) &visited;
5140 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5141 if (addr)
5143 error ("incorrect sharing of tree nodes");
5144 debug_generic_expr (addr);
5145 err2 |= true;
5148 memset (&wi, 0, sizeof (wi));
5149 wi.info = (void *) &blocks;
5150 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5151 if (addr)
5153 debug_generic_expr (addr);
5154 err2 |= true;
5157 /* ??? Instead of not checking these stmts at all, the walker
5158 should know its context via wi. */
5159 if (!is_gimple_debug (stmt)
5160 && !is_gimple_omp (stmt))
5162 memset (&wi, 0, sizeof (wi));
5163 addr = walk_gimple_op (stmt, verify_expr, &wi);
5164 if (addr)
5166 debug_generic_expr (addr);
5167 inform (gimple_location (stmt), "in statement");
5168 err2 |= true;
5172 /* If the statement is marked as part of an EH region, then it is
5173 expected that the statement could throw. Verify that when we
5174 have optimizations that simplify statements such that we prove
5175 that they cannot throw, we update other data structures
5176 to match. */
5177 lp_nr = lookup_stmt_eh_lp (stmt);
5178 if (lp_nr > 0)
5180 if (!stmt_could_throw_p (stmt))
5182 if (verify_nothrow)
5184 error ("statement marked for throw, but doesn%'t");
5185 err2 |= true;
5188 else if (!gsi_one_before_end_p (gsi))
5190 error ("statement marked for throw in middle of block");
5191 err2 |= true;
5195 if (err2)
5196 debug_gimple_stmt (stmt);
5197 err |= err2;
5201 eh_error_found = false;
5202 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5203 if (eh_table)
5204 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5205 (&visited_stmts);
5207 if (err || eh_error_found)
5208 internal_error ("verify_gimple failed");
5210 verify_histograms ();
5211 timevar_pop (TV_TREE_STMT_VERIFY);
5215 /* Verifies that the flow information is OK. */
5217 static int
5218 gimple_verify_flow_info (void)
5220 int err = 0;
5221 basic_block bb;
5222 gimple_stmt_iterator gsi;
5223 gimple *stmt;
5224 edge e;
5225 edge_iterator ei;
5227 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5228 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5230 error ("ENTRY_BLOCK has IL associated with it");
5231 err = 1;
5234 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5235 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5237 error ("EXIT_BLOCK has IL associated with it");
5238 err = 1;
5241 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5242 if (e->flags & EDGE_FALLTHRU)
5244 error ("fallthru to exit from bb %d", e->src->index);
5245 err = 1;
5248 FOR_EACH_BB_FN (bb, cfun)
5250 bool found_ctrl_stmt = false;
5252 stmt = NULL;
5254 /* Skip labels at the start of the basic block. */
5255 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5257 tree label;
5258 gimple *prev_stmt = stmt;
5260 stmt = gsi_stmt (gsi);
5262 if (gimple_code (stmt) != GIMPLE_LABEL)
5263 break;
5265 label = gimple_label_label (as_a <glabel *> (stmt));
5266 if (prev_stmt && DECL_NONLOCAL (label))
5268 error ("nonlocal label ");
5269 print_generic_expr (stderr, label, 0);
5270 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5271 bb->index);
5272 err = 1;
5275 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5277 error ("EH landing pad label ");
5278 print_generic_expr (stderr, label, 0);
5279 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5280 bb->index);
5281 err = 1;
5284 if (label_to_block (label) != bb)
5286 error ("label ");
5287 print_generic_expr (stderr, label, 0);
5288 fprintf (stderr, " to block does not match in bb %d",
5289 bb->index);
5290 err = 1;
5293 if (decl_function_context (label) != current_function_decl)
5295 error ("label ");
5296 print_generic_expr (stderr, label, 0);
5297 fprintf (stderr, " has incorrect context in bb %d",
5298 bb->index);
5299 err = 1;
5303 /* Verify that the body of basic block BB is free of control flow. */
5304 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5306 gimple *stmt = gsi_stmt (gsi);
5308 if (found_ctrl_stmt)
5310 error ("control flow in the middle of basic block %d",
5311 bb->index);
5312 err = 1;
5315 if (stmt_ends_bb_p (stmt))
5316 found_ctrl_stmt = true;
5318 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5320 error ("label ");
5321 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5322 fprintf (stderr, " in the middle of basic block %d", bb->index);
5323 err = 1;
5327 gsi = gsi_last_bb (bb);
5328 if (gsi_end_p (gsi))
5329 continue;
5331 stmt = gsi_stmt (gsi);
5333 if (gimple_code (stmt) == GIMPLE_LABEL)
5334 continue;
5336 err |= verify_eh_edges (stmt);
5338 if (is_ctrl_stmt (stmt))
5340 FOR_EACH_EDGE (e, ei, bb->succs)
5341 if (e->flags & EDGE_FALLTHRU)
5343 error ("fallthru edge after a control statement in bb %d",
5344 bb->index);
5345 err = 1;
5349 if (gimple_code (stmt) != GIMPLE_COND)
5351 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5352 after anything other than a GIMPLE_COND statement. */
5353 FOR_EACH_EDGE (e, ei, bb->succs)
5354 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5356 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5357 bb->index);
5358 err = 1;
5362 switch (gimple_code (stmt))
5364 case GIMPLE_COND:
5366 edge true_edge;
5367 edge false_edge;
5369 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5371 if (!true_edge
5372 || !false_edge
5373 || !(true_edge->flags & EDGE_TRUE_VALUE)
5374 || !(false_edge->flags & EDGE_FALSE_VALUE)
5375 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5376 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5377 || EDGE_COUNT (bb->succs) >= 3)
5379 error ("wrong outgoing edge flags at end of bb %d",
5380 bb->index);
5381 err = 1;
5384 break;
5386 case GIMPLE_GOTO:
5387 if (simple_goto_p (stmt))
5389 error ("explicit goto at end of bb %d", bb->index);
5390 err = 1;
5392 else
5394 /* FIXME. We should double check that the labels in the
5395 destination blocks have their address taken. */
5396 FOR_EACH_EDGE (e, ei, bb->succs)
5397 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5398 | EDGE_FALSE_VALUE))
5399 || !(e->flags & EDGE_ABNORMAL))
5401 error ("wrong outgoing edge flags at end of bb %d",
5402 bb->index);
5403 err = 1;
5406 break;
5408 case GIMPLE_CALL:
5409 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5410 break;
5411 /* fallthru */
5412 case GIMPLE_RETURN:
5413 if (!single_succ_p (bb)
5414 || (single_succ_edge (bb)->flags
5415 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5416 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5418 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5419 err = 1;
5421 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5423 error ("return edge does not point to exit in bb %d",
5424 bb->index);
5425 err = 1;
5427 break;
5429 case GIMPLE_SWITCH:
5431 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5432 tree prev;
5433 edge e;
5434 size_t i, n;
5436 n = gimple_switch_num_labels (switch_stmt);
5438 /* Mark all the destination basic blocks. */
5439 for (i = 0; i < n; ++i)
5441 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5442 basic_block label_bb = label_to_block (lab);
5443 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5444 label_bb->aux = (void *)1;
5447 /* Verify that the case labels are sorted. */
5448 prev = gimple_switch_label (switch_stmt, 0);
5449 for (i = 1; i < n; ++i)
5451 tree c = gimple_switch_label (switch_stmt, i);
5452 if (!CASE_LOW (c))
5454 error ("found default case not at the start of "
5455 "case vector");
5456 err = 1;
5457 continue;
5459 if (CASE_LOW (prev)
5460 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5462 error ("case labels not sorted: ");
5463 print_generic_expr (stderr, prev, 0);
5464 fprintf (stderr, " is greater than ");
5465 print_generic_expr (stderr, c, 0);
5466 fprintf (stderr, " but comes before it.\n");
5467 err = 1;
5469 prev = c;
5471 /* VRP will remove the default case if it can prove it will
5472 never be executed. So do not verify here that a default
5473 case always exists. */
5475 FOR_EACH_EDGE (e, ei, bb->succs)
5477 if (!e->dest->aux)
5479 error ("extra outgoing edge %d->%d",
5480 bb->index, e->dest->index);
5481 err = 1;
5484 e->dest->aux = (void *)2;
5485 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5486 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5488 error ("wrong outgoing edge flags at end of bb %d",
5489 bb->index);
5490 err = 1;
5494 /* Check that we have all of them. */
5495 for (i = 0; i < n; ++i)
5497 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5498 basic_block label_bb = label_to_block (lab);
5500 if (label_bb->aux != (void *)2)
5502 error ("missing edge %i->%i", bb->index, label_bb->index);
5503 err = 1;
5507 FOR_EACH_EDGE (e, ei, bb->succs)
5508 e->dest->aux = (void *)0;
5510 break;
5512 case GIMPLE_EH_DISPATCH:
5513 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5514 break;
5516 default:
5517 break;
5521 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5522 verify_dominators (CDI_DOMINATORS);
5524 return err;
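/* Editor's illustration (not part of the original source): the block
   shape enforced above for a conditional is

     <bb 3>:
       ...
       if (x_1 != 0) goto <bb 4>; else goto <bb 5>;

   exactly two outgoing edges, one EDGE_TRUE_VALUE and one
   EDGE_FALSE_VALUE, neither of them fallthru or abnormal.  */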
5528 /* Updates phi nodes after creating a forwarder block joined
5529 by edge FALLTHRU. */
5531 static void
5532 gimple_make_forwarder_block (edge fallthru)
5534 edge e;
5535 edge_iterator ei;
5536 basic_block dummy, bb;
5537 tree var;
5538 gphi_iterator gsi;
5540 dummy = fallthru->src;
5541 bb = fallthru->dest;
5543 if (single_pred_p (bb))
5544 return;
5546 /* If we redirected a branch we must create new PHI nodes at the
5547 start of BB. */
5548 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5550 gphi *phi, *new_phi;
5552 phi = gsi.phi ();
5553 var = gimple_phi_result (phi);
5554 new_phi = create_phi_node (var, bb);
5555 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5556 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5557 UNKNOWN_LOCATION);
5560 /* Add the arguments we have stored on edges. */
5561 FOR_EACH_EDGE (e, ei, bb->preds)
5563 if (e == fallthru)
5564 continue;
5566 flush_pending_stmts (e);
5571 /* Return a non-special label in the head of basic block BB.
5572 Create one if it doesn't exist. */
5574 tree
5575 gimple_block_label (basic_block bb)
5577 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5578 bool first = true;
5579 tree label;
5580 glabel *stmt;
5582 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5584 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5585 if (!stmt)
5586 break;
5587 label = gimple_label_label (stmt);
5588 if (!DECL_NONLOCAL (label))
5590 if (!first)
5591 gsi_move_before (&i, &s);
5592 return label;
5596 label = create_artificial_label (UNKNOWN_LOCATION);
5597 stmt = gimple_build_label (label);
5598 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5599 return label;
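/* Editor's note (not part of the original source): a typical use is
   retargeting a jump, e.g.

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   as done for GIMPLE_SWITCH in gimple_redirect_edge_and_branch
   below.  */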
5603 /* Attempt to perform edge redirection by replacing a possibly complex
5604 jump instruction by a goto or by removing the jump completely.
5605 This can apply only if all edges now point to the same block. The
5606 parameters and return values are equivalent to
5607 redirect_edge_and_branch. */
5609 static edge
5610 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5612 basic_block src = e->src;
5613 gimple_stmt_iterator i;
5614 gimple *stmt;
5616 /* We can replace or remove a complex jump only when we have exactly
5617 two edges. */
5618 if (EDGE_COUNT (src->succs) != 2
5619 /* Verify that all targets will be TARGET. Specifically, the
5620 edge that is not E must also go to TARGET. */
5621 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5622 return NULL;
5624 i = gsi_last_bb (src);
5625 if (gsi_end_p (i))
5626 return NULL;
5628 stmt = gsi_stmt (i);
5630 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5632 gsi_remove (&i, true);
5633 e = ssa_redirect_edge (e, target);
5634 e->flags = EDGE_FALLTHRU;
5635 return e;
5638 return NULL;
5642 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5643 edge representing the redirected branch. */
5645 static edge
5646 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5648 basic_block bb = e->src;
5649 gimple_stmt_iterator gsi;
5650 edge ret;
5651 gimple *stmt;
5653 if (e->flags & EDGE_ABNORMAL)
5654 return NULL;
5656 if (e->dest == dest)
5657 return NULL;
5659 if (e->flags & EDGE_EH)
5660 return redirect_eh_edge (e, dest);
5662 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5664 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5665 if (ret)
5666 return ret;
5669 gsi = gsi_last_bb (bb);
5670 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5672 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5674 case GIMPLE_COND:
5675 /* For COND_EXPR, we only need to redirect the edge. */
5676 break;
5678 case GIMPLE_GOTO:
5679 /* No non-abnormal edges should lead from a non-simple goto, and
5680 simple ones should be represented implicitly. */
5681 gcc_unreachable ();
5683 case GIMPLE_SWITCH:
5685 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5686 tree label = gimple_block_label (dest);
5687 tree cases = get_cases_for_edge (e, switch_stmt);
5689 /* If we have a list of cases associated with E, then use it
5690 as it's a lot faster than walking the entire case vector. */
5691 if (cases)
5693 edge e2 = find_edge (e->src, dest);
5694 tree last, first;
5696 first = cases;
5697 while (cases)
5699 last = cases;
5700 CASE_LABEL (cases) = label;
5701 cases = CASE_CHAIN (cases);
5704 /* If there was already an edge in the CFG, then we need
5705 to move all the cases associated with E to E2. */
5706 if (e2)
5708 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5710 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5711 CASE_CHAIN (cases2) = first;
5713 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5715 else
5717 size_t i, n = gimple_switch_num_labels (switch_stmt);
5719 for (i = 0; i < n; i++)
5721 tree elt = gimple_switch_label (switch_stmt, i);
5722 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5723 CASE_LABEL (elt) = label;
5727 break;
5729 case GIMPLE_ASM:
5731 gasm *asm_stmt = as_a <gasm *> (stmt);
5732 int i, n = gimple_asm_nlabels (asm_stmt);
5733 tree label = NULL;
5735 for (i = 0; i < n; ++i)
5737 tree cons = gimple_asm_label_op (asm_stmt, i);
5738 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5740 if (!label)
5741 label = gimple_block_label (dest);
5742 TREE_VALUE (cons) = label;
5746 /* If we didn't find any label matching the former edge in the
5747 asm labels, we must be redirecting the fallthrough
5748 edge. */
5749 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5751 break;
5753 case GIMPLE_RETURN:
5754 gsi_remove (&gsi, true);
5755 e->flags |= EDGE_FALLTHRU;
5756 break;
5758 case GIMPLE_OMP_RETURN:
5759 case GIMPLE_OMP_CONTINUE:
5760 case GIMPLE_OMP_SECTIONS_SWITCH:
5761 case GIMPLE_OMP_FOR:
5762 /* The edges from OMP constructs can be simply redirected. */
5763 break;
5765 case GIMPLE_EH_DISPATCH:
5766 if (!(e->flags & EDGE_FALLTHRU))
5767 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5768 break;
5770 case GIMPLE_TRANSACTION:
5771 if (e->flags & EDGE_TM_ABORT)
5772 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5773 gimple_block_label (dest));
5774 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5775 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5776 gimple_block_label (dest));
5777 else
5778 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5779 gimple_block_label (dest));
5780 break;
5782 default:
5783 /* Otherwise it must be a fallthru edge, and we don't need to
5784 do anything besides redirecting it. */
5785 gcc_assert (e->flags & EDGE_FALLTHRU);
5786 break;
5789 /* Update/insert PHI nodes as necessary. */
5791 /* Now update the edges in the CFG. */
5792 e = ssa_redirect_edge (e, dest);
5794 return e;
5797 /* Returns true if it is possible to remove edge E by redirecting
5798 it to the destination of the other edge from E->src. */
5800 static bool
5801 gimple_can_remove_branch_p (const_edge e)
5803 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5804 return false;
5806 return true;
5809 /* Simple wrapper, as we can always redirect fallthru edges. */
5811 static basic_block
5812 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5814 e = gimple_redirect_edge_and_branch (e, dest);
5815 gcc_assert (e);
5817 return NULL;
5821 /* Splits basic block BB after statement STMT (but at least after the
5822 labels). If STMT is NULL, BB is split just after the labels. */
5824 static basic_block
5825 gimple_split_block (basic_block bb, void *stmt)
5827 gimple_stmt_iterator gsi;
5828 gimple_stmt_iterator gsi_tgt;
5829 gimple_seq list;
5830 basic_block new_bb;
5831 edge e;
5832 edge_iterator ei;
5834 new_bb = create_empty_bb (bb);
5836 /* Redirect the outgoing edges. */
5837 new_bb->succs = bb->succs;
5838 bb->succs = NULL;
5839 FOR_EACH_EDGE (e, ei, new_bb->succs)
5840 e->src = new_bb;
5842 /* Get a stmt iterator pointing to the first stmt to move. */
5843 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5844 gsi = gsi_after_labels (bb);
5845 else
5847 gsi = gsi_for_stmt ((gimple *) stmt);
5848 gsi_next (&gsi);
5851 /* Move everything from GSI to the new basic block. */
5852 if (gsi_end_p (gsi))
5853 return new_bb;
5855 /* Split the statement list - avoid re-creating new containers as this
5856 brings ugly quadratic memory consumption in the inliner.
5857 (We are still quadratic since we need to update stmt BB pointers,
5858 sadly.) */
5859 gsi_split_seq_before (&gsi, &list);
5860 set_bb_seq (new_bb, list);
5861 for (gsi_tgt = gsi_start (list);
5862 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5863 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5865 return new_bb;
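/* Editor's note (not part of the original source): this implements the
   split_block CFG hook, so callers use the generic wrapper, e.g.

     basic_block rest = split_block (bb, stmt)->dest;

   where STMT stays in BB and everything after it moves to REST, as in
   gimple_split_block_before_cond_jump below.  */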
5869 /* Moves basic block BB after block AFTER. */
5871 static bool
5872 gimple_move_block_after (basic_block bb, basic_block after)
5874 if (bb->prev_bb == after)
5875 return true;
5877 unlink_block (bb);
5878 link_block (bb, after);
5880 return true;
5884 /* Return TRUE if block BB has no executable statements, otherwise return
5885 FALSE. */
5887 static bool
5888 gimple_empty_block_p (basic_block bb)
5890 /* BB must have no executable statements. */
5891 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5892 if (phi_nodes (bb))
5893 return false;
5894 if (gsi_end_p (gsi))
5895 return true;
5896 if (is_gimple_debug (gsi_stmt (gsi)))
5897 gsi_next_nondebug (&gsi);
5898 return gsi_end_p (gsi);
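/* Editor's illustration (not part of the original source): a block
   holding only labels and debug stmts, e.g.

     <bb 7>:
       L2:
       # DEBUG x => x_1

   counts as empty here, whereas the presence of any PHI node makes
   the block non-empty.  */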
5902 /* Split a basic block if it ends with a conditional branch and if the
5903 other part of the block is not empty. */
5905 static basic_block
5906 gimple_split_block_before_cond_jump (basic_block bb)
5908 gimple *last, *split_point;
5909 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5910 if (gsi_end_p (gsi))
5911 return NULL;
5912 last = gsi_stmt (gsi);
5913 if (gimple_code (last) != GIMPLE_COND
5914 && gimple_code (last) != GIMPLE_SWITCH)
5915 return NULL;
5916 gsi_prev (&gsi);
5917 split_point = gsi_stmt (gsi);
5918 return split_block (bb, split_point)->dest;
5922 /* Return true if basic_block can be duplicated. */
5924 static bool
5925 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5927 return true;
5930 /* Create a duplicate of the basic block BB. NOTE: This does not
5931 preserve SSA form. */
5933 static basic_block
5934 gimple_duplicate_bb (basic_block bb)
5936 basic_block new_bb;
5937 gimple_stmt_iterator gsi_tgt;
5939 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5941 /* Copy the PHI nodes. We ignore PHI node arguments here because
5942 the incoming edges have not been set up yet. */
5943 for (gphi_iterator gpi = gsi_start_phis (bb);
5944 !gsi_end_p (gpi);
5945 gsi_next (&gpi))
5947 gphi *phi, *copy;
5948 phi = gpi.phi ();
5949 copy = create_phi_node (NULL_TREE, new_bb);
5950 create_new_def_for (gimple_phi_result (phi), copy,
5951 gimple_phi_result_ptr (copy));
5952 gimple_set_uid (copy, gimple_uid (phi));
5955 gsi_tgt = gsi_start_bb (new_bb);
5956 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5957 !gsi_end_p (gsi);
5958 gsi_next (&gsi))
5960 def_operand_p def_p;
5961 ssa_op_iter op_iter;
5962 tree lhs;
5963 gimple *stmt, *copy;
5965 stmt = gsi_stmt (gsi);
5966 if (gimple_code (stmt) == GIMPLE_LABEL)
5967 continue;
5969 /* Don't duplicate label debug stmts. */
5970 if (gimple_debug_bind_p (stmt)
5971 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5972 == LABEL_DECL)
5973 continue;
5975 /* Create a new copy of STMT and duplicate STMT's virtual
5976 operands. */
5977 copy = gimple_copy (stmt);
5978 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5980 maybe_duplicate_eh_stmt (copy, stmt);
5981 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5983 /* When copying around a stmt writing into a local non-user
5984 aggregate, make sure it won't share a stack slot with other
5985 vars. */
5986 lhs = gimple_get_lhs (stmt);
5987 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5989 tree base = get_base_address (lhs);
5990 if (base
5991 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
5992 && DECL_IGNORED_P (base)
5993 && !TREE_STATIC (base)
5994 && !DECL_EXTERNAL (base)
5995 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
5996 DECL_NONSHAREABLE (base) = 1;
5999 /* Create new names for all the definitions created by COPY and
6000 add replacement mappings for each new name. */
6001 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6002 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6005 return new_bb;
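/* Editor's note (not part of the original source): this implements the
   duplicate_block CFG hook (used e.g. by copy_bbs); callers must wire
   up the incoming edges themselves and then fill in PHI arguments,
   see add_phi_args_after_copy_edge below, since only PHI results are
   created here.  */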
6008 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6010 static void
6011 add_phi_args_after_copy_edge (edge e_copy)
6013 basic_block bb, bb_copy = e_copy->src, dest;
6014 edge e;
6015 edge_iterator ei;
6016 gphi *phi, *phi_copy;
6017 tree def;
6018 gphi_iterator psi, psi_copy;
6020 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6021 return;
6023 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6025 if (e_copy->dest->flags & BB_DUPLICATED)
6026 dest = get_bb_original (e_copy->dest);
6027 else
6028 dest = e_copy->dest;
6030 e = find_edge (bb, dest);
6031 if (!e)
6033 /* During loop unrolling the target of the latch edge is copied.
6034 In this case we are not looking for the edge to DEST, but for
6035 the edge to the duplicated block whose original was DEST. */
6036 FOR_EACH_EDGE (e, ei, bb->succs)
6038 if ((e->dest->flags & BB_DUPLICATED)
6039 && get_bb_original (e->dest) == dest)
6040 break;
6043 gcc_assert (e != NULL);
6046 for (psi = gsi_start_phis (e->dest),
6047 psi_copy = gsi_start_phis (e_copy->dest);
6048 !gsi_end_p (psi);
6049 gsi_next (&psi), gsi_next (&psi_copy))
6051 phi = psi.phi ();
6052 phi_copy = psi_copy.phi ();
6053 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6054 add_phi_arg (phi_copy, def, e_copy,
6055 gimple_phi_arg_location_from_edge (phi, e));
6060 /* Basic block BB_COPY was created by code duplication. Add phi node
6061 arguments for edges going out of BB_COPY. The blocks that were
6062 duplicated have BB_DUPLICATED set. */
6064 void
6065 add_phi_args_after_copy_bb (basic_block bb_copy)
6067 edge e_copy;
6068 edge_iterator ei;
6070 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6072 add_phi_args_after_copy_edge (e_copy);
6076 /* Blocks in REGION_COPY array of length N_REGION were created by
6077 duplication of basic blocks. Add phi node arguments for edges
6078 going from these blocks. If E_COPY is not NULL, also add
6079 phi node arguments for its destination. */
6081 void
6082 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6083 edge e_copy)
6085 unsigned i;
6087 for (i = 0; i < n_region; i++)
6088 region_copy[i]->flags |= BB_DUPLICATED;
6090 for (i = 0; i < n_region; i++)
6091 add_phi_args_after_copy_bb (region_copy[i]);
6092 if (e_copy)
6093 add_phi_args_after_copy_edge (e_copy);
6095 for (i = 0; i < n_region; i++)
6096 region_copy[i]->flags &= ~BB_DUPLICATED;
6099 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6100 important exit edge EXIT. By important we mean that no SSA name defined
6101 inside the region is live over the other exit edges of the region. All entry
6102 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6103 to the duplicate of the region. Dominance and loop information is
6104 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6105 UPDATE_DOMINANCE is false then we assume that the caller will update the
6106 dominance information after calling this function. The new basic
6107 blocks are stored to REGION_COPY in the same order as their originals in REGION,
6108 provided that REGION_COPY is not NULL.
6109 The function returns false if it is unable to copy the region,
6110 true otherwise. */
6112 bool
6113 gimple_duplicate_sese_region (edge entry, edge exit,
6114 basic_block *region, unsigned n_region,
6115 basic_block *region_copy,
6116 bool update_dominance)
6118 unsigned i;
6119 bool free_region_copy = false, copying_header = false;
6120 struct loop *loop = entry->dest->loop_father;
6121 edge exit_copy;
6122 vec<basic_block> doms;
6123 edge redirected;
6124 int total_freq = 0, entry_freq = 0;
6125 gcov_type total_count = 0, entry_count = 0;
6127 if (!can_copy_bbs_p (region, n_region))
6128 return false;
6130 /* Some sanity checking. Note that we do not check for all possible
6131 misuses of the function. That is, if you ask to copy something weird,
6132 it will work, but the state of the structures probably will not be
6133 correct. */
6134 for (i = 0; i < n_region; i++)
6136 /* We do not handle subloops, i.e. all the blocks must belong to the
6137 same loop. */
6138 if (region[i]->loop_father != loop)
6139 return false;
6141 if (region[i] != entry->dest
6142 && region[i] == loop->header)
6143 return false;
6146 /* In case the function is used for loop header copying (which is the primary
6147 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6148 if (loop->header == entry->dest)
6150 copying_header = true;
6152 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6153 return false;
6155 for (i = 0; i < n_region; i++)
6156 if (region[i] != exit->src
6157 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6158 return false;
6161 initialize_original_copy_tables ();
6163 if (copying_header)
6164 set_loop_copy (loop, loop_outer (loop));
6165 else
6166 set_loop_copy (loop, loop);
6168 if (!region_copy)
6170 region_copy = XNEWVEC (basic_block, n_region);
6171 free_region_copy = true;
6174 /* Record blocks outside the region that are dominated by something
6175 inside. */
6176 if (update_dominance)
6178 doms.create (0);
6179 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6182 if (entry->dest->count)
6184 total_count = entry->dest->count;
6185 entry_count = entry->count;
6186 /* Fix up corner cases, to avoid division by zero or creation of negative
6187 frequencies. */
6188 if (entry_count > total_count)
6189 entry_count = total_count;
6191 else
6193 total_freq = entry->dest->frequency;
6194 entry_freq = EDGE_FREQUENCY (entry);
6195 /* Fix up corner cases, to avoid division by zero or creation of negative
6196 frequencies. */
6197 if (total_freq == 0)
6198 total_freq = 1;
6199 else if (entry_freq > total_freq)
6200 entry_freq = total_freq;
6203 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6204 split_edge_bb_loc (entry), update_dominance);
6205 if (total_count)
6207 scale_bbs_frequencies_gcov_type (region, n_region,
6208 total_count - entry_count,
6209 total_count);
6210 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6211 total_count);
6213 else
6215 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6216 total_freq);
6217 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6220 if (copying_header)
6222 loop->header = exit->dest;
6223 loop->latch = exit->src;
6226 /* Redirect the entry and add the phi node arguments. */
6227 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6228 gcc_assert (redirected != NULL);
6229 flush_pending_stmts (entry);
6231 /* Concerning updating of dominators: We must recount dominators
6232 for entry block and its copy. Anything that is outside of the
6233 region, but was dominated by something inside needs recounting as
6234 well. */
6235 if (update_dominance)
6237 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6238 doms.safe_push (get_bb_original (entry->dest));
6239 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6240 doms.release ();
6243 /* Add the other PHI node arguments. */
6244 add_phi_args_after_copy (region_copy, n_region, NULL);
6246 if (free_region_copy)
6247 free (region_copy);
6249 free_original_copy_tables ();
6250 return true;
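/* Editor's note (not part of the original source): the primary client
   of this function is loop header copying, where ENTRY is the
   preheader edge and EXIT the edge from the header into the body; as
   set up above when copying_header is true, EXIT->dest then becomes
   the new loop header and EXIT->src the new latch.  */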
6253 /* Checks if BB is part of the region defined by N_REGION BBS. */
6254 static bool
6255 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6257 unsigned int n;
6259 for (n = 0; n < n_region; n++)
6261 if (bb == bbs[n])
6262 return true;
6264 return false;
6267 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6268 are stored to REGION_COPY in the same order in which they appear
6269 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6270 the region, EXIT an exit from it. The condition guarding EXIT
6271 is moved to ENTRY. Returns true if duplication succeeds, false
6272 otherwise.
6274 For example,
6276 some_code;
6277 if (cond)
6278   A;
6279 else
6280   B;
6282 is transformed to
6284 if (cond)
6285   {
6286     some_code;
6287     A;
6288   }
6289 else
6290   {
6291     some_code;
6292     B;
6293   }
6294 */
6296 bool
6297 gimple_duplicate_sese_tail (edge entry, edge exit,
6298 basic_block *region, unsigned n_region,
6299 basic_block *region_copy)
6301 unsigned i;
6302 bool free_region_copy = false;
6303 struct loop *loop = exit->dest->loop_father;
6304 struct loop *orig_loop = entry->dest->loop_father;
6305 basic_block switch_bb, entry_bb, nentry_bb;
6306 vec<basic_block> doms;
6307 int total_freq = 0, exit_freq = 0;
6308 gcov_type total_count = 0, exit_count = 0;
6309 edge exits[2], nexits[2], e;
6310 gimple_stmt_iterator gsi;
6311 gimple *cond_stmt;
6312 edge sorig, snew;
6313 basic_block exit_bb;
6314 gphi_iterator psi;
6315 gphi *phi;
6316 tree def;
6317 struct loop *target, *aloop, *cloop;
6319 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6320 exits[0] = exit;
6321 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6323 if (!can_copy_bbs_p (region, n_region))
6324 return false;
6326 initialize_original_copy_tables ();
6327 set_loop_copy (orig_loop, loop);
6329 target = loop;
6330 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6332 if (bb_part_of_region_p (aloop->header, region, n_region))
6334 cloop = duplicate_loop (aloop, target);
6335 duplicate_subloops (aloop, cloop);
6339 if (!region_copy)
6341 region_copy = XNEWVEC (basic_block, n_region);
6342 free_region_copy = true;
6345 gcc_assert (!need_ssa_update_p (cfun));
6347 /* Record blocks outside the region that are dominated by something
6348 inside. */
6349 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6351 if (exit->src->count)
6353 total_count = exit->src->count;
6354 exit_count = exit->count;
6355 /* Fix up corner cases, to avoid division by zero or creation of negative
6356 frequencies. */
6357 if (exit_count > total_count)
6358 exit_count = total_count;
6360 else
6362 total_freq = exit->src->frequency;
6363 exit_freq = EDGE_FREQUENCY (exit);
6364 /* Fix up corner cases, to avoid division by zero or creation of negative
6365 frequencies. */
6366 if (total_freq == 0)
6367 total_freq = 1;
6368 if (exit_freq > total_freq)
6369 exit_freq = total_freq;
6372 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6373 split_edge_bb_loc (exit), true);
6374 if (total_count)
6376 scale_bbs_frequencies_gcov_type (region, n_region,
6377 total_count - exit_count,
6378 total_count);
6379 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6380 total_count);
6382 else
6384 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6385 total_freq);
6386 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6389 /* Create the switch block, and put the exit condition into it. */
6390 entry_bb = entry->dest;
6391 nentry_bb = get_bb_copy (entry_bb);
6392 if (!last_stmt (entry->src)
6393 || !stmt_ends_bb_p (last_stmt (entry->src)))
6394 switch_bb = entry->src;
6395 else
6396 switch_bb = split_edge (entry);
6397 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6399 gsi = gsi_last_bb (switch_bb);
6400 cond_stmt = last_stmt (exit->src);
6401 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6402 cond_stmt = gimple_copy (cond_stmt);
6404 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6406 sorig = single_succ_edge (switch_bb);
6407 sorig->flags = exits[1]->flags;
6408 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6410 /* Register the new edge from SWITCH_BB in loop exit lists. */
6411 rescan_loop_exit (snew, true, false);
6413 /* Add the PHI node arguments. */
6414 add_phi_args_after_copy (region_copy, n_region, snew);
6416 /* Get rid of now superfluous conditions and associated edges (and phi node
6417 arguments). */
6418 exit_bb = exit->dest;
6420 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6421 PENDING_STMT (e) = NULL;
6423 /* The latch of ORIG_LOOP was copied, and so was the backedge
6424 to the original header. We redirect this backedge to EXIT_BB. */
6425 for (i = 0; i < n_region; i++)
6426 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6428 gcc_assert (single_succ_edge (region_copy[i]));
6429 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6430 PENDING_STMT (e) = NULL;
6431 for (psi = gsi_start_phis (exit_bb);
6432 !gsi_end_p (psi);
6433 gsi_next (&psi))
6435 phi = psi.phi ();
6436 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6437 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6440 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6441 PENDING_STMT (e) = NULL;
6443 /* Anything that is outside of the region, but was dominated by something
6444 inside needs to update dominance info. */
6445 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6446 doms.release ();
6447 /* Update the SSA web. */
6448 update_ssa (TODO_update_ssa);
6450 if (free_region_copy)
6451 free (region_copy);
6453 free_original_copy_tables ();
6454 return true;
6457 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6458 adding blocks when the dominator traversal reaches EXIT. This
6459 function silently assumes that ENTRY strictly dominates EXIT. */
6461 void
6462 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6463 vec<basic_block> *bbs_p)
6465 basic_block son;
6467 for (son = first_dom_son (CDI_DOMINATORS, entry);
6468 son;
6469 son = next_dom_son (CDI_DOMINATORS, son))
6471 bbs_p->safe_push (son);
6472 if (son != exit)
6473 gather_blocks_in_sese_region (son, exit, bbs_p);
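/* Usage sketch (illustrative; it mirrors the call in move_sese_region_to_fn
   below).  Note that ENTRY itself is not pushed by this function, so the
   caller must add it first:

       vec<basic_block> bbs;
       bbs.create (0);
       bbs.safe_push (entry_bb);
       gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */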
6477 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6478 The duplicates are recorded in VARS_MAP. */
6480 static void
6481 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6482 tree to_context)
6484 tree t = *tp, new_t;
6485 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6487 if (DECL_CONTEXT (t) == to_context)
6488 return;
6490 bool existed;
6491 tree &loc = vars_map->get_or_insert (t, &existed);
6493 if (!existed)
6495 if (SSA_VAR_P (t))
6497 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6498 add_local_decl (f, new_t);
6500 else
6502 gcc_assert (TREE_CODE (t) == CONST_DECL);
6503 new_t = copy_node (t);
6505 DECL_CONTEXT (new_t) = to_context;
6507 loc = new_t;
6509 else
6510 new_t = loc;
6512 *tp = new_t;
6516 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6517 VARS_MAP maps old ssa names and var_decls to the new ones. */
6519 static tree
6520 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6521 tree to_context)
6523 tree new_name;
6525 gcc_assert (!virtual_operand_p (name));
6527 tree *loc = vars_map->get (name);
6529 if (!loc)
6531 tree decl = SSA_NAME_VAR (name);
6532 if (decl)
6534 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6535 replace_by_duplicate_decl (&decl, vars_map, to_context);
6536 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6537 decl, SSA_NAME_DEF_STMT (name));
6539 else
6540 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6541 name, SSA_NAME_DEF_STMT (name));
6543 /* Now that we've used the def stmt to define new_name, make sure it
6544 doesn't define name anymore. */
6545 SSA_NAME_DEF_STMT (name) = NULL;
6547 vars_map->put (name, new_name);
6549 else
6550 new_name = *loc;
6552 return new_name;
6555 struct move_stmt_d
6557 tree orig_block;
6558 tree new_block;
6559 tree from_context;
6560 tree to_context;
6561 hash_map<tree, tree> *vars_map;
6562 htab_t new_label_map;
6563 hash_map<void *, void *> *eh_map;
6564 bool remap_decls_p;
6567 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6568 contained in *TP if it was previously ORIG_BLOCK and change the
6569 DECL_CONTEXT of every local variable referenced in *TP. */
6571 static tree
6572 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6574 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6575 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6576 tree t = *tp;
6578 if (EXPR_P (t))
6580 tree block = TREE_BLOCK (t);
6581 if (block == p->orig_block
6582 || (p->orig_block == NULL_TREE
6583 && block != NULL_TREE))
6584 TREE_SET_BLOCK (t, p->new_block);
6585 else if (flag_checking && block != NULL_TREE)
6587 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6588 block = BLOCK_SUPERCONTEXT (block);
6589 gcc_assert (block == p->orig_block);
6592 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6594 if (TREE_CODE (t) == SSA_NAME)
6595 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6596 else if (TREE_CODE (t) == PARM_DECL
6597 && gimple_in_ssa_p (cfun))
6598 *tp = *(p->vars_map->get (t));
6599 else if (TREE_CODE (t) == LABEL_DECL)
6601 if (p->new_label_map)
6603 struct tree_map in, *out;
6604 in.base.from = t;
6605 out = (struct tree_map *)
6606 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6607 if (out)
6608 *tp = t = out->to;
6611 DECL_CONTEXT (t) = p->to_context;
6613 else if (p->remap_decls_p)
6615 /* Replace T with its duplicate. T should no longer appear in the
6616 parent function, so this looks wasteful; however, it may appear
6617 in referenced_vars, and more importantly, as virtual operands of
6618 statements, and in alias lists of other variables. It would be
6619 quite difficult to expunge it from all those places. ??? It might
6620 suffice to do this for addressable variables. */
6621 if ((VAR_P (t) && !is_global_var (t))
6622 || TREE_CODE (t) == CONST_DECL)
6623 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6625 *walk_subtrees = 0;
6627 else if (TYPE_P (t))
6628 *walk_subtrees = 0;
6630 return NULL_TREE;
6633 /* Helper for move_stmt_r. Given an EH region number for the source
6634 function, map that to the duplicate EH region number in the dest. */
6636 static int
6637 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6639 eh_region old_r, new_r;
6641 old_r = get_eh_region_from_number (old_nr);
6642 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6644 return new_r->index;
6647 /* Similar, but operate on INTEGER_CSTs. */
6649 static tree
6650 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6652 int old_nr, new_nr;
6654 old_nr = tree_to_shwi (old_t_nr);
6655 new_nr = move_stmt_eh_region_nr (old_nr, p);
6657 return build_int_cst (integer_type_node, new_nr);
6660 /* Like move_stmt_op, but for gimple statements.
6662 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6663 contained in the current statement in *GSI_P and change the
6664 DECL_CONTEXT of every local variable referenced in the current
6665 statement. */
6667 static tree
6668 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6669 struct walk_stmt_info *wi)
6671 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6672 gimple *stmt = gsi_stmt (*gsi_p);
6673 tree block = gimple_block (stmt);
6675 if (block == p->orig_block
6676 || (p->orig_block == NULL_TREE
6677 && block != NULL_TREE))
6678 gimple_set_block (stmt, p->new_block);
6680 switch (gimple_code (stmt))
6682 case GIMPLE_CALL:
6683 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6685 tree r, fndecl = gimple_call_fndecl (stmt);
6686 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6687 switch (DECL_FUNCTION_CODE (fndecl))
6689 case BUILT_IN_EH_COPY_VALUES:
6690 r = gimple_call_arg (stmt, 1);
6691 r = move_stmt_eh_region_tree_nr (r, p);
6692 gimple_call_set_arg (stmt, 1, r);
6693 /* FALLTHRU */
6695 case BUILT_IN_EH_POINTER:
6696 case BUILT_IN_EH_FILTER:
6697 r = gimple_call_arg (stmt, 0);
6698 r = move_stmt_eh_region_tree_nr (r, p);
6699 gimple_call_set_arg (stmt, 0, r);
6700 break;
6702 default:
6703 break;
6706 break;
6708 case GIMPLE_RESX:
6710 gresx *resx_stmt = as_a <gresx *> (stmt);
6711 int r = gimple_resx_region (resx_stmt);
6712 r = move_stmt_eh_region_nr (r, p);
6713 gimple_resx_set_region (resx_stmt, r);
6715 break;
6717 case GIMPLE_EH_DISPATCH:
6719 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6720 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6721 r = move_stmt_eh_region_nr (r, p);
6722 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6724 break;
6726 case GIMPLE_OMP_RETURN:
6727 case GIMPLE_OMP_CONTINUE:
6728 break;
6729 default:
6730 if (is_gimple_omp (stmt))
6732 /* Do not remap variables inside OMP directives. Variables
6733 referenced in clauses and directive header belong to the
6734 parent function and should not be moved into the child
6735 function. */
6736 bool save_remap_decls_p = p->remap_decls_p;
6737 p->remap_decls_p = false;
6738 *handled_ops_p = true;
6740 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6741 move_stmt_op, wi);
6743 p->remap_decls_p = save_remap_decls_p;
6745 break;
6748 return NULL_TREE;
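/* Example (illustrative, with hypothetical region numbers): a landing-pad
   statement in the source function such as

       _1 = __builtin_eh_pointer (3);

   has its region argument rewritten through P->EH_MAP, e.g. to

       _1 = __builtin_eh_pointer (1);

   if source region 3 was duplicated as region 1 in the destination
   function.  */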
6751 /* Move basic block BB from function CFUN to function DEST_FN. The
6752 block is moved out of the original linked list and placed after
6753 block AFTER in the new list. Also, the block is removed from the
6754 original array of blocks and placed in DEST_FN's array of blocks.
6755 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6756 updated to reflect the moved edges.
6758 The local variables are remapped to new instances, VARS_MAP is used
6759 to record the mapping. */
6761 static void
6762 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6763 basic_block after, bool update_edge_count_p,
6764 struct move_stmt_d *d)
6766 struct control_flow_graph *cfg;
6767 edge_iterator ei;
6768 edge e;
6769 gimple_stmt_iterator si;
6770 unsigned old_len, new_len;
6772 /* Remove BB from dominance structures. */
6773 delete_from_dominance_info (CDI_DOMINATORS, bb);
6775 /* Move BB from its current loop to the copy in the new function. */
6776 if (current_loops)
6778 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6779 if (new_loop)
6780 bb->loop_father = new_loop;
6783 /* Link BB to the new linked list. */
6784 move_block_after (bb, after);
6786 /* Update the edge count in the corresponding flowgraphs. */
6787 if (update_edge_count_p)
6788 FOR_EACH_EDGE (e, ei, bb->succs)
6790 cfun->cfg->x_n_edges--;
6791 dest_cfun->cfg->x_n_edges++;
6794 /* Remove BB from the original basic block array. */
6795 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6796 cfun->cfg->x_n_basic_blocks--;
6798 /* Grow DEST_CFUN's basic block array if needed. */
6799 cfg = dest_cfun->cfg;
6800 cfg->x_n_basic_blocks++;
6801 if (bb->index >= cfg->x_last_basic_block)
6802 cfg->x_last_basic_block = bb->index + 1;
6804 old_len = vec_safe_length (cfg->x_basic_block_info);
6805 if ((unsigned) cfg->x_last_basic_block >= old_len)
6807 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;  /* Grow by ~25%.  */
6808 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6811 (*cfg->x_basic_block_info)[bb->index] = bb;
6813 /* Remap the variables in phi nodes. */
6814 for (gphi_iterator psi = gsi_start_phis (bb);
6815 !gsi_end_p (psi); )
6817 gphi *phi = psi.phi ();
6818 use_operand_p use;
6819 tree op = PHI_RESULT (phi);
6820 ssa_op_iter oi;
6821 unsigned i;
6823 if (virtual_operand_p (op))
6825 /* Remove the phi nodes for virtual operands (alias analysis will be
6826 run for the new function, anyway). */
6827 remove_phi_node (&psi, true);
6828 continue;
6831 SET_PHI_RESULT (phi,
6832 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6833 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6835 op = USE_FROM_PTR (use);
6836 if (TREE_CODE (op) == SSA_NAME)
6837 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6840 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6842 location_t locus = gimple_phi_arg_location (phi, i);
6843 tree block = LOCATION_BLOCK (locus);
6845 if (locus == UNKNOWN_LOCATION)
6846 continue;
6847 if (d->orig_block == NULL_TREE || block == d->orig_block)
6849 locus = set_block (locus, d->new_block);
6850 gimple_phi_arg_set_location (phi, i, locus);
6854 gsi_next (&psi);
6857 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6859 gimple *stmt = gsi_stmt (si);
6860 struct walk_stmt_info wi;
6862 memset (&wi, 0, sizeof (wi));
6863 wi.info = d;
6864 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6866 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6868 tree label = gimple_label_label (label_stmt);
6869 int uid = LABEL_DECL_UID (label);
6871 gcc_assert (uid > -1);
6873 old_len = vec_safe_length (cfg->x_label_to_block_map);
6874 if (old_len <= (unsigned) uid)
6876 new_len = 3 * uid / 2 + 1;
6877 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6880 (*cfg->x_label_to_block_map)[uid] = bb;
6881 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6883 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6885 if (uid >= dest_cfun->cfg->last_label_uid)
6886 dest_cfun->cfg->last_label_uid = uid + 1;
6889 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6890 remove_stmt_from_eh_lp_fn (cfun, stmt);
6892 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6893 gimple_remove_stmt_histograms (cfun, stmt);
6895 /* We cannot leave any operands allocated from the operand caches of
6896 the current function. */
6897 free_stmt_operands (cfun, stmt);
6898 push_cfun (dest_cfun);
6899 update_stmt (stmt);
6900 pop_cfun ();
6903 FOR_EACH_EDGE (e, ei, bb->succs)
6904 if (e->goto_locus != UNKNOWN_LOCATION)
6906 tree block = LOCATION_BLOCK (e->goto_locus);
6907 if (d->orig_block == NULL_TREE
6908 || block == d->orig_block)
6909 e->goto_locus = set_block (e->goto_locus, d->new_block);
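/* Note on the virtual-operand PHIs removed in move_block_to_fn above
   (illustrative): a PHI such as

       .MEM_5 = PHI <.MEM_3 (2), .MEM_4 (3)>

   is deleted rather than remapped, because the destination function
   recomputes virtual operands from scratch anyway.  */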
6913 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6914 the outermost EH region. Use REGION as the incoming base EH region. */
6916 static eh_region
6917 find_outermost_region_in_block (struct function *src_cfun,
6918 basic_block bb, eh_region region)
6920 gimple_stmt_iterator si;
6922 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6924 gimple *stmt = gsi_stmt (si);
6925 eh_region stmt_region;
6926 int lp_nr;
6928 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6929 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6930 if (stmt_region)
6932 if (region == NULL)
6933 region = stmt_region;
6934 else if (stmt_region != region)
6936 region = eh_region_outermost (src_cfun, stmt_region, region);
6937 gcc_assert (region != NULL);
6942 return region;
6945 static tree
6946 new_label_mapper (tree decl, void *data)
6948 htab_t hash = (htab_t) data;
6949 struct tree_map *m;
6950 void **slot;
6952 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6954 m = XNEW (struct tree_map);
6955 m->hash = DECL_UID (decl);
6956 m->base.from = decl;
6957 m->to = create_artificial_label (UNKNOWN_LOCATION);
6958 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6959 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6960 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6962 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6963 gcc_assert (*slot == NULL);
6965 *slot = m;
6967 return m->to;
6970 /* Tree walker to replace the decls used inside value expressions by
6971 duplicates. */
6973 static tree
6974 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6976 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6978 switch (TREE_CODE (*tp))
6980 case VAR_DECL:
6981 case PARM_DECL:
6982 case RESULT_DECL:
6983 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6984 break;
6985 default:
6986 break;
6989 if (IS_TYPE_OR_DECL_P (*tp))
6990 *walk_subtrees = false;
6992 return NULL;
6995 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6996 subblocks. */
6998 static void
6999 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7000 tree to_context)
7002 tree *tp, t;
7004 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7006 t = *tp;
7007 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7008 continue;
7009 replace_by_duplicate_decl (&t, vars_map, to_context);
7010 if (t != *tp)
7012 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7014 tree x = DECL_VALUE_EXPR (*tp);
7015 struct replace_decls_d rd = { vars_map, to_context };
7016 unshare_expr (x);
7017 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7018 SET_DECL_VALUE_EXPR (t, x);
7019 DECL_HAS_VALUE_EXPR_P (t) = 1;
7021 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7022 *tp = t;
7026 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7027 replace_block_vars_by_duplicates (block, vars_map, to_context);
7030 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7031 from FN1 to FN2. */
7033 static void
7034 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7035 struct loop *loop)
7037 /* Discard it from the old loop array. */
7038 (*get_loops (fn1))[loop->num] = NULL;
7040 /* Place it in the new loop array, assigning it a new number. */
7041 loop->num = number_of_loops (fn2);
7042 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7044 /* Recurse to children. */
7045 for (loop = loop->inner; loop; loop = loop->next)
7046 fixup_loop_arrays_after_move (fn1, fn2, loop);
7049 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7050 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7052 DEBUG_FUNCTION void
7053 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7055 basic_block bb;
7056 edge_iterator ei;
7057 edge e;
7058 bitmap bbs = BITMAP_ALLOC (NULL);
7059 int i;
7061 gcc_assert (entry != NULL);
7062 gcc_assert (entry != exit);
7063 gcc_assert (bbs_p != NULL);
7065 gcc_assert (bbs_p->length () > 0);
7067 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7068 bitmap_set_bit (bbs, bb->index);
7070 gcc_assert (bitmap_bit_p (bbs, entry->index));
7071 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7073 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7075 if (bb == entry)
7077 gcc_assert (single_pred_p (entry));
7078 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7080 else
7081 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7083 e = ei_edge (ei);
7084 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7087 if (bb == exit)
7089 gcc_assert (single_succ_p (exit));
7090 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7092 else
7093 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7095 e = ei_edge (ei);
7096 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7100 BITMAP_FREE (bbs);
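/* For intuition (illustrative): the region { A, B, C } in

       pred -> A -> B -> C -> succ
                \_________/

   passes these checks when A is ENTRY and C is EXIT: A's single
   predecessor lies outside the region, C's single successor lies
   outside, and every other edge starts and ends inside the region.  */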
7103 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7105 bool
7106 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7108 bitmap release_names = (bitmap)data;
7110 if (TREE_CODE (from) != SSA_NAME)
7111 return true;
7113 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7114 return true;
7117 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7118 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7119 single basic block in the original CFG and the new basic block is
7120 returned. DEST_CFUN must not have a CFG yet.
7122 Note that the region need not be a pure SESE region. Blocks inside
7123 the region may contain calls to abort/exit. The only restriction
7124 is that ENTRY_BB should be the only entry point and it must
7125 dominate EXIT_BB.
7127 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7128 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7129 to the new function.
7131 All local variables referenced in the region are assumed to be in
7132 the corresponding BLOCK_VARS and unexpanded variable lists
7133 associated with DEST_CFUN.
7135 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7136 reimplement move_sese_region_to_fn by duplicating the region rather than
7137 moving it. */
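/* Illustrative use (not part of the original source): OMP expansion
   outlines a parallel region in roughly this way,

       void foo ()                     void foo ()
       {                               {
         #pragma omp parallel    ==>     __builtin_GOMP_parallel
           { body; }                       (foo._omp_fn.0, ...);
       }                               }
                                       void foo._omp_fn.0 (void *data)
                                       { body; }

   where the SESE region holding BODY is moved into the freshly created
   child function.  */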
7139 basic_block
7140 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7141 basic_block exit_bb, tree orig_block)
7143 vec<basic_block> bbs, dom_bbs;
7144 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7145 basic_block after, bb, *entry_pred, *exit_succ, abb;
7146 struct function *saved_cfun = cfun;
7147 int *entry_flag, *exit_flag;
7148 unsigned *entry_prob, *exit_prob;
7149 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7150 edge e;
7151 edge_iterator ei;
7152 htab_t new_label_map;
7153 hash_map<void *, void *> *eh_map;
7154 struct loop *loop = entry_bb->loop_father;
7155 struct loop *loop0 = get_loop (saved_cfun, 0);
7156 struct move_stmt_d d;
7158 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7159 region. */
7160 gcc_assert (entry_bb != exit_bb
7161 && (!exit_bb
7162 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7164 /* Collect all the blocks in the region. Manually add ENTRY_BB
7165 because it won't be added by gather_blocks_in_sese_region. */
7166 bbs.create (0);
7167 bbs.safe_push (entry_bb);
7168 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7170 if (flag_checking)
7171 verify_sese (entry_bb, exit_bb, &bbs);
7173 /* The blocks that used to be dominated by something in BBS will now be
7174 dominated by the new block. */
7175 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7176 bbs.address (),
7177 bbs.length ());
7179 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7180 the predecessor edges to ENTRY_BB and the successor edges to
7181 EXIT_BB so that we can re-attach them to the new basic block that
7182 will replace the region. */
7183 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7184 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7185 entry_flag = XNEWVEC (int, num_entry_edges);
7186 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7187 i = 0;
7188 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7190 entry_prob[i] = e->probability;
7191 entry_flag[i] = e->flags;
7192 entry_pred[i++] = e->src;
7193 remove_edge (e);
7196 if (exit_bb)
7198 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7199 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7200 exit_flag = XNEWVEC (int, num_exit_edges);
7201 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7202 i = 0;
7203 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7205 exit_prob[i] = e->probability;
7206 exit_flag[i] = e->flags;
7207 exit_succ[i++] = e->dest;
7208 remove_edge (e);
7211 else
7213 num_exit_edges = 0;
7214 exit_succ = NULL;
7215 exit_flag = NULL;
7216 exit_prob = NULL;
7219 /* Switch context to the child function to initialize DEST_FN's CFG. */
7220 gcc_assert (dest_cfun->cfg == NULL);
7221 push_cfun (dest_cfun);
7223 init_empty_tree_cfg ();
7225 /* Initialize EH information for the new function. */
7226 eh_map = NULL;
7227 new_label_map = NULL;
7228 if (saved_cfun->eh)
7230 eh_region region = NULL;
7232 FOR_EACH_VEC_ELT (bbs, i, bb)
7233 region = find_outermost_region_in_block (saved_cfun, bb, region);
7235 init_eh_for_function ();
7236 if (region != NULL)
7238 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7239 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7240 new_label_mapper, new_label_map);
7244 /* Initialize an empty loop tree. */
7245 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7246 init_loops_structure (dest_cfun, loops, 1);
7247 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7248 set_loops_for_fn (dest_cfun, loops);
7250 /* Move the outlined loop tree part. */
7251 num_nodes = bbs.length ();
7252 FOR_EACH_VEC_ELT (bbs, i, bb)
7254 if (bb->loop_father->header == bb)
7256 struct loop *this_loop = bb->loop_father;
7257 struct loop *outer = loop_outer (this_loop);
7258 if (outer == loop
7259 /* If the SESE region contains some bbs ending with
7260 a noreturn call, those are considered to belong
7261 to the outermost loop in saved_cfun, rather than
7262 the entry_bb's loop_father. */
7263 || outer == loop0)
7265 if (outer != loop)
7266 num_nodes -= this_loop->num_nodes;
7267 flow_loop_tree_node_remove (bb->loop_father);
7268 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7269 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7272 else if (bb->loop_father == loop0 && loop0 != loop)
7273 num_nodes--;
7275 /* Remove loop exits from the outlined region. */
7276 if (loops_for_fn (saved_cfun)->exits)
7277 FOR_EACH_EDGE (e, ei, bb->succs)
7279 struct loops *l = loops_for_fn (saved_cfun);
7280 loop_exit **slot
7281 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7282 NO_INSERT);
7283 if (slot)
7284 l->exits->clear_slot (slot);
7289 /* Adjust the number of blocks in the tree root of the outlined part. */
7290 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7292 /* Setup a mapping to be used by move_block_to_fn. */
7293 loop->aux = current_loops->tree_root;
7294 loop0->aux = current_loops->tree_root;
7296 pop_cfun ();
7298 /* Move blocks from BBS into DEST_CFUN. */
7299 gcc_assert (bbs.length () >= 2);
7300 after = dest_cfun->cfg->x_entry_block_ptr;
7301 hash_map<tree, tree> vars_map;
7303 memset (&d, 0, sizeof (d));
7304 d.orig_block = orig_block;
7305 d.new_block = DECL_INITIAL (dest_cfun->decl);
7306 d.from_context = cfun->decl;
7307 d.to_context = dest_cfun->decl;
7308 d.vars_map = &vars_map;
7309 d.new_label_map = new_label_map;
7310 d.eh_map = eh_map;
7311 d.remap_decls_p = true;
7313 if (gimple_in_ssa_p (cfun))
7314 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7316 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7317 set_ssa_default_def (dest_cfun, arg, narg);
7318 vars_map.put (arg, narg);
7321 FOR_EACH_VEC_ELT (bbs, i, bb)
7323 /* No need to update edge counts on the last block. It has
7324 already been updated earlier when we detached the region from
7325 the original CFG. */
7326 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7327 after = bb;
7330 loop->aux = NULL;
7331 loop0->aux = NULL;
7332 /* Loop sizes are no longer correct, fix them up. */
7333 loop->num_nodes -= num_nodes;
7334 for (struct loop *outer = loop_outer (loop);
7335 outer; outer = loop_outer (outer))
7336 outer->num_nodes -= num_nodes;
7337 loop0->num_nodes -= bbs.length () - num_nodes;
7339 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7341 struct loop *aloop;
7342 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7343 if (aloop != NULL)
7345 if (aloop->simduid)
7347 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7348 d.to_context);
7349 dest_cfun->has_simduid_loops = true;
7351 if (aloop->force_vectorize)
7352 dest_cfun->has_force_vectorize_loops = true;
7356 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7357 if (orig_block)
7359 tree block;
7360 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7361 == NULL_TREE);
7362 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7363 = BLOCK_SUBBLOCKS (orig_block);
7364 for (block = BLOCK_SUBBLOCKS (orig_block);
7365 block; block = BLOCK_CHAIN (block))
7366 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7367 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7370 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7371 &vars_map, dest_cfun->decl);
7373 if (new_label_map)
7374 htab_delete (new_label_map);
7375 if (eh_map)
7376 delete eh_map;
7378 if (gimple_in_ssa_p (cfun))
7380 /* We need to release ssa-names in a defined order, so first find them,
7381 and then iterate in ascending version order. */
7382 bitmap release_names = BITMAP_ALLOC (NULL);
7383 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7384 bitmap_iterator bi;
7385 unsigned i;
7386 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7387 release_ssa_name (ssa_name (i));
7388 BITMAP_FREE (release_names);
7391 /* Rewire the entry and exit blocks. The successor to the entry
7392 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7393 the child function. Similarly, the predecessor of DEST_FN's
7394 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7395 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7396 various CFG manipulation functions get to the right CFG.
7398 FIXME, this is silly. The CFG ought to become a parameter to
7399 these helpers. */
7400 push_cfun (dest_cfun);
7401 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7402 if (exit_bb)
7403 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7404 pop_cfun ();
7406 /* Back in the original function, the SESE region has disappeared,
7407 create a new basic block in its place. */
7408 bb = create_empty_bb (entry_pred[0]);
7409 if (current_loops)
7410 add_bb_to_loop (bb, loop);
7411 for (i = 0; i < num_entry_edges; i++)
7413 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7414 e->probability = entry_prob[i];
7417 for (i = 0; i < num_exit_edges; i++)
7419 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7420 e->probability = exit_prob[i];
7423 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7424 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7425 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7426 dom_bbs.release ();
7428 if (exit_bb)
7430 free (exit_prob);
7431 free (exit_flag);
7432 free (exit_succ);
7434 free (entry_prob);
7435 free (entry_flag);
7436 free (entry_pred);
7437 bbs.release ();
7439 return bb;
7442 /* Dump default def DEF to file FILE using FLAGS and indentation
7443 SPC. */
7445 static void
7446 dump_default_def (FILE *file, tree def, int spc, int flags)
7448 for (int i = 0; i < spc; ++i)
7449 fprintf (file, " ");
7450 dump_ssaname_info_to_file (file, def, spc);
7452 print_generic_expr (file, TREE_TYPE (def), flags);
7453 fprintf (file, " ");
7454 print_generic_expr (file, def, flags);
7455 fprintf (file, " = ");
7456 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7457 fprintf (file, ";\n");
7460 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).  */
7463 void
7464 dump_function_to_file (tree fndecl, FILE *file, int flags)
7466 tree arg, var, old_current_fndecl = current_function_decl;
7467 struct function *dsf;
7468 bool ignore_topmost_bind = false, any_var = false;
7469 basic_block bb;
7470 tree chain;
7471 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7472 && decl_is_tm_clone (fndecl));
7473 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7475 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7477 fprintf (file, "__attribute__((");
7479 bool first = true;
7480 tree chain;
7481 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7482 first = false, chain = TREE_CHAIN (chain))
7484 if (!first)
7485 fprintf (file, ", ");
7487 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7488 if (TREE_VALUE (chain) != NULL_TREE)
7490 fprintf (file, " (");
7491 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7492 fprintf (file, ")");
7496 fprintf (file, "))\n");
7499 current_function_decl = fndecl;
7500 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7502 arg = DECL_ARGUMENTS (fndecl);
7503 while (arg)
7505 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7506 fprintf (file, " ");
7507 print_generic_expr (file, arg, dump_flags);
7508 if (flags & TDF_VERBOSE)
7509 print_node (file, "", arg, 4);
7510 if (DECL_CHAIN (arg))
7511 fprintf (file, ", ");
7512 arg = DECL_CHAIN (arg);
7514 fprintf (file, ")\n");
7516 if (flags & TDF_VERBOSE)
7517 print_node (file, "", fndecl, 2);
7519 dsf = DECL_STRUCT_FUNCTION (fndecl);
7520 if (dsf && (flags & TDF_EH))
7521 dump_eh_tree (file, dsf);
7523 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7525 dump_node (fndecl, TDF_SLIM | flags, file);
7526 current_function_decl = old_current_fndecl;
7527 return;
7530 /* When GIMPLE is lowered, the variables are no longer available in
7531 BIND_EXPRs, so display them separately. */
7532 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7534 unsigned ix;
7535 ignore_topmost_bind = true;
7537 fprintf (file, "{\n");
7538 if (gimple_in_ssa_p (fun)
7539 && (flags & TDF_ALIAS))
7541 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7542 arg = DECL_CHAIN (arg))
7544 tree def = ssa_default_def (fun, arg);
7545 if (def)
7546 dump_default_def (file, def, 2, flags);
7549 tree res = DECL_RESULT (fun->decl);
7550 if (res != NULL_TREE
7551 && DECL_BY_REFERENCE (res))
7553 tree def = ssa_default_def (fun, res);
7554 if (def)
7555 dump_default_def (file, def, 2, flags);
7558 tree static_chain = fun->static_chain_decl;
7559 if (static_chain != NULL_TREE)
7561 tree def = ssa_default_def (fun, static_chain);
7562 if (def)
7563 dump_default_def (file, def, 2, flags);
7567 if (!vec_safe_is_empty (fun->local_decls))
7568 FOR_EACH_LOCAL_DECL (fun, ix, var)
7570 print_generic_decl (file, var, flags);
7571 if (flags & TDF_VERBOSE)
7572 print_node (file, "", var, 4);
7573 fprintf (file, "\n");
7575 any_var = true;
7578 tree name;
7580 if (gimple_in_ssa_p (cfun))
7581 FOR_EACH_SSA_NAME (ix, name, cfun)
7583 if (!SSA_NAME_VAR (name))
7585 fprintf (file, " ");
7586 print_generic_expr (file, TREE_TYPE (name), flags);
7587 fprintf (file, " ");
7588 print_generic_expr (file, name, flags);
7589 fprintf (file, ";\n");
7591 any_var = true;
7596 if (fun && fun->decl == fndecl
7597 && fun->cfg
7598 && basic_block_info_for_fn (fun))
7600 /* If the CFG has been built, emit a CFG-based dump. */
7601 if (!ignore_topmost_bind)
7602 fprintf (file, "{\n");
7604 if (any_var && n_basic_blocks_for_fn (fun))
7605 fprintf (file, "\n");
7607 FOR_EACH_BB_FN (bb, fun)
7608 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7610 fprintf (file, "}\n");
7612 else if (DECL_SAVED_TREE (fndecl) == NULL)
7614 /* The function is now in GIMPLE form but the CFG has not been
7615 built yet. Emit the single sequence of GIMPLE statements
7616 that make up its body. */
7617 gimple_seq body = gimple_body (fndecl);
7619 if (gimple_seq_first_stmt (body)
7620 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7621 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7622 print_gimple_seq (file, body, 0, flags);
7623 else
7625 if (!ignore_topmost_bind)
7626 fprintf (file, "{\n");
7628 if (any_var)
7629 fprintf (file, "\n");
7631 print_gimple_seq (file, body, 2, flags);
7632 fprintf (file, "}\n");
7635 else
7637 int indent;
7639 /* Make a tree based dump. */
7640 chain = DECL_SAVED_TREE (fndecl);
7641 if (chain && TREE_CODE (chain) == BIND_EXPR)
7643 if (ignore_topmost_bind)
7645 chain = BIND_EXPR_BODY (chain);
7646 indent = 2;
7648 else
7649 indent = 0;
7651 else
7653 if (!ignore_topmost_bind)
7655 fprintf (file, "{\n");
7656 /* No topmost bind, pretend it's ignored for later. */
7657 ignore_topmost_bind = true;
7659 indent = 2;
7662 if (any_var)
7663 fprintf (file, "\n");
7665 print_generic_stmt_indented (file, chain, flags, indent);
7666 if (ignore_topmost_bind)
7667 fprintf (file, "}\n");
7670 if (flags & TDF_ENUMERATE_LOCALS)
7671 dump_enumerated_decls (file, flags);
7672 fprintf (file, "\n\n");
7674 current_function_decl = old_current_fndecl;
7677 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7679 DEBUG_FUNCTION void
7680 debug_function (tree fn, int flags)
7682 dump_function_to_file (fn, stderr, flags);
7686 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7688 static void
7689 print_pred_bbs (FILE *file, basic_block bb)
7691 edge e;
7692 edge_iterator ei;
7694 FOR_EACH_EDGE (e, ei, bb->preds)
7695 fprintf (file, "bb_%d ", e->src->index);
7699 /* Print on FILE the indexes for the successors of basic_block BB. */
7701 static void
7702 print_succ_bbs (FILE *file, basic_block bb)
7704 edge e;
7705 edge_iterator ei;
7707 FOR_EACH_EDGE (e, ei, bb->succs)
7708 fprintf (file, "bb_%d ", e->dest->index);
7711 /* Print to FILE the basic block BB following the VERBOSITY level. */
7713 void
7714 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7716 char *s_indent = (char *) alloca ((size_t) indent + 1);
7717 memset ((void *) s_indent, ' ', (size_t) indent);
7718 s_indent[indent] = '\0';
7720 /* Print basic_block's header. */
7721 if (verbosity >= 2)
7723 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7724 print_pred_bbs (file, bb);
7725 fprintf (file, "}, succs = {");
7726 print_succ_bbs (file, bb);
7727 fprintf (file, "})\n");
7730 /* Print basic_block's body. */
7731 if (verbosity >= 3)
7733 fprintf (file, "%s {\n", s_indent);
7734 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7735 fprintf (file, "%s }\n", s_indent);
7739 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7741 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7742 VERBOSITY level this outputs the contents of the loop, or just its
7743 structure. */
7745 static void
7746 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7748 char *s_indent;
7749 basic_block bb;
7751 if (loop == NULL)
7752 return;
7754 s_indent = (char *) alloca ((size_t) indent + 1);
7755 memset ((void *) s_indent, ' ', (size_t) indent);
7756 s_indent[indent] = '\0';
7758 /* Print loop's header. */
7759 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7760 if (loop->header)
7761 fprintf (file, "header = %d", loop->header->index);
7762 else
7764 fprintf (file, "deleted)\n");
7765 return;
7767 if (loop->latch)
7768 fprintf (file, ", latch = %d", loop->latch->index);
7769 else
7770 fprintf (file, ", multiple latches");
7771 fprintf (file, ", niter = ");
7772 print_generic_expr (file, loop->nb_iterations, 0);
7774 if (loop->any_upper_bound)
7776 fprintf (file, ", upper_bound = ");
7777 print_decu (loop->nb_iterations_upper_bound, file);
7779 if (loop->any_likely_upper_bound)
7781 fprintf (file, ", likely_upper_bound = ");
7782 print_decu (loop->nb_iterations_likely_upper_bound, file);
7785 if (loop->any_estimate)
7787 fprintf (file, ", estimate = ");
7788 print_decu (loop->nb_iterations_estimate, file);
7790 fprintf (file, ")\n");
7792 /* Print loop's body. */
7793 if (verbosity >= 1)
7795 fprintf (file, "%s{\n", s_indent);
7796 FOR_EACH_BB_FN (bb, cfun)
7797 if (bb->loop_father == loop)
7798 print_loops_bb (file, bb, indent, verbosity);
7800 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7801 fprintf (file, "%s}\n", s_indent);
7805 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7806 spaces. Following VERBOSITY level this outputs the contents of the
7807 loop, or just its structure. */
7809 static void
7810 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7811 int verbosity)
7813 if (loop == NULL)
7814 return;
7816 print_loop (file, loop, indent, verbosity);
7817 print_loop_and_siblings (file, loop->next, indent, verbosity);
7820 /* Follow a CFG edge from the entry point of the program, and on entry
7821 of a loop, pretty print the loop structure on FILE. */
7823 void
7824 print_loops (FILE *file, int verbosity)
7826 basic_block bb;
7828 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7829 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7830 if (bb && bb->loop_father)
7831 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7834 /* Dump a loop. */
7836 DEBUG_FUNCTION void
7837 debug (struct loop &ref)
7839 print_loop (stderr, &ref, 0, /*verbosity*/0);
7842 DEBUG_FUNCTION void
7843 debug (struct loop *ptr)
7845 if (ptr)
7846 debug (*ptr);
7847 else
7848 fprintf (stderr, "<nil>\n");
7851 /* Dump a loop verbosely. */
7853 DEBUG_FUNCTION void
7854 debug_verbose (struct loop &ref)
7856 print_loop (stderr, &ref, 0, /*verbosity*/3);
7859 DEBUG_FUNCTION void
7860 debug_verbose (struct loop *ptr)
7862 if (ptr)
7863 debug (*ptr);
7864 else
7865 fprintf (stderr, "<nil>\n");
7869 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7871 DEBUG_FUNCTION void
7872 debug_loops (int verbosity)
7874 print_loops (stderr, verbosity);
7877 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7879 DEBUG_FUNCTION void
7880 debug_loop (struct loop *loop, int verbosity)
7882 print_loop (stderr, loop, 0, verbosity);
7885 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7886 level. */
7888 DEBUG_FUNCTION void
7889 debug_loop_num (unsigned num, int verbosity)
7891 debug_loop (get_loop (cfun, num), verbosity);
7894 /* Return true if BB ends with a call, possibly followed by some
7895 instructions that must stay with the call. Return false
7896 otherwise. */
7898 static bool
7899 gimple_block_ends_with_call_p (basic_block bb)
7901 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7902 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7906 /* Return true if BB ends with a conditional branch. Return false
7907 otherwise. */
7909 static bool
7910 gimple_block_ends_with_condjump_p (const_basic_block bb)
7912 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7913 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7917 /* Return true if statement T may terminate execution of BB in ways not
7918 explicitly represented in the CFG. */
7920 bool
7921 stmt_can_terminate_bb_p (gimple *t)
7923 tree fndecl = NULL_TREE;
7924 int call_flags = 0;
7926 /* An EH exception not handled internally terminates execution of the whole
7927 function. */
7928 if (stmt_can_throw_external (t))
7929 return true;
7931 /* NORETURN and LONGJMP calls already have an edge to exit.
7932 CONST and PURE calls do not need one.
7933 We don't currently check for CONST and PURE here, although
7934 it would be a good idea, because those attributes are
7935 figured out from the RTL in mark_constant_function, and
7936 the counter incrementation code from -fprofile-arcs
7937 leads to different results from -fbranch-probabilities. */
7938 if (is_gimple_call (t))
7940 fndecl = gimple_call_fndecl (t);
7941 call_flags = gimple_call_flags (t);
7944 if (is_gimple_call (t)
7945 && fndecl
7946 && DECL_BUILT_IN (fndecl)
7947 && (call_flags & ECF_NOTHROW)
7948 && !(call_flags & ECF_RETURNS_TWICE)
7949 /* fork() doesn't really return twice, but the effect of
7950 wrapping it in __gcov_fork() which calls __gcov_flush()
7951 and clears the counters before forking has the same
7952 effect as returning twice. Force a fake edge. */
7953 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7954 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7955 return false;
7957 if (is_gimple_call (t))
7959 edge_iterator ei;
7960 edge e;
7961 basic_block bb;
7963 if (call_flags & (ECF_PURE | ECF_CONST)
7964 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
7965 return false;
7967 /* A function call may do longjmp, terminate the program or do other things.
7968 Special case noreturn calls that have non-abnormal edges out, as in this case
7969 the fact is sufficiently represented by lack of edges out of T. */
7970 if (!(call_flags & ECF_NORETURN))
7971 return true;
7973 bb = gimple_bb (t);
7974 FOR_EACH_EDGE (e, ei, bb->succs)
7975 if ((e->flags & EDGE_FAKE) == 0)
7976 return true;
7979 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7980 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7981 return true;
7983 return false;
7987 /* Add fake edges to the function exit for any non-constant and
7988 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7989 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7990 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7991 that were split.
7993 The goal is to expose cases in which entering a basic block does
7994 not imply that all subsequent instructions must be executed. */
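/* For instance (illustrative):

       a = 1;
       foo ();        <-- may call exit () or longjmp ()
       a = 2;

   Without a fake edge from the call's block to the exit block, profile
   instrumentation could wrongly conclude that "a = 2" executes whenever
   "a = 1" does.  */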
7996 static int
7997 gimple_flow_call_edges_add (sbitmap blocks)
7999 int i;
8000 int blocks_split = 0;
8001 int last_bb = last_basic_block_for_fn (cfun);
8002 bool check_last_block = false;
8004 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8005 return 0;
8007 if (! blocks)
8008 check_last_block = true;
8009 else
8010 check_last_block = bitmap_bit_p (blocks,
8011 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8013 /* In the last basic block, before epilogue generation, there will be
8014 a fallthru edge to EXIT. Special care is required if the last insn
8015 of the last basic block is a call because make_edge folds duplicate
8016 edges, which would result in the fallthru edge also being marked
8017 fake, which would result in the fallthru edge being removed by
8018 remove_fake_edges, which would result in an invalid CFG.
8020 Moreover, we can't elide the outgoing fake edge, since the block
8021 profiler needs to take this into account in order to solve the minimal
8022 spanning tree in the case that the call doesn't return.
8024 Handle this by adding a dummy instruction in a new last basic block. */
8025 if (check_last_block)
8027 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8028 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8029 gimple *t = NULL;
8031 if (!gsi_end_p (gsi))
8032 t = gsi_stmt (gsi);
8034 if (t && stmt_can_terminate_bb_p (t))
8036 edge e;
8038 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8039 if (e)
8041 gsi_insert_on_edge (e, gimple_build_nop ());
8042 gsi_commit_edge_inserts ();
8047 /* Now add fake edges to the function exit for any non-constant
8048 calls since there is no way that we can determine if they will
8049 return or not... */
8050 for (i = 0; i < last_bb; i++)
8052 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8053 gimple_stmt_iterator gsi;
8054 gimple *stmt, *last_stmt;
8056 if (!bb)
8057 continue;
8059 if (blocks && !bitmap_bit_p (blocks, i))
8060 continue;
8062 gsi = gsi_last_nondebug_bb (bb);
8063 if (!gsi_end_p (gsi))
8065 last_stmt = gsi_stmt (gsi);
8068 stmt = gsi_stmt (gsi);
8069 if (stmt_can_terminate_bb_p (stmt))
8071 edge e;
8073 /* The handling above of the final block before the
8074 epilogue should be enough to verify that there is
8075 no edge to the exit block in CFG already.
8076 Calling make_edge in such case would cause us to
8077 mark that edge as fake and remove it later. */
8078 if (flag_checking && stmt == last_stmt)
8080 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8081 gcc_assert (e == NULL);
8084 /* Note that the following may create a new basic block
8085 and renumber the existing basic blocks. */
8086 if (stmt != last_stmt)
8088 e = split_block (bb, stmt);
8089 if (e)
8090 blocks_split++;
8092 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8094 gsi_prev (&gsi);
8096 while (!gsi_end_p (gsi));
8100 if (blocks_split)
8101 verify_flow_info ();
8103 return blocks_split;
8106 /* Removes edge E and all the blocks dominated by it, and updates dominance
8107 information. The IL in E->src needs to be updated separately.
8108 If dominance info is not available, only the edge E is removed. */
8110 void
8111 remove_edge_and_dominated_blocks (edge e)
8113 vec<basic_block> bbs_to_remove = vNULL;
8114 vec<basic_block> bbs_to_fix_dom = vNULL;
8115 bitmap df, df_idom;
8116 edge f;
8117 edge_iterator ei;
8118 bool none_removed = false;
8119 unsigned i;
8120 basic_block bb, dbb;
8121 bitmap_iterator bi;
8123 /* If we are removing a path inside a non-root loop, that may change
8124 loop ownership of blocks or remove loops entirely. Mark loops for fixup. */
8125 if (current_loops
8126 && loop_outer (e->src->loop_father) != NULL
8127 && e->src->loop_father == e->dest->loop_father)
8128 loops_state_set (LOOPS_NEED_FIXUP);
8130 if (!dom_info_available_p (CDI_DOMINATORS))
8132 remove_edge (e);
8133 return;
8136 /* No updating is needed for edges to exit. */
8137 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8139 if (cfgcleanup_altered_bbs)
8140 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8141 remove_edge (e);
8142 return;
8145 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8146 that is not dominated by E->dest, then this set is empty. Otherwise,
8147 all the basic blocks dominated by E->dest are removed.
8149 Also, to DF_IDOM we store the immediate dominators of the blocks in
8150 the dominance frontier of E (i.e., of the successors of the
8151 removed blocks, if there are any, and of E->dest otherwise). */
8152 FOR_EACH_EDGE (f, ei, e->dest->preds)
8154 if (f == e)
8155 continue;
8157 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8159 none_removed = true;
8160 break;
8164 df = BITMAP_ALLOC (NULL);
8165 df_idom = BITMAP_ALLOC (NULL);
8167 if (none_removed)
8168 bitmap_set_bit (df_idom,
8169 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8170 else
8172 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8173 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8175 FOR_EACH_EDGE (f, ei, bb->succs)
8177 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8178 bitmap_set_bit (df, f->dest->index);
8181 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8182 bitmap_clear_bit (df, bb->index);
8184 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8186 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8187 bitmap_set_bit (df_idom,
8188 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8192 if (cfgcleanup_altered_bbs)
8194 /* Record the set of the altered basic blocks. */
8195 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8196 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8199 /* Remove E and the cancelled blocks. */
8200 if (none_removed)
8201 remove_edge (e);
8202 else
8204 /* Walk backwards so as to get a chance to substitute all
8205 released DEFs into debug stmts. See
8206 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8207 details. */
8208 for (i = bbs_to_remove.length (); i-- > 0; )
8209 delete_basic_block (bbs_to_remove[i]);
8212 /* Update the dominance information. The immediate dominator may change only
8213 for blocks whose immediate dominator belongs to DF_IDOM:
8215 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8216 removal. Let Z be the block such that idom(Z) = Y and
8217 Z dominates X after the removal. Before removal, there exists a path P
8218 from Y to X that avoids Z. Let F be the last edge on P that is
8219 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8220 dominates W, and because of P, Z does not dominate W), and W belongs to
8221 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8222 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8224 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8225 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8226 dbb;
8227 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8228 bbs_to_fix_dom.safe_push (dbb);
8231 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8233 BITMAP_FREE (df);
8234 BITMAP_FREE (df_idom);
8235 bbs_to_remove.release ();
8236 bbs_to_fix_dom.release ();
8239 /* Purge dead EH edges from basic block BB. */
8241 bool
8242 gimple_purge_dead_eh_edges (basic_block bb)
8244 bool changed = false;
8245 edge e;
8246 edge_iterator ei;
8247 gimple *stmt = last_stmt (bb);
8249 if (stmt && stmt_can_throw_internal (stmt))
8250 return false;
8252 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8254 if (e->flags & EDGE_EH)
8256 remove_edge_and_dominated_blocks (e);
8257 changed = true;
8259 else
8260 ei_next (&ei);
8263 return changed;
8266 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8268 bool
8269 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8271 bool changed = false;
8272 unsigned i;
8273 bitmap_iterator bi;
8275 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8277 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8279 /* Earlier gimple_purge_dead_eh_edges could have removed
8280 this basic block already. */
8281 gcc_assert (bb || changed);
8282 if (bb != NULL)
8283 changed |= gimple_purge_dead_eh_edges (bb);
8286 return changed;
8289 /* Purge dead abnormal call edges from basic block BB. */
8291 bool
8292 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8294 bool changed = false;
8295 edge e;
8296 edge_iterator ei;
8297 gimple *stmt = last_stmt (bb);
8299 if (!cfun->has_nonlocal_label
8300 && !cfun->calls_setjmp)
8301 return false;
8303 if (stmt && stmt_can_make_abnormal_goto (stmt))
8304 return false;
8306 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8308 if (e->flags & EDGE_ABNORMAL)
8310 if (e->flags & EDGE_FALLTHRU)
8311 e->flags &= ~EDGE_ABNORMAL;
8312 else
8313 remove_edge_and_dominated_blocks (e);
8314 changed = true;
8316 else
8317 ei_next (&ei);
8320 return changed;
8323 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8325 bool
8326 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8328 bool changed = false;
8329 unsigned i;
8330 bitmap_iterator bi;
8332 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8334 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8336 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8337 this basic block already. */
8338 gcc_assert (bb || changed);
8339 if (bb != NULL)
8340 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8343 return changed;
8346 /* This function is called whenever a new edge is created or
8347 redirected. */
8349 static void
8350 gimple_execute_on_growing_pred (edge e)
8352 basic_block bb = e->dest;
8354 if (!gimple_seq_empty_p (phi_nodes (bb)))
8355 reserve_phi_args_for_new_edge (bb);
8358 /* This function is called immediately before edge E is removed from
8359 the edge vector E->dest->preds. */
8361 static void
8362 gimple_execute_on_shrinking_pred (edge e)
8364 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8365 remove_phi_args (e);
8368 /*---------------------------------------------------------------------------
8369 Helper functions for Loop versioning
8370 ---------------------------------------------------------------------------*/
8372 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8373 of 'first'. Both of them are dominated by 'new_head' basic block. When
8374 'new_head' was created by splitting 'second's incoming edge, split_edge ()
8375 added phi arguments on that edge. Later, an additional edge 'e' was created
8376 to connect 'new_head' and 'first'. This routine now adds to edge 'e' the
8377 phi args that the edge from 'new_head' to 'second' received as part of the
8378 edge splitting. */
8380 static void
8381 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8382 basic_block new_head, edge e)
8384 gphi *phi1, *phi2;
8385 gphi_iterator psi1, psi2;
8386 tree def;
8387 edge e2 = find_edge (new_head, second);
8389 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8390 edge, we should always have an edge from NEW_HEAD to SECOND. */
8391 gcc_assert (e2 != NULL);
8393 /* Browse all 'second' basic block phi nodes and add phi args to
8394 edge 'e' for 'first' head. PHI args are always in correct order. */
8396 for (psi2 = gsi_start_phis (second),
8397 psi1 = gsi_start_phis (first);
8398 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8399 gsi_next (&psi2), gsi_next (&psi1))
8401 phi1 = psi1.phi ();
8402 phi2 = psi2.phi ();
8403 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8404 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8409 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8410 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8411 the destination of the ELSE part. */
8413 static void
8414 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8415 basic_block second_head ATTRIBUTE_UNUSED,
8416 basic_block cond_bb, void *cond_e)
8418 gimple_stmt_iterator gsi;
8419 gimple *new_cond_expr;
8420 tree cond_expr = (tree) cond_e;
8421 edge e0;
8423 /* Build new conditional expr */
8424 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8425 NULL_TREE, NULL_TREE);
8427 /* Add new cond in cond_bb. */
8428 gsi = gsi_last_bb (cond_bb);
8429 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8431 /* Adjust edges appropriately to connect new head with first head
8432 as well as second head. */
8433 e0 = single_succ_edge (cond_bb);
8434 e0->flags &= ~EDGE_FALLTHRU;
8435 e0->flags |= EDGE_FALSE_VALUE;
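/* Resulting shape (illustrative; only the FALSE edge is prepared here,
   the TRUE edge to SECOND_HEAD is added by the caller):

       cond_bb:  if (cond_expr)
                   goto second_head;    <-- THEN: one version of the code
                 else
                   goto first_head;     <-- ELSE: the other version  */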
8439 /* Do book-keeping of basic block BB for the profile consistency checker.
8440    If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8441    do post-pass accounting.  Store the results in RECORD.  */
8442 static void
8443 gimple_account_profile_record (basic_block bb, int after_pass,
8444 struct profile_record *record)
8446 gimple_stmt_iterator i;
8447 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8449 record->size[after_pass]
8450 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8451 if (profile_status_for_fn (cfun) == PROFILE_READ)
8452 record->time[after_pass]
8453 += estimate_num_insns (gsi_stmt (i),
8454 &eni_time_weights) * bb->count;
8455 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8456 record->time[after_pass]
8457 += estimate_num_insns (gsi_stmt (i),
8458 &eni_time_weights) * bb->frequency;
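/* A minimal sketch of how the pass manager might drive this hook via
   the account_profile_record cfg-hooks wrapper to compare size/time
   across one pass (assumes the size[]/time[] accounting above; the
   surrounding names are hypothetical):

     struct profile_record record;
     memset (&record, 0, sizeof (record));
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       account_profile_record (bb, 0, &record);   // pre-pass
     // ... execute one pass ...
     FOR_EACH_BB_FN (bb, cfun)
       account_profile_record (bb, 1, &record);   // post-pass
     // record.size[1] > record.size[0] indicates the pass grew the code.
*/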
8462 struct cfg_hooks gimple_cfg_hooks = {
8463 "gimple",
8464 gimple_verify_flow_info,
8465 gimple_dump_bb, /* dump_bb */
8466 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8467 create_bb, /* create_basic_block */
8468 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8469 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8470 gimple_can_remove_branch_p, /* can_remove_branch_p */
8471 remove_bb, /* delete_basic_block */
8472 gimple_split_block, /* split_block */
8473 gimple_move_block_after, /* move_block_after */
8474 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8475 gimple_merge_blocks, /* merge_blocks */
8476 gimple_predict_edge, /* predict_edge */
8477 gimple_predicted_by_p, /* predicted_by_p */
8478 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8479 gimple_duplicate_bb, /* duplicate_block */
8480 gimple_split_edge, /* split_edge */
8481 gimple_make_forwarder_block, /* make_forwarder_block */
8482 NULL, /* tidy_fallthru_edge */
8483 NULL, /* force_nonfallthru */
8484 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8485 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8486 gimple_flow_call_edges_add, /* flow_call_edges_add */
8487 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8488 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8489 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8490 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8491 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8492 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8493 flush_pending_stmts, /* flush_pending_stmts */
8494 gimple_empty_block_p, /* block_empty_p */
8495 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8496 gimple_account_profile_record, /* account_profile_record */
8500 /* Split all critical edges. */
8502 unsigned int
8503 split_critical_edges (void)
8505 basic_block bb;
8506 edge e;
8507 edge_iterator ei;
8509 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8510 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8511 mappings around the calls to split_edge. */
8512 start_recording_case_labels ();
8513 FOR_ALL_BB_FN (bb, cfun)
8515 FOR_EACH_EDGE (e, ei, bb->succs)
8517 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8518 split_edge (e);
8519 /* PRE inserts statements onto edges and expects that,
8520    since split_critical_edges was run beforehand, committing edge
8521    insertions will not split any more edges.  In addition to critical
8522    edges we must split edges whose source block has multiple
8523    successors and ends with a control flow statement, such as RESX.
8524    Go ahead and split them too.  This matches the logic in
8525    gimple_find_edge_insert_loc.  */
8526 else if ((!single_pred_p (e->dest)
8527 || !gimple_seq_empty_p (phi_nodes (e->dest))
8528 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8529 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8530 && !(e->flags & EDGE_ABNORMAL))
8532 gimple_stmt_iterator gsi;
8534 gsi = gsi_last_bb (e->src);
8535 if (!gsi_end_p (gsi)
8536 && stmt_ends_bb_p (gsi_stmt (gsi))
8537 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8538 && !gimple_call_builtin_p (gsi_stmt (gsi),
8539 BUILT_IN_RETURN)))
8540 split_edge (e);
8544 end_recording_case_labels ();
8545 return 0;
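/* Recall that an edge is critical when its source has multiple
   successors and its destination has multiple predecessors, so no
   statement can be inserted "on" the edge without a new block.  E.g.:

        A
       / \      A has two successors; C has two predecessors,
      B   |     so the A->C edge is critical.
       \  |
        \ |
          C

   split_edge inserts a fresh block in the middle of A->C, giving edge
   insertions a place to land.  */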
8548 namespace {
8550 const pass_data pass_data_split_crit_edges =
8552 GIMPLE_PASS, /* type */
8553 "crited", /* name */
8554 OPTGROUP_NONE, /* optinfo_flags */
8555 TV_TREE_SPLIT_EDGES, /* tv_id */
8556 PROP_cfg, /* properties_required */
8557 PROP_no_crit_edges, /* properties_provided */
8558 0, /* properties_destroyed */
8559 0, /* todo_flags_start */
8560 0, /* todo_flags_finish */
8563 class pass_split_crit_edges : public gimple_opt_pass
8565 public:
8566 pass_split_crit_edges (gcc::context *ctxt)
8567 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8570 /* opt_pass methods: */
8571 virtual unsigned int execute (function *) { return split_critical_edges (); }
8573 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8574 }; // class pass_split_crit_edges
8576 } // anon namespace
8578 gimple_opt_pass *
8579 make_pass_split_crit_edges (gcc::context *ctxt)
8581 return new pass_split_crit_edges (ctxt);
8585 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8586    in basic block BB, splitting the block as appropriate and
8587    creating a new conditionally executed basic block.
8588    Return the created basic block.  */
8589 basic_block
8590 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8592 edge fall = split_block (bb, stmt);
8593 gimple_stmt_iterator iter = gsi_last_bb (bb);
8594 basic_block new_bb;
8596 /* Insert cond statement. */
8597 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8598 if (gsi_end_p (iter))
8599 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8600 else
8601 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8603 /* Create conditionally executed block. */
8604 new_bb = create_empty_bb (bb);
8605 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8606 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8608 /* Fix edge for split bb. */
8609 fall->flags = EDGE_FALSE_VALUE;
8611 /* Update dominance info. */
8612 if (dom_info_available_p (CDI_DOMINATORS))
8614 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8615 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8618 /* Update loop info. */
8619 if (current_loops)
8620 add_bb_to_loop (new_bb, bb->loop_father);
8622 return new_bb;
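/* Usage sketch (VAL, STMT and BB are hypothetical): create a block
   that executes only when VAL != 0, immediately after STMT:

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);
     // Statements placed in THEN_BB run only on the true path; the
     // original fallthrough continues via the EDGE_FALSE_VALUE edge.
*/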
8625 /* Build a ternary operation and gimplify it. Emit code before GSI.
8626 Return the gimple_val holding the result. */
8628 tree
8629 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8630 tree type, tree a, tree b, tree c)
8632 tree ret;
8633 location_t loc = gimple_location (gsi_stmt (*gsi));
8635 ret = fold_build3_loc (loc, code, type, a, b, c);
8636 STRIP_NOPS (ret);
8638 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8639 GSI_SAME_STMT);
8642 /* Build a binary operation and gimplify it. Emit code before GSI.
8643 Return the gimple_val holding the result. */
8645 tree
8646 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8647 tree type, tree a, tree b)
8649 tree ret;
8651 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8652 STRIP_NOPS (ret);
8654 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8655 GSI_SAME_STMT);
8658 /* Build a unary operation and gimplify it. Emit code before GSI.
8659 Return the gimple_val holding the result. */
8661 tree
8662 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8663 tree a)
8665 tree ret;
8667 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8668 STRIP_NOPS (ret);
8670 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8671 GSI_SAME_STMT);
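/* Usage sketch: these helpers fold first and gimplify only what is
   left, so the result is often a constant or an existing SSA name
   rather than a new statement.  E.g. (A and B are hypothetical trees
   of TYPE):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);
     // Any statements needed are emitted before GSI; the return value
     // is a valid gimple operand.
*/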
8676 /* Given a basic block B which ends with a conditional and has
8677 precisely two successors, determine which of the edges is taken if
8678 the conditional is true and which is taken if the conditional is
8679 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8681 void
8682 extract_true_false_edges_from_block (basic_block b,
8683 edge *true_edge,
8684 edge *false_edge)
8686 edge e = EDGE_SUCC (b, 0);
8688 if (e->flags & EDGE_TRUE_VALUE)
8690 *true_edge = e;
8691 *false_edge = EDGE_SUCC (b, 1);
8693 else
8695 *false_edge = e;
8696 *true_edge = EDGE_SUCC (b, 1);
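/* Usage sketch (B must end in a GIMPLE_COND and have exactly two
   successor edges, one EDGE_TRUE_VALUE and one EDGE_FALSE_VALUE):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
     // true_edge->dest is the block reached when the condition holds.
*/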
8701 /* From a controlling predicate in the immediate dominator DOM of
8702 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8703 predicate evaluates to true and false and store them to
8704 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8705    they are non-NULL.  Return true if the edges can be determined,
8706    otherwise return false.  */
8708 bool
8709 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8710 edge *true_controlled_edge,
8711 edge *false_controlled_edge)
8713 basic_block bb = phiblock;
8714 edge true_edge, false_edge, tem;
8715 edge e0 = NULL, e1 = NULL;
8717 /* We have to verify that one edge into the PHI node is dominated
8718 by the true edge of the predicate block and the other edge
8719 dominated by the false edge. This ensures that the PHI argument
8720 we are going to take is completely determined by the path we
8721 take from the predicate block.
8722 We can only use BB dominance checks below if the destination of
8723 the true/false edges are dominated by their edge, thus only
8724 have a single predecessor. */
8725 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8726 tem = EDGE_PRED (bb, 0);
8727 if (tem == true_edge
8728 || (single_pred_p (true_edge->dest)
8729 && (tem->src == true_edge->dest
8730 || dominated_by_p (CDI_DOMINATORS,
8731 tem->src, true_edge->dest))))
8732 e0 = tem;
8733 else if (tem == false_edge
8734 || (single_pred_p (false_edge->dest)
8735 && (tem->src == false_edge->dest
8736 || dominated_by_p (CDI_DOMINATORS,
8737 tem->src, false_edge->dest))))
8738 e1 = tem;
8739 else
8740 return false;
8741 tem = EDGE_PRED (bb, 1);
8742 if (tem == true_edge
8743 || (single_pred_p (true_edge->dest)
8744 && (tem->src == true_edge->dest
8745 || dominated_by_p (CDI_DOMINATORS,
8746 tem->src, true_edge->dest))))
8747 e0 = tem;
8748 else if (tem == false_edge
8749 || (single_pred_p (false_edge->dest)
8750 && (tem->src == false_edge->dest
8751 || dominated_by_p (CDI_DOMINATORS,
8752 tem->src, false_edge->dest))))
8753 e1 = tem;
8754 else
8755 return false;
8756 if (!e0 || !e1)
8757 return false;
8759 if (true_controlled_edge)
8760 *true_controlled_edge = e0;
8761 if (false_controlled_edge)
8762 *false_controlled_edge = e1;
8764 return true;
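/* Illustrative shape accepted by the function above (a sketch of the
   conditions in the comment, not additional semantics):

          DOM  (ends in a predicate)
         t/ \f
         v   v
        BB1  BB2   (each with a single predecessor)
          \  /
           vv
        PHIBLOCK

   Here *TRUE_CONTROLLED_EDGE is the BB1->PHIBLOCK edge and
   *FALSE_CONTROLLED_EDGE the BB2->PHIBLOCK edge.  The direct case,
   where DOM's true/false edges enter PHIBLOCK themselves, is accepted
   as well.  */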
8769 /* Emit return warnings. */
8771 namespace {
8773 const pass_data pass_data_warn_function_return =
8775 GIMPLE_PASS, /* type */
8776 "*warn_function_return", /* name */
8777 OPTGROUP_NONE, /* optinfo_flags */
8778 TV_NONE, /* tv_id */
8779 PROP_cfg, /* properties_required */
8780 0, /* properties_provided */
8781 0, /* properties_destroyed */
8782 0, /* todo_flags_start */
8783 0, /* todo_flags_finish */
8786 class pass_warn_function_return : public gimple_opt_pass
8788 public:
8789 pass_warn_function_return (gcc::context *ctxt)
8790 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8793 /* opt_pass methods: */
8794 virtual unsigned int execute (function *);
8796 }; // class pass_warn_function_return
8798 unsigned int
8799 pass_warn_function_return::execute (function *fun)
8801 source_location location;
8802 gimple *last;
8803 edge e;
8804 edge_iterator ei;
8806 if (!targetm.warn_func_return (fun->decl))
8807 return 0;
8809 /* If we have a path to EXIT, then we do return. */
8810 if (TREE_THIS_VOLATILE (fun->decl)
8811 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8813 location = UNKNOWN_LOCATION;
8814 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8816 last = last_stmt (e->src);
8817 if ((gimple_code (last) == GIMPLE_RETURN
8818 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8819 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8820 break;
8822 if (location == UNKNOWN_LOCATION)
8823 location = cfun->function_end_locus;
8824 warning_at (location, 0, "%<noreturn%> function does return");
8827 /* If we see "return;" in some basic block, then we do reach the end
8828 without returning a value. */
8829 else if (warn_return_type
8830 && !TREE_NO_WARNING (fun->decl)
8831 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8832 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8834 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8836 gimple *last = last_stmt (e->src);
8837 greturn *return_stmt = dyn_cast <greturn *> (last);
8838 if (return_stmt
8839 && gimple_return_retval (return_stmt) == NULL
8840 && !gimple_no_warning_p (last))
8842 location = gimple_location (last);
8843 if (location == UNKNOWN_LOCATION)
8844 location = fun->function_end_locus;
8845 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8846 TREE_NO_WARNING (fun->decl) = 1;
8847 break;
8851 return 0;
8854 } // anon namespace
8856 gimple_opt_pass *
8857 make_pass_warn_function_return (gcc::context *ctxt)
8859 return new pass_warn_function_return (ctxt);
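/* Illustrative inputs for the two warnings above (hypothetical user
   code, shown for exposition only):

     int f (int x) { if (x) return 1; }
     // -> "control reaches end of non-void function" (-Wreturn-type)

     __attribute__ ((noreturn)) void g (int x)
     { if (x) return; exit (1); }
     // -> "'noreturn' function does return"
*/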
8862 /* Walk a gimplified function and warn about calls whose return value is
8863    ignored when the callee is declared with attribute ((warn_unused_result)).
8864    This is done before inlining, so we don't have to worry about that.  */
8866 static void
8867 do_warn_unused_result (gimple_seq seq)
8869 tree fdecl, ftype;
8870 gimple_stmt_iterator i;
8872 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8874 gimple *g = gsi_stmt (i);
8876 switch (gimple_code (g))
8878 case GIMPLE_BIND:
8879 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8880 break;
8881 case GIMPLE_TRY:
8882 do_warn_unused_result (gimple_try_eval (g));
8883 do_warn_unused_result (gimple_try_cleanup (g));
8884 break;
8885 case GIMPLE_CATCH:
8886 do_warn_unused_result (gimple_catch_handler (
8887 as_a <gcatch *> (g)));
8888 break;
8889 case GIMPLE_EH_FILTER:
8890 do_warn_unused_result (gimple_eh_filter_failure (g));
8891 break;
8893 case GIMPLE_CALL:
8894 if (gimple_call_lhs (g))
8895 break;
8896 if (gimple_call_internal_p (g))
8897 break;
8899 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8900 LHS. All calls whose value is ignored should be
8901 represented like this. Look for the attribute. */
8902 fdecl = gimple_call_fndecl (g);
8903 ftype = gimple_call_fntype (g);
8905 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8907 location_t loc = gimple_location (g);
8909 if (fdecl)
8910 warning_at (loc, OPT_Wunused_result,
8911 "ignoring return value of %qD, "
8912 "declared with attribute warn_unused_result",
8913 fdecl);
8914 else
8915 warning_at (loc, OPT_Wunused_result,
8916 "ignoring return value of function "
8917 "declared with attribute warn_unused_result");
8919 break;
8921 default:
8922 /* Not a container, not a call, or a call whose value is used. */
8923 break;
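/* Illustrative trigger for the warning above (hypothetical user code):

     __attribute__ ((warn_unused_result)) int must_check (void);
     void f (void) { must_check (); }
     // -> "ignoring return value of 'must_check', declared with
     //     attribute warn_unused_result" (-Wunused-result)
*/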
8928 namespace {
8930 const pass_data pass_data_warn_unused_result =
8932 GIMPLE_PASS, /* type */
8933 "*warn_unused_result", /* name */
8934 OPTGROUP_NONE, /* optinfo_flags */
8935 TV_NONE, /* tv_id */
8936 PROP_gimple_any, /* properties_required */
8937 0, /* properties_provided */
8938 0, /* properties_destroyed */
8939 0, /* todo_flags_start */
8940 0, /* todo_flags_finish */
8943 class pass_warn_unused_result : public gimple_opt_pass
8945 public:
8946 pass_warn_unused_result (gcc::context *ctxt)
8947 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8950 /* opt_pass methods: */
8951 virtual bool gate (function *) { return flag_warn_unused_result; }
8952 virtual unsigned int execute (function *)
8954 do_warn_unused_result (gimple_body (current_function_decl));
8955 return 0;
8958 }; // class pass_warn_unused_result
8960 } // anon namespace
8962 gimple_opt_pass *
8963 make_pass_warn_unused_result (gcc::context *ctxt)
8965 return new pass_warn_unused_result (ctxt);
8968 /* IPA passes, compilation of earlier functions or inlining
8969    might have changed some properties, such as marking functions nothrow,
8970    pure, const or noreturn.
8971    Remove redundant edges and basic blocks, and create new ones if necessary.
8973    This pass can't be executed as a standalone pass from the pass manager,
8974    because between inlining and this fixup verify_flow_info would fail.  */
8976 unsigned int
8977 execute_fixup_cfg (void)
8979 basic_block bb;
8980 gimple_stmt_iterator gsi;
8981 int todo = 0;
8982 gcov_type count_scale;
8983 edge e;
8984 edge_iterator ei;
8985 cgraph_node *node = cgraph_node::get (current_function_decl);
8987 count_scale
8988 = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8990 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
8991 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
8992 = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
8994 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8995 e->count = apply_scale (e->count, count_scale);
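/* Worked example of the scaling (assuming the usual fixed-point
   semantics of GCOV_COMPUTE_SCALE and apply_scale): if the callgraph
   records node->count == 500 while the CFG entry block count is 1000,
   count_scale becomes REG_BR_PROB_BASE / 2, and apply_scale halves
   every block and edge count below, bringing the body back in line
   with the callgraph profile.  */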
8997 FOR_EACH_BB_FN (bb, cfun)
8999 bb->count = apply_scale (bb->count, count_scale);
9000 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9002 gimple *stmt = gsi_stmt (gsi);
9003 tree decl = is_gimple_call (stmt)
9004 ? gimple_call_fndecl (stmt)
9005 : NULL;
9006 if (decl)
9008 int flags = gimple_call_flags (stmt);
9009 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9011 if (gimple_purge_dead_abnormal_call_edges (bb))
9012 todo |= TODO_cleanup_cfg;
9014 if (gimple_in_ssa_p (cfun))
9016 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9017 update_stmt (stmt);
9021 if (flags & ECF_NORETURN
9022 && fixup_noreturn_call (stmt))
9023 todo |= TODO_cleanup_cfg;
9026 /* Remove stores to variables we marked write-only.
9027    Keep the access when the store has a side effect, i.e. when the
9028    source is volatile.  */
9029 if (gimple_store_p (stmt)
9030 && !gimple_has_side_effects (stmt))
9032 tree lhs = get_base_address (gimple_get_lhs (stmt));
9034 if (VAR_P (lhs)
9035 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9036 && varpool_node::get (lhs)->writeonly)
9038 unlink_stmt_vdef (stmt);
9039 gsi_remove (&gsi, true);
9040 release_defs (stmt);
9041 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9042 continue;
9045 /* For calls we can simply remove the LHS when it is known
9046    to be write-only.  */
9047 if (is_gimple_call (stmt)
9048 && gimple_get_lhs (stmt))
9050 tree lhs = get_base_address (gimple_get_lhs (stmt));
9052 if (VAR_P (lhs)
9053 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9054 && varpool_node::get (lhs)->writeonly)
9056 gimple_call_set_lhs (stmt, NULL);
9057 update_stmt (stmt);
9058 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9062 if (maybe_clean_eh_stmt (stmt)
9063 && gimple_purge_dead_eh_edges (bb))
9064 todo |= TODO_cleanup_cfg;
9065 gsi_next (&gsi);
9068 FOR_EACH_EDGE (e, ei, bb->succs)
9069 e->count = apply_scale (e->count, count_scale);
9071 /* If we have a basic block with no successors that does not
9072    end with a control statement or a noreturn call, end it with
9073    a call to __builtin_unreachable.  This situation can occur
9074    when inlining a noreturn call that does in fact return.  */
9075 if (EDGE_COUNT (bb->succs) == 0)
9077 gimple *stmt = last_stmt (bb);
9078 if (!stmt
9079 || (!is_ctrl_stmt (stmt)
9080 && (!is_gimple_call (stmt)
9081 || !gimple_call_noreturn_p (stmt))))
9083 if (stmt && is_gimple_call (stmt))
9084 gimple_call_set_ctrl_altering (stmt, false);
9085 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9086 stmt = gimple_build_call (fndecl, 0);
9087 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9088 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9089 if (!cfun->after_inlining)
9091 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9092 int freq
9093 = compute_call_stmt_bb_frequency (current_function_decl,
9094 bb);
9095 node->create_edge (cgraph_node::get_create (fndecl),
9096 call_stmt, bb->count, freq);
9101 if (count_scale != REG_BR_PROB_BASE)
9102 compute_function_frequency ();
9104 if (current_loops
9105 && (todo & TODO_cleanup_cfg))
9106 loops_state_set (LOOPS_NEED_FIXUP);
9108 return todo;
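/* Illustrative case for the __builtin_unreachable fixup above
   (hypothetical user code): after inlining

     __attribute__ ((noreturn)) void die (void) { }  // does in fact return
     void f (void) { die (); }

   the inlined body of die falls off its end, leaving f with a basic
   block that has no successors yet does not end in a control statement
   or noreturn call; the loop above caps such a block with a call to
   __builtin_unreachable ().  */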
9111 namespace {
9113 const pass_data pass_data_fixup_cfg =
9115 GIMPLE_PASS, /* type */
9116 "fixup_cfg", /* name */
9117 OPTGROUP_NONE, /* optinfo_flags */
9118 TV_NONE, /* tv_id */
9119 PROP_cfg, /* properties_required */
9120 0, /* properties_provided */
9121 0, /* properties_destroyed */
9122 0, /* todo_flags_start */
9123 0, /* todo_flags_finish */
9126 class pass_fixup_cfg : public gimple_opt_pass
9128 public:
9129 pass_fixup_cfg (gcc::context *ctxt)
9130 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9133 /* opt_pass methods: */
9134 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9135 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9137 }; // class pass_fixup_cfg
9139 } // anon namespace
9141 gimple_opt_pass *
9142 make_pass_fixup_cfg (gcc::context *ctxt)
9144 return new pass_fixup_cfg (ctxt);
9147 /* Garbage collection support for edge_def. */
9149 extern void gt_ggc_mx (tree&);
9150 extern void gt_ggc_mx (gimple *&);
9151 extern void gt_ggc_mx (rtx&);
9152 extern void gt_ggc_mx (basic_block&);
9154 static void
9155 gt_ggc_mx (rtx_insn *& x)
9157 if (x)
9158 gt_ggc_mx_rtx_def ((void *) x);
9161 void
9162 gt_ggc_mx (edge_def *e)
9164 tree block = LOCATION_BLOCK (e->goto_locus);
9165 gt_ggc_mx (e->src);
9166 gt_ggc_mx (e->dest);
9167 if (current_ir_type () == IR_GIMPLE)
9168 gt_ggc_mx (e->insns.g);
9169 else
9170 gt_ggc_mx (e->insns.r);
9171 gt_ggc_mx (block);
9174 /* PCH support for edge_def. */
9176 extern void gt_pch_nx (tree&);
9177 extern void gt_pch_nx (gimple *&);
9178 extern void gt_pch_nx (rtx&);
9179 extern void gt_pch_nx (basic_block&);
9181 static void
9182 gt_pch_nx (rtx_insn *& x)
9184 if (x)
9185 gt_pch_nx_rtx_def ((void *) x);
9188 void
9189 gt_pch_nx (edge_def *e)
9191 tree block = LOCATION_BLOCK (e->goto_locus);
9192 gt_pch_nx (e->src);
9193 gt_pch_nx (e->dest);
9194 if (current_ir_type () == IR_GIMPLE)
9195 gt_pch_nx (e->insns.g);
9196 else
9197 gt_pch_nx (e->insns.r);
9198 gt_pch_nx (block);
9201 void
9202 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9204 tree block = LOCATION_BLOCK (e->goto_locus);
9205 op (&(e->src), cookie);
9206 op (&(e->dest), cookie);
9207 if (current_ir_type () == IR_GIMPLE)
9208 op (&(e->insns.g), cookie);
9209 else
9210 op (&(e->insns.r), cookie);
9211 op (&(block), cookie);
9214 #if CHECKING_P
9216 namespace selftest {
9218 /* Helper function for CFG selftests: create a dummy function decl
9219 and push it as cfun. */
9221 static tree
9222 push_fndecl (const char *name)
9224 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9225 /* FIXME: this uses input_location: */
9226 tree fndecl = build_fn_decl (name, fn_type);
9227 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9228 NULL_TREE, integer_type_node);
9229 DECL_RESULT (fndecl) = retval;
9230 push_struct_function (fndecl);
9231 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9232 ASSERT_TRUE (fun != NULL);
9233 init_empty_tree_cfg_for_function (fun);
9234 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9235 ASSERT_EQ (0, n_edges_for_fn (fun));
9236 return fndecl;
9239 /* These tests directly create CFGs.
9240 Compare with the static fns within tree-cfg.c:
9241 - build_gimple_cfg
9242 - make_blocks: calls create_basic_block (seq, bb);
9243 - make_edges. */
9245 /* Verify a simple CFG of the form:
9246      ENTRY -> A -> B -> C -> EXIT.  */
9248 static void
9249 test_linear_chain ()
9251 gimple_register_cfg_hooks ();
9253 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9254 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9256 /* Create some empty blocks. */
9257 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9258 basic_block bb_b = create_empty_bb (bb_a);
9259 basic_block bb_c = create_empty_bb (bb_b);
9261 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9262 ASSERT_EQ (0, n_edges_for_fn (fun));
9264 /* Create some edges: a simple linear chain of BBs. */
9265 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9266 make_edge (bb_a, bb_b, 0);
9267 make_edge (bb_b, bb_c, 0);
9268 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9270 /* Verify the edges. */
9271 ASSERT_EQ (4, n_edges_for_fn (fun));
9272 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9273 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9274 ASSERT_EQ (1, bb_a->preds->length ());
9275 ASSERT_EQ (1, bb_a->succs->length ());
9276 ASSERT_EQ (1, bb_b->preds->length ());
9277 ASSERT_EQ (1, bb_b->succs->length ());
9278 ASSERT_EQ (1, bb_c->preds->length ());
9279 ASSERT_EQ (1, bb_c->succs->length ());
9280 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9281 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9283 /* Verify the dominance information.
9284    Each BB in our simple chain should be dominated by the one before
9285    it.  */
9286 calculate_dominance_info (CDI_DOMINATORS);
9287 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9288 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9289 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9290 ASSERT_EQ (1, dom_by_b.length ());
9291 ASSERT_EQ (bb_c, dom_by_b[0]);
9292 free_dominance_info (CDI_DOMINATORS);
9293 dom_by_b.release ();
9295 /* Similarly for post-dominance: each BB in our chain is post-dominated
9296 by the one after it. */
9297 calculate_dominance_info (CDI_POST_DOMINATORS);
9298 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9299 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9300 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9301 ASSERT_EQ (1, postdom_by_b.length ());
9302 ASSERT_EQ (bb_a, postdom_by_b[0]);
9303 free_dominance_info (CDI_POST_DOMINATORS);
9304 postdom_by_b.release ();
9306 pop_cfun ();
9309 /* Verify a simple CFG of the form:
9310      ENTRY
9311        |
9312        A
9313       / \
9314     t/   \f
9315     B     C
9316      \   /
9317       \ /
9318        D
9319        |
9320      EXIT.  */
9322 static void
9323 test_diamond ()
9325 gimple_register_cfg_hooks ();
9327 tree fndecl = push_fndecl ("cfg_test_diamond");
9328 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9330 /* Create some empty blocks. */
9331 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9332 basic_block bb_b = create_empty_bb (bb_a);
9333 basic_block bb_c = create_empty_bb (bb_a);
9334 basic_block bb_d = create_empty_bb (bb_b);
9336 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9337 ASSERT_EQ (0, n_edges_for_fn (fun));
9339 /* Create the edges. */
9340 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9341 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9342 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9343 make_edge (bb_b, bb_d, 0);
9344 make_edge (bb_c, bb_d, 0);
9345 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9347 /* Verify the edges. */
9348 ASSERT_EQ (6, n_edges_for_fn (fun));
9349 ASSERT_EQ (1, bb_a->preds->length ());
9350 ASSERT_EQ (2, bb_a->succs->length ());
9351 ASSERT_EQ (1, bb_b->preds->length ());
9352 ASSERT_EQ (1, bb_b->succs->length ());
9353 ASSERT_EQ (1, bb_c->preds->length ());
9354 ASSERT_EQ (1, bb_c->succs->length ());
9355 ASSERT_EQ (2, bb_d->preds->length ());
9356 ASSERT_EQ (1, bb_d->succs->length ());
9358 /* Verify the dominance information. */
9359 calculate_dominance_info (CDI_DOMINATORS);
9360 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9361 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9362 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9363 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9364 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9365 dom_by_a.release ();
9366 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9367 ASSERT_EQ (0, dom_by_b.length ());
9368 dom_by_b.release ();
9369 free_dominance_info (CDI_DOMINATORS);
9371 /* Similarly for post-dominance. */
9372 calculate_dominance_info (CDI_POST_DOMINATORS);
9373 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9374 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9375 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9376 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9377 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9378 postdom_by_d.release ();
9379 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9380 ASSERT_EQ (0, postdom_by_b.length ());
9381 postdom_by_b.release ();
9382 free_dominance_info (CDI_POST_DOMINATORS);
9384 pop_cfun ();
9387 /* Verify that we can handle a CFG containing a "complete" aka
9388    fully-connected subgraph (where each of A, B, C, D below has edges
9389    pointing to every other node, and also to itself).
9390 e.g.:
9391      ENTRY  EXIT
9392        |     ^
9393        |     |
9394        |    /
9395        |   /
9396        V  /
9397        A<--->B
9398        ^^   ^^
9399        | \ / |
9400        |  X  |
9401        | / \ |
9402        VV   VV
9403        C<--->D
9404    */
9406 static void
9407 test_fully_connected ()
9409 gimple_register_cfg_hooks ();
9411 tree fndecl = push_fndecl ("cfg_fully_connected");
9412 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9414 const int n = 4;
9416 /* Create some empty blocks. */
9417 auto_vec <basic_block> subgraph_nodes;
9418 for (int i = 0; i < n; i++)
9419 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9421 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9422 ASSERT_EQ (0, n_edges_for_fn (fun));
9424 /* Create the edges. */
9425 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9426 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9427 for (int i = 0; i < n; i++)
9428 for (int j = 0; j < n; j++)
9429 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9431 /* Verify the edges. */
9432 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9433 /* The first one is linked to ENTRY/EXIT as well as itself and
9434 everything else. */
9435 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9436 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9437 /* The other ones in the subgraph are linked to everything in
9438 the subgraph (including themselves). */
9439 for (int i = 1; i < n; i++)
9441 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9442 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9445 /* Verify the dominance information. */
9446 calculate_dominance_info (CDI_DOMINATORS);
9447 /* The initial block in the subgraph should be dominated by ENTRY. */
9448 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9449 get_immediate_dominator (CDI_DOMINATORS,
9450 subgraph_nodes[0]));
9451 /* Every other block in the subgraph should be dominated by the
9452 initial block. */
9453 for (int i = 1; i < n; i++)
9454 ASSERT_EQ (subgraph_nodes[0],
9455 get_immediate_dominator (CDI_DOMINATORS,
9456 subgraph_nodes[i]));
9457 free_dominance_info (CDI_DOMINATORS);
9459 /* Similarly for post-dominance. */
9460 calculate_dominance_info (CDI_POST_DOMINATORS);
9461 /* The initial block in the subgraph should be postdominated by EXIT. */
9462 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9463 get_immediate_dominator (CDI_POST_DOMINATORS,
9464 subgraph_nodes[0]));
9465 /* Every other block in the subgraph should be postdominated by the
9466 initial block, since that leads to EXIT. */
9467 for (int i = 1; i < n; i++)
9468 ASSERT_EQ (subgraph_nodes[0],
9469 get_immediate_dominator (CDI_POST_DOMINATORS,
9470 subgraph_nodes[i]));
9471 free_dominance_info (CDI_POST_DOMINATORS);
9473 pop_cfun ();
9476 /* Run all of the selftests within this file. */
9478 void
9479 tree_cfg_c_tests ()
9481 test_linear_chain ();
9482 test_diamond ();
9483 test_fully_connected ();
9486 } // namespace selftest
9488 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9489 - loop
9490 - nested loops
9491 - switch statement (a block with many out-edges)
9492 - something that jumps to itself
9493    - etc.  */
9495 #endif /* CHECKING_P */