* ipa-polymorphic-call.c
[official-gcc.git] / gcc / tree-cfg.c
blob99b27c7dc97d979fb883cf9dce3e2d3231b8749d
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "trans-mem.h"
39 #include "stor-layout.h"
40 #include "print-tree.h"
41 #include "tm_p.h"
42 #include "predict.h"
43 #include "hard-reg-set.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "flags.h"
50 #include "gimple-pretty-print.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-fold.h"
54 #include "tree-eh.h"
55 #include "gimple-expr.h"
56 #include "is-a.h"
57 #include "gimple.h"
58 #include "gimple-iterator.h"
59 #include "gimplify-me.h"
60 #include "gimple-walk.h"
61 #include "gimple-ssa.h"
62 #include "plugin-api.h"
63 #include "ipa-ref.h"
64 #include "cgraph.h"
65 #include "tree-cfg.h"
66 #include "tree-phinodes.h"
67 #include "ssa-iterators.h"
68 #include "stringpool.h"
69 #include "tree-ssanames.h"
70 #include "tree-ssa-loop-manip.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "tree-into-ssa.h"
73 #include "hashtab.h"
74 #include "rtl.h"
75 #include "statistics.h"
76 #include "real.h"
77 #include "fixed-value.h"
78 #include "insn-config.h"
79 #include "expmed.h"
80 #include "dojump.h"
81 #include "explow.h"
82 #include "calls.h"
83 #include "emit-rtl.h"
84 #include "varasm.h"
85 #include "stmt.h"
86 #include "expr.h"
87 #include "tree-dfa.h"
88 #include "tree-ssa.h"
89 #include "tree-dump.h"
90 #include "tree-pass.h"
91 #include "diagnostic-core.h"
92 #include "except.h"
93 #include "cfgloop.h"
94 #include "tree-ssa-propagate.h"
95 #include "value-prof.h"
96 #include "tree-inline.h"
97 #include "target.h"
98 #include "tree-ssa-live.h"
99 #include "omp-low.h"
100 #include "tree-cfgcleanup.h"
101 #include "wide-int-print.h"
103 /* This file contains functions for building the Control Flow Graph (CFG)
104 for a function tree. */
106 /* Local declarations. */
108 /* Initial capacity for the basic block array. */
109 static const int initial_cfg_capacity = 20;
111 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
112 which use a particular edge. The CASE_LABEL_EXPRs are chained together
113 via their CASE_CHAIN field, which we clear after we're done with the
114 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
116 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
117 update the case vector in response to edge redirections.
119 Right now this table is set up and torn down at key points in the
120 compilation process. It would be nice if we could make the table
121 more persistent. The key is getting notification of changes to
122 the CFG (particularly edge removal, creation and redirection). */
124 static hash_map<edge, tree> *edge_to_cases;
126 /* If we record edge_to_cases, this bitmap will hold indexes
127 of basic blocks that end in a GIMPLE_SWITCH which we touched
128 due to edge manipulations. */
130 static bitmap touched_switch_bbs;
132 /* CFG statistics. */
133 struct cfg_stats_d
135 long num_merged_labels;
138 static struct cfg_stats_d cfg_stats;
140 /* Hash table to store last discriminator assigned for each locus. */
141 struct locus_discrim_map
143 location_t locus;
144 int discriminator;
147 /* Hashtable helpers. */
149 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
151 typedef locus_discrim_map *value_type;
152 typedef locus_discrim_map *compare_type;
153 static inline hashval_t hash (const locus_discrim_map *);
154 static inline bool equal (const locus_discrim_map *,
155 const locus_discrim_map *);
158 /* Trivial hash function for a location_t. ITEM is a pointer to
159 a hash table entry that maps a location_t to a discriminator. */
161 inline hashval_t
162 locus_discrim_hasher::hash (const locus_discrim_map *item)
164 return LOCATION_LINE (item->locus);
167 /* Equality function for the locus-to-discriminator map. A and B
168 point to the two hash table entries to compare. */
170 inline bool
171 locus_discrim_hasher::equal (const locus_discrim_map *a,
172 const locus_discrim_map *b)
174 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
177 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
179 /* Basic blocks and flowgraphs. */
180 static void make_blocks (gimple_seq);
182 /* Edges. */
183 static void make_edges (void);
184 static void assign_discriminators (void);
185 static void make_cond_expr_edges (basic_block);
186 static void make_gimple_switch_edges (gswitch *, basic_block);
187 static bool make_goto_expr_edges (basic_block);
188 static void make_gimple_asm_edges (basic_block);
189 static edge gimple_redirect_edge_and_branch (edge, basic_block);
190 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
192 /* Various helpers. */
193 static inline bool stmt_starts_bb_p (gimple, gimple);
194 static int gimple_verify_flow_info (void);
195 static void gimple_make_forwarder_block (edge);
196 static gimple first_non_label_stmt (basic_block);
197 static bool verify_gimple_transaction (gtransaction *);
198 static bool call_can_make_abnormal_goto (gimple);
200 /* Flowgraph optimization and cleanup. */
201 static void gimple_merge_blocks (basic_block, basic_block);
202 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
203 static void remove_bb (basic_block);
204 static edge find_taken_edge_computed_goto (basic_block, tree);
205 static edge find_taken_edge_cond_expr (basic_block, tree);
206 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
207 static tree find_case_label_for_value (gswitch *, tree);
209 void
210 init_empty_tree_cfg_for_function (struct function *fn)
212 /* Initialize the basic block array. */
213 init_flow (fn);
214 profile_status_for_fn (fn) = PROFILE_ABSENT;
215 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
216 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
217 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
218 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
219 initial_cfg_capacity);
221 /* Build a mapping of labels to their associated blocks. */
222 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
223 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
224 initial_cfg_capacity);
226 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
227 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
229 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
230 = EXIT_BLOCK_PTR_FOR_FN (fn);
231 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
232 = ENTRY_BLOCK_PTR_FOR_FN (fn);
235 void
236 init_empty_tree_cfg (void)
238 init_empty_tree_cfg_for_function (cfun);
241 /*---------------------------------------------------------------------------
242 Create basic blocks
243 ---------------------------------------------------------------------------*/
245 /* Entry point to the CFG builder for trees. SEQ is the sequence of
246 statements to be added to the flowgraph. */
248 static void
249 build_gimple_cfg (gimple_seq seq)
251 /* Register specific gimple functions. */
252 gimple_register_cfg_hooks ();
254 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
256 init_empty_tree_cfg ();
258 make_blocks (seq);
260 /* Make sure there is always at least one block, even if it's empty. */
261 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
262 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
264 /* Adjust the size of the array. */
265 if (basic_block_info_for_fn (cfun)->length ()
266 < (size_t) n_basic_blocks_for_fn (cfun))
267 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
268 n_basic_blocks_for_fn (cfun));
270 /* To speed up statement iterator walks, we first purge dead labels. */
271 cleanup_dead_labels ();
273 /* Group case nodes to reduce the number of edges.
274 We do this after cleaning up dead labels because otherwise we miss
275 a lot of obvious case merging opportunities. */
276 group_case_labels ();
278 /* Create the edges of the flowgraph. */
279 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
280 make_edges ();
281 assign_discriminators ();
282 cleanup_dead_labels ();
283 delete discriminator_per_locus;
284 discriminator_per_locus = NULL;
287 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
288 them and propagate the information to LOOP. We assume that the annotations
289 come immediately before the condition in BB, if any. */
291 static void
292 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
294 gimple_stmt_iterator gsi = gsi_last_bb (bb);
295 gimple stmt = gsi_stmt (gsi);
297 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
298 return;
300 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
302 stmt = gsi_stmt (gsi);
303 if (gimple_code (stmt) != GIMPLE_CALL)
304 break;
305 if (!gimple_call_internal_p (stmt)
306 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
307 break;
309 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
311 case annot_expr_ivdep_kind:
312 loop->safelen = INT_MAX;
313 break;
314 case annot_expr_no_vector_kind:
315 loop->dont_vectorize = true;
316 break;
317 case annot_expr_vector_kind:
318 loop->force_vectorize = true;
319 cfun->has_force_vectorize_loops = true;
320 break;
321 default:
322 gcc_unreachable ();
325 stmt = gimple_build_assign (gimple_call_lhs (stmt),
326 gimple_call_arg (stmt, 0));
327 gsi_replace (&gsi, stmt, true);
331 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
332 them and propagate the information to the loop. We assume that the
333 annotations come immediately before the condition of the loop. */
335 static void
336 replace_loop_annotate (void)
338 struct loop *loop;
339 basic_block bb;
340 gimple_stmt_iterator gsi;
341 gimple stmt;
343 FOR_EACH_LOOP (loop, 0)
345 /* First look into the header. */
346 replace_loop_annotate_in_block (loop->header, loop);
348 /* Then look into the latch, if any. */
349 if (loop->latch)
350 replace_loop_annotate_in_block (loop->latch, loop);
353 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
354 FOR_EACH_BB_FN (bb, cfun)
356 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
358 stmt = gsi_stmt (gsi);
359 if (gimple_code (stmt) != GIMPLE_CALL)
360 continue;
361 if (!gimple_call_internal_p (stmt)
362 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
363 continue;
365 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
367 case annot_expr_ivdep_kind:
368 case annot_expr_no_vector_kind:
369 case annot_expr_vector_kind:
370 break;
371 default:
372 gcc_unreachable ();
375 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
376 stmt = gimple_build_assign (gimple_call_lhs (stmt),
377 gimple_call_arg (stmt, 0));
378 gsi_replace (&gsi, stmt, true);
384 static unsigned int
385 execute_build_cfg (void)
387 gimple_seq body = gimple_body (current_function_decl);
389 build_gimple_cfg (body);
390 gimple_set_body (current_function_decl, NULL);
391 if (dump_file && (dump_flags & TDF_DETAILS))
393 fprintf (dump_file, "Scope blocks:\n");
394 dump_scope_blocks (dump_file, dump_flags);
396 cleanup_tree_cfg ();
397 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
398 replace_loop_annotate ();
399 return 0;
402 namespace {
404 const pass_data pass_data_build_cfg =
406 GIMPLE_PASS, /* type */
407 "cfg", /* name */
408 OPTGROUP_NONE, /* optinfo_flags */
409 TV_TREE_CFG, /* tv_id */
410 PROP_gimple_leh, /* properties_required */
411 ( PROP_cfg | PROP_loops ), /* properties_provided */
412 0, /* properties_destroyed */
413 0, /* todo_flags_start */
414 0, /* todo_flags_finish */
417 class pass_build_cfg : public gimple_opt_pass
419 public:
420 pass_build_cfg (gcc::context *ctxt)
421 : gimple_opt_pass (pass_data_build_cfg, ctxt)
424 /* opt_pass methods: */
425 virtual unsigned int execute (function *) { return execute_build_cfg (); }
427 }; // class pass_build_cfg
429 } // anon namespace
431 gimple_opt_pass *
432 make_pass_build_cfg (gcc::context *ctxt)
434 return new pass_build_cfg (ctxt);
438 /* Return true if T is a computed goto. */
440 bool
441 computed_goto_p (gimple t)
443 return (gimple_code (t) == GIMPLE_GOTO
444 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
447 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
448 the other edge points to a bb with just __builtin_unreachable ().
449 I.e. return true for C->M edge in:
450 <bb C>:
452 if (something)
453 goto <bb N>;
454 else
455 goto <bb M>;
456 <bb N>:
457 __builtin_unreachable ();
458 <bb M>: */
460 bool
461 assert_unreachable_fallthru_edge_p (edge e)
463 basic_block pred_bb = e->src;
464 gimple last = last_stmt (pred_bb);
465 if (last && gimple_code (last) == GIMPLE_COND)
467 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
468 if (other_bb == e->dest)
469 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
470 if (EDGE_COUNT (other_bb->succs) == 0)
472 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
473 gimple stmt;
475 if (gsi_end_p (gsi))
476 return false;
477 stmt = gsi_stmt (gsi);
478 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
480 gsi_next (&gsi);
481 if (gsi_end_p (gsi))
482 return false;
483 stmt = gsi_stmt (gsi);
485 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
488 return false;
492 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
493 could alter control flow except via eh. We initialize the flag at
494 CFG build time and only ever clear it later. */
496 static void
497 gimple_call_initialize_ctrl_altering (gimple stmt)
499 int flags = gimple_call_flags (stmt);
501 /* A call alters control flow if it can make an abnormal goto. */
502 if (call_can_make_abnormal_goto (stmt)
503 /* A call also alters control flow if it does not return. */
504 || flags & ECF_NORETURN
505 /* TM ending statements have backedges out of the transaction.
506 Return true so we split the basic block containing them.
507 Note that the TM_BUILTIN test is merely an optimization. */
508 || ((flags & ECF_TM_BUILTIN)
509 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
510 /* BUILT_IN_RETURN call is same as return statement. */
511 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
512 gimple_call_set_ctrl_altering (stmt, true);
513 else
514 gimple_call_set_ctrl_altering (stmt, false);
518 /* Insert SEQ after BB and build a flowgraph. */
520 static basic_block
521 make_blocks_1 (gimple_seq seq, basic_block bb)
523 gimple_stmt_iterator i = gsi_start (seq);
524 gimple stmt = NULL;
525 bool start_new_block = true;
526 bool first_stmt_of_seq = true;
528 while (!gsi_end_p (i))
530 gimple prev_stmt;
532 prev_stmt = stmt;
533 stmt = gsi_stmt (i);
535 if (stmt && is_gimple_call (stmt))
536 gimple_call_initialize_ctrl_altering (stmt);
538 /* If the statement starts a new basic block or if we have determined
539 in a previous pass that we need to create a new block for STMT, do
540 so now. */
541 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
543 if (!first_stmt_of_seq)
544 gsi_split_seq_before (&i, &seq);
545 bb = create_basic_block (seq, bb);
546 start_new_block = false;
549 /* Now add STMT to BB and create the subgraphs for special statement
550 codes. */
551 gimple_set_bb (stmt, bb);
553 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
554 next iteration. */
555 if (stmt_ends_bb_p (stmt))
557 /* If the stmt can make abnormal goto use a new temporary
558 for the assignment to the LHS. This makes sure the old value
559 of the LHS is available on the abnormal edge. Otherwise
560 we will end up with overlapping life-ranges for abnormal
561 SSA names. */
562 if (gimple_has_lhs (stmt)
563 && stmt_can_make_abnormal_goto (stmt)
564 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
566 tree lhs = gimple_get_lhs (stmt);
567 tree tmp = create_tmp_var (TREE_TYPE (lhs));
568 gimple s = gimple_build_assign (lhs, tmp);
569 gimple_set_location (s, gimple_location (stmt));
570 gimple_set_block (s, gimple_block (stmt));
571 gimple_set_lhs (stmt, tmp);
572 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
573 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
574 DECL_GIMPLE_REG_P (tmp) = 1;
575 gsi_insert_after (&i, s, GSI_SAME_STMT);
577 start_new_block = true;
580 gsi_next (&i);
581 first_stmt_of_seq = false;
583 return bb;
586 /* Build a flowgraph for the sequence of stmts SEQ. */
588 static void
589 make_blocks (gimple_seq seq)
591 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
594 /* Create and return a new empty basic block after bb AFTER. */
596 static basic_block
597 create_bb (void *h, void *e, basic_block after)
599 basic_block bb;
601 gcc_assert (!e);
603 /* Create and initialize a new basic block. Since alloc_block uses
604 GC allocation that clears memory to allocate a basic block, we do
605 not have to clear the newly allocated basic block here. */
606 bb = alloc_block ();
608 bb->index = last_basic_block_for_fn (cfun);
609 bb->flags = BB_NEW;
610 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
612 /* Add the new block to the linked list of blocks. */
613 link_block (bb, after);
615 /* Grow the basic block array if needed. */
616 if ((size_t) last_basic_block_for_fn (cfun)
617 == basic_block_info_for_fn (cfun)->length ())
619 size_t new_size =
620 (last_basic_block_for_fn (cfun)
621 + (last_basic_block_for_fn (cfun) + 3) / 4);
622 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
625 /* Add the newly created block to the array. */
626 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
628 n_basic_blocks_for_fn (cfun)++;
629 last_basic_block_for_fn (cfun)++;
631 return bb;
635 /*---------------------------------------------------------------------------
636 Edge creation
637 ---------------------------------------------------------------------------*/
639 /* Fold COND_EXPR_COND of each COND_EXPR. */
641 void
642 fold_cond_expr_cond (void)
644 basic_block bb;
646 FOR_EACH_BB_FN (bb, cfun)
648 gimple stmt = last_stmt (bb);
650 if (stmt && gimple_code (stmt) == GIMPLE_COND)
652 gcond *cond_stmt = as_a <gcond *> (stmt);
653 location_t loc = gimple_location (stmt);
654 tree cond;
655 bool zerop, onep;
657 fold_defer_overflow_warnings ();
658 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
659 boolean_type_node,
660 gimple_cond_lhs (cond_stmt),
661 gimple_cond_rhs (cond_stmt));
662 if (cond)
664 zerop = integer_zerop (cond);
665 onep = integer_onep (cond);
667 else
668 zerop = onep = false;
670 fold_undefer_overflow_warnings (zerop || onep,
671 stmt,
672 WARN_STRICT_OVERFLOW_CONDITIONAL);
673 if (zerop)
674 gimple_cond_make_false (cond_stmt);
675 else if (onep)
676 gimple_cond_make_true (cond_stmt);
681 /* If basic block BB has an abnormal edge to a basic block
682 containing IFN_ABNORMAL_DISPATCHER internal call, return
683 that the dispatcher's basic block, otherwise return NULL. */
685 basic_block
686 get_abnormal_succ_dispatcher (basic_block bb)
688 edge e;
689 edge_iterator ei;
691 FOR_EACH_EDGE (e, ei, bb->succs)
692 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
694 gimple_stmt_iterator gsi
695 = gsi_start_nondebug_after_labels_bb (e->dest);
696 gimple g = gsi_stmt (gsi);
697 if (g
698 && is_gimple_call (g)
699 && gimple_call_internal_p (g)
700 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
701 return e->dest;
703 return NULL;
706 /* Helper function for make_edges. Create a basic block with
707 with ABNORMAL_DISPATCHER internal call in it if needed, and
708 create abnormal edges from BBS to it and from it to FOR_BB
709 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
711 static void
712 handle_abnormal_edges (basic_block *dispatcher_bbs,
713 basic_block for_bb, int *bb_to_omp_idx,
714 auto_vec<basic_block> *bbs, bool computed_goto)
716 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
717 unsigned int idx = 0;
718 basic_block bb;
719 bool inner = false;
721 if (bb_to_omp_idx)
723 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
724 if (bb_to_omp_idx[for_bb->index] != 0)
725 inner = true;
728 /* If the dispatcher has been created already, then there are basic
729 blocks with abnormal edges to it, so just make a new edge to
730 for_bb. */
731 if (*dispatcher == NULL)
733 /* Check if there are any basic blocks that need to have
734 abnormal edges to this dispatcher. If there are none, return
735 early. */
736 if (bb_to_omp_idx == NULL)
738 if (bbs->is_empty ())
739 return;
741 else
743 FOR_EACH_VEC_ELT (*bbs, idx, bb)
744 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
745 break;
746 if (bb == NULL)
747 return;
750 /* Create the dispatcher bb. */
751 *dispatcher = create_basic_block (NULL, for_bb);
752 if (computed_goto)
754 /* Factor computed gotos into a common computed goto site. Also
755 record the location of that site so that we can un-factor the
756 gotos after we have converted back to normal form. */
757 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
759 /* Create the destination of the factored goto. Each original
760 computed goto will put its desired destination into this
761 variable and jump to the label we create immediately below. */
762 tree var = create_tmp_var (ptr_type_node, "gotovar");
764 /* Build a label for the new block which will contain the
765 factored computed goto. */
766 tree factored_label_decl
767 = create_artificial_label (UNKNOWN_LOCATION);
768 gimple factored_computed_goto_label
769 = gimple_build_label (factored_label_decl);
770 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
772 /* Build our new computed goto. */
773 gimple factored_computed_goto = gimple_build_goto (var);
774 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
776 FOR_EACH_VEC_ELT (*bbs, idx, bb)
778 if (bb_to_omp_idx
779 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
780 continue;
782 gsi = gsi_last_bb (bb);
783 gimple last = gsi_stmt (gsi);
785 gcc_assert (computed_goto_p (last));
787 /* Copy the original computed goto's destination into VAR. */
788 gimple assignment
789 = gimple_build_assign (var, gimple_goto_dest (last));
790 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
792 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
793 e->goto_locus = gimple_location (last);
794 gsi_remove (&gsi, true);
797 else
799 tree arg = inner ? boolean_true_node : boolean_false_node;
800 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
801 1, arg);
802 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
803 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
805 /* Create predecessor edges of the dispatcher. */
806 FOR_EACH_VEC_ELT (*bbs, idx, bb)
808 if (bb_to_omp_idx
809 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
810 continue;
811 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
816 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
819 /* Creates outgoing edges for BB. Returns 1 when it ends with an
820 computed goto, returns 2 when it ends with a statement that
821 might return to this function via an nonlocal goto, otherwise
822 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
824 static int
825 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
827 gimple last = last_stmt (bb);
828 bool fallthru = false;
829 int ret = 0;
831 if (!last)
832 return ret;
834 switch (gimple_code (last))
836 case GIMPLE_GOTO:
837 if (make_goto_expr_edges (bb))
838 ret = 1;
839 fallthru = false;
840 break;
841 case GIMPLE_RETURN:
843 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
844 e->goto_locus = gimple_location (last);
845 fallthru = false;
847 break;
848 case GIMPLE_COND:
849 make_cond_expr_edges (bb);
850 fallthru = false;
851 break;
852 case GIMPLE_SWITCH:
853 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
854 fallthru = false;
855 break;
856 case GIMPLE_RESX:
857 make_eh_edges (last);
858 fallthru = false;
859 break;
860 case GIMPLE_EH_DISPATCH:
861 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
862 break;
864 case GIMPLE_CALL:
865 /* If this function receives a nonlocal goto, then we need to
866 make edges from this call site to all the nonlocal goto
867 handlers. */
868 if (stmt_can_make_abnormal_goto (last))
869 ret = 2;
871 /* If this statement has reachable exception handlers, then
872 create abnormal edges to them. */
873 make_eh_edges (last);
875 /* BUILTIN_RETURN is really a return statement. */
876 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
878 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
879 fallthru = false;
881 /* Some calls are known not to return. */
882 else
883 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
884 break;
886 case GIMPLE_ASSIGN:
887 /* A GIMPLE_ASSIGN may throw internally and thus be considered
888 control-altering. */
889 if (is_ctrl_altering_stmt (last))
890 make_eh_edges (last);
891 fallthru = true;
892 break;
894 case GIMPLE_ASM:
895 make_gimple_asm_edges (bb);
896 fallthru = true;
897 break;
899 CASE_GIMPLE_OMP:
900 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
901 break;
903 case GIMPLE_TRANSACTION:
905 tree abort_label
906 = gimple_transaction_label (as_a <gtransaction *> (last));
907 if (abort_label)
908 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
909 fallthru = true;
911 break;
913 default:
914 gcc_assert (!stmt_ends_bb_p (last));
915 fallthru = true;
916 break;
919 if (fallthru)
920 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
922 return ret;
925 /* Join all the blocks in the flowgraph. */
927 static void
928 make_edges (void)
930 basic_block bb;
931 struct omp_region *cur_region = NULL;
932 auto_vec<basic_block> ab_edge_goto;
933 auto_vec<basic_block> ab_edge_call;
934 int *bb_to_omp_idx = NULL;
935 int cur_omp_region_idx = 0;
937 /* Create an edge from entry to the first block with executable
938 statements in it. */
939 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
940 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
941 EDGE_FALLTHRU);
943 /* Traverse the basic block array placing edges. */
944 FOR_EACH_BB_FN (bb, cfun)
946 int mer;
948 if (bb_to_omp_idx)
949 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
951 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
952 if (mer == 1)
953 ab_edge_goto.safe_push (bb);
954 else if (mer == 2)
955 ab_edge_call.safe_push (bb);
957 if (cur_region && bb_to_omp_idx == NULL)
958 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
961 /* Computed gotos are hell to deal with, especially if there are
962 lots of them with a large number of destinations. So we factor
963 them to a common computed goto location before we build the
964 edge list. After we convert back to normal form, we will un-factor
965 the computed gotos since factoring introduces an unwanted jump.
966 For non-local gotos and abnormal edges from calls to calls that return
967 twice or forced labels, factor the abnormal edges too, by having all
968 abnormal edges from the calls go to a common artificial basic block
969 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
970 basic block to all forced labels and calls returning twice.
971 We do this per-OpenMP structured block, because those regions
972 are guaranteed to be single entry single exit by the standard,
973 so it is not allowed to enter or exit such regions abnormally this way,
974 thus all computed gotos, non-local gotos and setjmp/longjmp calls
975 must not transfer control across SESE region boundaries. */
976 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
978 gimple_stmt_iterator gsi;
979 basic_block dispatcher_bb_array[2] = { NULL, NULL };
980 basic_block *dispatcher_bbs = dispatcher_bb_array;
981 int count = n_basic_blocks_for_fn (cfun);
983 if (bb_to_omp_idx)
984 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
986 FOR_EACH_BB_FN (bb, cfun)
988 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
990 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
991 tree target;
993 if (!label_stmt)
994 break;
996 target = gimple_label_label (label_stmt);
998 /* Make an edge to every label block that has been marked as a
999 potential target for a computed goto or a non-local goto. */
1000 if (FORCED_LABEL (target))
1001 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1002 &ab_edge_goto, true);
1003 if (DECL_NONLOCAL (target))
1005 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1006 &ab_edge_call, false);
1007 break;
1011 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1012 gsi_next_nondebug (&gsi);
1013 if (!gsi_end_p (gsi))
1015 /* Make an edge to every setjmp-like call. */
1016 gimple call_stmt = gsi_stmt (gsi);
1017 if (is_gimple_call (call_stmt)
1018 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1019 || gimple_call_builtin_p (call_stmt,
1020 BUILT_IN_SETJMP_RECEIVER)))
1021 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1022 &ab_edge_call, false);
1026 if (bb_to_omp_idx)
1027 XDELETE (dispatcher_bbs);
1030 XDELETE (bb_to_omp_idx);
1032 free_omp_regions ();
1034 /* Fold COND_EXPR_COND of each COND_EXPR. */
1035 fold_cond_expr_cond ();
1038 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1039 needed. Returns true if new bbs were created.
1040 Note: This is transitional code, and should not be used for new code. We
1041 should be able to get rid of this by rewriting all target va-arg
1042 gimplification hooks to use an interface gimple_build_cond_value as described
1043 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1045 bool
1046 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1048 gimple stmt = gsi_stmt (*gsi);
1049 basic_block bb = gimple_bb (stmt);
1050 basic_block lastbb, afterbb;
1051 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1052 edge e;
1053 lastbb = make_blocks_1 (seq, bb);
1054 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1055 return false;
1056 e = split_block (bb, stmt);
1057 /* Move e->dest to come after the new basic blocks. */
1058 afterbb = e->dest;
1059 unlink_block (afterbb);
1060 link_block (afterbb, lastbb);
1061 redirect_edge_succ (e, bb->next_bb);
1062 bb = bb->next_bb;
1063 while (bb != afterbb)
1065 struct omp_region *cur_region = NULL;
1066 int cur_omp_region_idx = 0;
1067 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1068 gcc_assert (!mer && !cur_region);
1069 add_bb_to_loop (bb, afterbb->loop_father);
1070 bb = bb->next_bb;
1072 return true;
1075 /* Find the next available discriminator value for LOCUS. The
1076 discriminator distinguishes among several basic blocks that
1077 share a common locus, allowing for more accurate sample-based
1078 profiling. */
1080 static int
1081 next_discriminator_for_locus (location_t locus)
1083 struct locus_discrim_map item;
1084 struct locus_discrim_map **slot;
1086 item.locus = locus;
1087 item.discriminator = 0;
1088 slot = discriminator_per_locus->find_slot_with_hash (
1089 &item, LOCATION_LINE (locus), INSERT);
1090 gcc_assert (slot);
1091 if (*slot == HTAB_EMPTY_ENTRY)
1093 *slot = XNEW (struct locus_discrim_map);
1094 gcc_assert (*slot);
1095 (*slot)->locus = locus;
1096 (*slot)->discriminator = 0;
1098 (*slot)->discriminator++;
1099 return (*slot)->discriminator;
1102 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1104 static bool
1105 same_line_p (location_t locus1, location_t locus2)
1107 expanded_location from, to;
1109 if (locus1 == locus2)
1110 return true;
1112 from = expand_location (locus1);
1113 to = expand_location (locus2);
1115 if (from.line != to.line)
1116 return false;
1117 if (from.file == to.file)
1118 return true;
1119 return (from.file != NULL
1120 && to.file != NULL
1121 && filename_cmp (from.file, to.file) == 0);
1124 /* Assign discriminators to each basic block. */
1126 static void
1127 assign_discriminators (void)
1129 basic_block bb;
1131 FOR_EACH_BB_FN (bb, cfun)
1133 edge e;
1134 edge_iterator ei;
1135 gimple last = last_stmt (bb);
1136 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1138 if (locus == UNKNOWN_LOCATION)
1139 continue;
1141 FOR_EACH_EDGE (e, ei, bb->succs)
1143 gimple first = first_non_label_stmt (e->dest);
1144 gimple last = last_stmt (e->dest);
1145 if ((first && same_line_p (locus, gimple_location (first)))
1146 || (last && same_line_p (locus, gimple_location (last))))
1148 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1149 bb->discriminator = next_discriminator_for_locus (locus);
1150 else
1151 e->dest->discriminator = next_discriminator_for_locus (locus);
1157 /* Create the edges for a GIMPLE_COND starting at block BB. */
1159 static void
1160 make_cond_expr_edges (basic_block bb)
1162 gcond *entry = as_a <gcond *> (last_stmt (bb));
1163 gimple then_stmt, else_stmt;
1164 basic_block then_bb, else_bb;
1165 tree then_label, else_label;
1166 edge e;
1168 gcc_assert (entry);
1169 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1171 /* Entry basic blocks for each component. */
1172 then_label = gimple_cond_true_label (entry);
1173 else_label = gimple_cond_false_label (entry);
1174 then_bb = label_to_block (then_label);
1175 else_bb = label_to_block (else_label);
1176 then_stmt = first_stmt (then_bb);
1177 else_stmt = first_stmt (else_bb);
1179 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1180 e->goto_locus = gimple_location (then_stmt);
1181 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1182 if (e)
1183 e->goto_locus = gimple_location (else_stmt);
1185 /* We do not need the labels anymore. */
1186 gimple_cond_set_true_label (entry, NULL_TREE);
1187 gimple_cond_set_false_label (entry, NULL_TREE);
1191 /* Called for each element in the hash table (P) as we delete the
1192 edge to cases hash table.
1194 Clear all the TREE_CHAINs to prevent problems with copying of
1195 SWITCH_EXPRs and structure sharing rules, then free the hash table
1196 element. */
1198 bool
1199 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1201 tree t, next;
1203 for (t = value; t; t = next)
1205 next = CASE_CHAIN (t);
1206 CASE_CHAIN (t) = NULL;
1209 return true;
1212 /* Start recording information mapping edges to case labels. */
1214 void
1215 start_recording_case_labels (void)
1217 gcc_assert (edge_to_cases == NULL);
1218 edge_to_cases = new hash_map<edge, tree>;
1219 touched_switch_bbs = BITMAP_ALLOC (NULL);
1222 /* Return nonzero if we are recording information for case labels. */
1224 static bool
1225 recording_case_labels_p (void)
1227 return (edge_to_cases != NULL);
1230 /* Stop recording information mapping edges to case labels and
1231 remove any information we have recorded. */
1232 void
1233 end_recording_case_labels (void)
1235 bitmap_iterator bi;
1236 unsigned i;
1237 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1238 delete edge_to_cases;
1239 edge_to_cases = NULL;
1240 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1242 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1243 if (bb)
1245 gimple stmt = last_stmt (bb);
1246 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1247 group_case_labels_stmt (as_a <gswitch *> (stmt));
1250 BITMAP_FREE (touched_switch_bbs);
1253 /* If we are inside a {start,end}_recording_cases block, then return
1254 a chain of CASE_LABEL_EXPRs from T which reference E.
1256 Otherwise return NULL. */
1258 static tree
1259 get_cases_for_edge (edge e, gswitch *t)
1261 tree *slot;
1262 size_t i, n;
1264 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1265 chains available. Return NULL so the caller can detect this case. */
1266 if (!recording_case_labels_p ())
1267 return NULL;
1269 slot = edge_to_cases->get (e);
1270 if (slot)
1271 return *slot;
1273 /* If we did not find E in the hash table, then this must be the first
1274 time we have been queried for information about E & T. Add all the
1275 elements from T to the hash table then perform the query again. */
1277 n = gimple_switch_num_labels (t);
1278 for (i = 0; i < n; i++)
1280 tree elt = gimple_switch_label (t, i);
1281 tree lab = CASE_LABEL (elt);
1282 basic_block label_bb = label_to_block (lab);
1283 edge this_edge = find_edge (e->src, label_bb);
1285 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1286 a new chain. */
1287 tree &s = edge_to_cases->get_or_insert (this_edge);
1288 CASE_CHAIN (elt) = s;
1289 s = elt;
1292 return *edge_to_cases->get (e);
1295 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1297 static void
1298 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1300 size_t i, n;
1302 n = gimple_switch_num_labels (entry);
1304 for (i = 0; i < n; ++i)
1306 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1307 basic_block label_bb = label_to_block (lab);
1308 make_edge (bb, label_bb, 0);
1313 /* Return the basic block holding label DEST. */
1315 basic_block
1316 label_to_block_fn (struct function *ifun, tree dest)
1318 int uid = LABEL_DECL_UID (dest);
1320 /* We would die hard when faced by an undefined label. Emit a label to
1321 the very first basic block. This will hopefully make even the dataflow
1322 and undefined variable warnings quite right. */
1323 if (seen_error () && uid < 0)
1325 gimple_stmt_iterator gsi =
1326 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1327 gimple stmt;
1329 stmt = gimple_build_label (dest);
1330 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1331 uid = LABEL_DECL_UID (dest);
1333 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1334 return NULL;
1335 return (*ifun->cfg->x_label_to_block_map)[uid];
1338 /* Create edges for a goto statement at block BB. Returns true
1339 if abnormal edges should be created. */
1341 static bool
1342 make_goto_expr_edges (basic_block bb)
1344 gimple_stmt_iterator last = gsi_last_bb (bb);
1345 gimple goto_t = gsi_stmt (last);
1347 /* A simple GOTO creates normal edges. */
1348 if (simple_goto_p (goto_t))
1350 tree dest = gimple_goto_dest (goto_t);
1351 basic_block label_bb = label_to_block (dest);
1352 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1353 e->goto_locus = gimple_location (goto_t);
1354 gsi_remove (&last, true);
1355 return false;
1358 /* A computed GOTO creates abnormal edges. */
1359 return true;
1362 /* Create edges for an asm statement with labels at block BB. */
1364 static void
1365 make_gimple_asm_edges (basic_block bb)
1367 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1368 int i, n = gimple_asm_nlabels (stmt);
1370 for (i = 0; i < n; ++i)
1372 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1373 basic_block label_bb = label_to_block (label);
1374 make_edge (bb, label_bb, 0);
1378 /*---------------------------------------------------------------------------
1379 Flowgraph analysis
1380 ---------------------------------------------------------------------------*/
1382 /* Cleanup useless labels in basic blocks. This is something we wish
1383 to do early because it allows us to group case labels before creating
1384 the edges for the CFG, and it speeds up block statement iterators in
1385 all passes later on.
1386 We rerun this pass after CFG is created, to get rid of the labels that
1387 are no longer referenced. After then we do not run it any more, since
1388 (almost) no new labels should be created. */
1390 /* A map from basic block index to the leading label of that block. */
1391 static struct label_record
1393 /* The label. */
1394 tree label;
1396 /* True if the label is referenced from somewhere. */
1397 bool used;
1398 } *label_for_bb;
1400 /* Given LABEL return the first label in the same basic block. */
1402 static tree
1403 main_block_label (tree label)
1405 basic_block bb = label_to_block (label);
1406 tree main_label = label_for_bb[bb->index].label;
1408 /* label_to_block possibly inserted undefined label into the chain. */
1409 if (!main_label)
1411 label_for_bb[bb->index].label = label;
1412 main_label = label;
1415 label_for_bb[bb->index].used = true;
1416 return main_label;
1419 /* Clean up redundant labels within the exception tree. */
1421 static void
1422 cleanup_dead_labels_eh (void)
1424 eh_landing_pad lp;
1425 eh_region r;
1426 tree lab;
1427 int i;
1429 if (cfun->eh == NULL)
1430 return;
1432 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1433 if (lp && lp->post_landing_pad)
1435 lab = main_block_label (lp->post_landing_pad);
1436 if (lab != lp->post_landing_pad)
1438 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1439 EH_LANDING_PAD_NR (lab) = lp->index;
1443 FOR_ALL_EH_REGION (r)
1444 switch (r->type)
1446 case ERT_CLEANUP:
1447 case ERT_MUST_NOT_THROW:
1448 break;
1450 case ERT_TRY:
1452 eh_catch c;
1453 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1455 lab = c->label;
1456 if (lab)
1457 c->label = main_block_label (lab);
1460 break;
1462 case ERT_ALLOWED_EXCEPTIONS:
1463 lab = r->u.allowed.label;
1464 if (lab)
1465 r->u.allowed.label = main_block_label (lab);
1466 break;
1471 /* Cleanup redundant labels. This is a three-step process:
1472 1) Find the leading label for each block.
1473 2) Redirect all references to labels to the leading labels.
1474 3) Cleanup all useless labels. */
1476 void
1477 cleanup_dead_labels (void)
1479 basic_block bb;
1480 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1482 /* Find a suitable label for each block. We use the first user-defined
1483 label if there is one, or otherwise just the first label we see. */
1484 FOR_EACH_BB_FN (bb, cfun)
1486 gimple_stmt_iterator i;
1488 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1490 tree label;
1491 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1493 if (!label_stmt)
1494 break;
1496 label = gimple_label_label (label_stmt);
1498 /* If we have not yet seen a label for the current block,
1499 remember this one and see if there are more labels. */
1500 if (!label_for_bb[bb->index].label)
1502 label_for_bb[bb->index].label = label;
1503 continue;
1506 /* If we did see a label for the current block already, but it
1507 is an artificially created label, replace it if the current
1508 label is a user defined label. */
1509 if (!DECL_ARTIFICIAL (label)
1510 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1512 label_for_bb[bb->index].label = label;
1513 break;
1518 /* Now redirect all jumps/branches to the selected label.
1519 First do so for each block ending in a control statement. */
1520 FOR_EACH_BB_FN (bb, cfun)
1522 gimple stmt = last_stmt (bb);
1523 tree label, new_label;
1525 if (!stmt)
1526 continue;
1528 switch (gimple_code (stmt))
1530 case GIMPLE_COND:
1532 gcond *cond_stmt = as_a <gcond *> (stmt);
1533 label = gimple_cond_true_label (cond_stmt);
1534 if (label)
1536 new_label = main_block_label (label);
1537 if (new_label != label)
1538 gimple_cond_set_true_label (cond_stmt, new_label);
1541 label = gimple_cond_false_label (cond_stmt);
1542 if (label)
1544 new_label = main_block_label (label);
1545 if (new_label != label)
1546 gimple_cond_set_false_label (cond_stmt, new_label);
1549 break;
1551 case GIMPLE_SWITCH:
1553 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1554 size_t i, n = gimple_switch_num_labels (switch_stmt);
1556 /* Replace all destination labels. */
1557 for (i = 0; i < n; ++i)
1559 tree case_label = gimple_switch_label (switch_stmt, i);
1560 label = CASE_LABEL (case_label);
1561 new_label = main_block_label (label);
1562 if (new_label != label)
1563 CASE_LABEL (case_label) = new_label;
1565 break;
1568 case GIMPLE_ASM:
1570 gasm *asm_stmt = as_a <gasm *> (stmt);
1571 int i, n = gimple_asm_nlabels (asm_stmt);
1573 for (i = 0; i < n; ++i)
1575 tree cons = gimple_asm_label_op (asm_stmt, i);
1576 tree label = main_block_label (TREE_VALUE (cons));
1577 TREE_VALUE (cons) = label;
1579 break;
1582 /* We have to handle gotos until they're removed, and we don't
1583 remove them until after we've created the CFG edges. */
1584 case GIMPLE_GOTO:
1585 if (!computed_goto_p (stmt))
1587 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1588 label = gimple_goto_dest (goto_stmt);
1589 new_label = main_block_label (label);
1590 if (new_label != label)
1591 gimple_goto_set_dest (goto_stmt, new_label);
1593 break;
1595 case GIMPLE_TRANSACTION:
1597 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1598 tree label = gimple_transaction_label (trans_stmt);
1599 if (label)
1601 tree new_label = main_block_label (label);
1602 if (new_label != label)
1603 gimple_transaction_set_label (trans_stmt, new_label);
1606 break;
1608 default:
1609 break;
1613 /* Do the same for the exception region tree labels. */
1614 cleanup_dead_labels_eh ();
1616 /* Finally, purge dead labels. All user-defined labels and labels that
1617 can be the target of non-local gotos and labels which have their
1618 address taken are preserved. */
1619 FOR_EACH_BB_FN (bb, cfun)
1621 gimple_stmt_iterator i;
1622 tree label_for_this_bb = label_for_bb[bb->index].label;
1624 if (!label_for_this_bb)
1625 continue;
1627 /* If the main label of the block is unused, we may still remove it. */
1628 if (!label_for_bb[bb->index].used)
1629 label_for_this_bb = NULL;
1631 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1633 tree label;
1634 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1636 if (!label_stmt)
1637 break;
1639 label = gimple_label_label (label_stmt);
1641 if (label == label_for_this_bb
1642 || !DECL_ARTIFICIAL (label)
1643 || DECL_NONLOCAL (label)
1644 || FORCED_LABEL (label))
1645 gsi_next (&i);
1646 else
1647 gsi_remove (&i, true);
1651 free (label_for_bb);
1654 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1655 the ones jumping to the same label.
1656 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1658 void
1659 group_case_labels_stmt (gswitch *stmt)
1661 int old_size = gimple_switch_num_labels (stmt);
1662 int i, j, new_size = old_size;
1663 basic_block default_bb = NULL;
1665 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1667 /* Look for possible opportunities to merge cases. */
1668 i = 1;
1669 while (i < old_size)
1671 tree base_case, base_high;
1672 basic_block base_bb;
1674 base_case = gimple_switch_label (stmt, i);
1676 gcc_assert (base_case);
1677 base_bb = label_to_block (CASE_LABEL (base_case));
1679 /* Discard cases that have the same destination as the
1680 default case. */
1681 if (base_bb == default_bb)
1683 gimple_switch_set_label (stmt, i, NULL_TREE);
1684 i++;
1685 new_size--;
1686 continue;
1689 base_high = CASE_HIGH (base_case)
1690 ? CASE_HIGH (base_case)
1691 : CASE_LOW (base_case);
1692 i++;
1694 /* Try to merge case labels. Break out when we reach the end
1695 of the label vector or when we cannot merge the next case
1696 label with the current one. */
1697 while (i < old_size)
1699 tree merge_case = gimple_switch_label (stmt, i);
1700 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1701 wide_int bhp1 = wi::add (base_high, 1);
1703 /* Merge the cases if they jump to the same place,
1704 and their ranges are consecutive. */
1705 if (merge_bb == base_bb
1706 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1708 base_high = CASE_HIGH (merge_case) ?
1709 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1710 CASE_HIGH (base_case) = base_high;
1711 gimple_switch_set_label (stmt, i, NULL_TREE);
1712 new_size--;
1713 i++;
1715 else
1716 break;
1720 /* Compress the case labels in the label vector, and adjust the
1721 length of the vector. */
1722 for (i = 0, j = 0; i < new_size; i++)
1724 while (! gimple_switch_label (stmt, j))
1725 j++;
1726 gimple_switch_set_label (stmt, i,
1727 gimple_switch_label (stmt, j++));
1730 gcc_assert (new_size <= old_size);
1731 gimple_switch_set_num_labels (stmt, new_size);
1734 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1735 and scan the sorted vector of cases. Combine the ones jumping to the
1736 same label. */
1738 void
1739 group_case_labels (void)
1741 basic_block bb;
1743 FOR_EACH_BB_FN (bb, cfun)
1745 gimple stmt = last_stmt (bb);
1746 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1747 group_case_labels_stmt (as_a <gswitch *> (stmt));
1751 /* Checks whether we can merge block B into block A. */
1753 static bool
1754 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1756 gimple stmt;
1758 if (!single_succ_p (a))
1759 return false;
1761 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1762 return false;
1764 if (single_succ (a) != b)
1765 return false;
1767 if (!single_pred_p (b))
1768 return false;
1770 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1771 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1772 return false;
1774 /* If A ends by a statement causing exceptions or something similar, we
1775 cannot merge the blocks. */
1776 stmt = last_stmt (a);
1777 if (stmt && stmt_ends_bb_p (stmt))
1778 return false;
1780 /* Do not allow a block with only a non-local label to be merged. */
1781 if (stmt)
1782 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1783 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1784 return false;
1786 /* Examine the labels at the beginning of B. */
1787 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1788 gsi_next (&gsi))
1790 tree lab;
1791 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1792 if (!label_stmt)
1793 break;
1794 lab = gimple_label_label (label_stmt);
1796 /* Do not remove user forced labels or for -O0 any user labels. */
1797 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1798 return false;
1801 /* Protect simple loop latches. We only want to avoid merging
1802 the latch with the loop header or with a block in another
1803 loop in this case. */
1804 if (current_loops
1805 && b->loop_father->latch == b
1806 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1807 && (b->loop_father->header == a
1808 || b->loop_father != a->loop_father))
1809 return false;
1811 /* It must be possible to eliminate all phi nodes in B. If ssa form
1812 is not up-to-date and a name-mapping is registered, we cannot eliminate
1813 any phis. Symbols marked for renaming are never a problem though. */
1814 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1815 gsi_next (&gsi))
1817 gphi *phi = gsi.phi ();
1818 /* Technically only new names matter. */
1819 if (name_registered_for_update_p (PHI_RESULT (phi)))
1820 return false;
1823 /* When not optimizing, don't merge if we'd lose goto_locus. */
1824 if (!optimize
1825 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1827 location_t goto_locus = single_succ_edge (a)->goto_locus;
1828 gimple_stmt_iterator prev, next;
1829 prev = gsi_last_nondebug_bb (a);
1830 next = gsi_after_labels (b);
1831 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1832 gsi_next_nondebug (&next);
1833 if ((gsi_end_p (prev)
1834 || gimple_location (gsi_stmt (prev)) != goto_locus)
1835 && (gsi_end_p (next)
1836 || gimple_location (gsi_stmt (next)) != goto_locus))
1837 return false;
1840 return true;
1843 /* Replaces all uses of NAME by VAL. */
1845 void
1846 replace_uses_by (tree name, tree val)
1848 imm_use_iterator imm_iter;
1849 use_operand_p use;
1850 gimple stmt;
1851 edge e;
1853 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1855 /* Mark the block if we change the last stmt in it. */
1856 if (cfgcleanup_altered_bbs
1857 && stmt_ends_bb_p (stmt))
1858 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1860 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1862 replace_exp (use, val);
1864 if (gimple_code (stmt) == GIMPLE_PHI)
1866 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1867 PHI_ARG_INDEX_FROM_USE (use));
1868 if (e->flags & EDGE_ABNORMAL
1869 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1871 /* This can only occur for virtual operands, since
1872 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1873 would prevent replacement. */
1874 gcc_checking_assert (virtual_operand_p (name));
1875 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1880 if (gimple_code (stmt) != GIMPLE_PHI)
1882 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1883 gimple orig_stmt = stmt;
1884 size_t i;
1886 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1887 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1888 only change sth from non-invariant to invariant, and only
1889 when propagating constants. */
1890 if (is_gimple_min_invariant (val))
1891 for (i = 0; i < gimple_num_ops (stmt); i++)
1893 tree op = gimple_op (stmt, i);
1894 /* Operands may be empty here. For example, the labels
1895 of a GIMPLE_COND are nulled out following the creation
1896 of the corresponding CFG edges. */
1897 if (op && TREE_CODE (op) == ADDR_EXPR)
1898 recompute_tree_invariant_for_addr_expr (op);
1901 if (fold_stmt (&gsi))
1902 stmt = gsi_stmt (gsi);
1904 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1905 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1907 update_stmt (stmt);
1911 gcc_checking_assert (has_zero_uses (name));
1913 /* Also update the trees stored in loop structures. */
1914 if (current_loops)
1916 struct loop *loop;
1918 FOR_EACH_LOOP (loop, 0)
1920 substitute_in_loop_info (loop, name, val);
1925 /* Merge block B into block A. */
1927 static void
1928 gimple_merge_blocks (basic_block a, basic_block b)
1930 gimple_stmt_iterator last, gsi;
1931 gphi_iterator psi;
1933 if (dump_file)
1934 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1936 /* Remove all single-valued PHI nodes from block B of the form
1937 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1938 gsi = gsi_last_bb (a);
1939 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1941 gimple phi = gsi_stmt (psi);
1942 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1943 gimple copy;
1944 bool may_replace_uses = (virtual_operand_p (def)
1945 || may_propagate_copy (def, use));
1947 /* In case we maintain loop closed ssa form, do not propagate arguments
1948 of loop exit phi nodes. */
1949 if (current_loops
1950 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1951 && !virtual_operand_p (def)
1952 && TREE_CODE (use) == SSA_NAME
1953 && a->loop_father != b->loop_father)
1954 may_replace_uses = false;
1956 if (!may_replace_uses)
1958 gcc_assert (!virtual_operand_p (def));
1960 /* Note that just emitting the copies is fine -- there is no problem
1961 with ordering of phi nodes. This is because A is the single
1962 predecessor of B, therefore results of the phi nodes cannot
1963 appear as arguments of the phi nodes. */
1964 copy = gimple_build_assign (def, use);
1965 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1966 remove_phi_node (&psi, false);
1968 else
1970 /* If we deal with a PHI for virtual operands, we can simply
1971 propagate these without fussing with folding or updating
1972 the stmt. */
1973 if (virtual_operand_p (def))
1975 imm_use_iterator iter;
1976 use_operand_p use_p;
1977 gimple stmt;
1979 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1980 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1981 SET_USE (use_p, use);
1983 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1984 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1986 else
1987 replace_uses_by (def, use);
1989 remove_phi_node (&psi, true);
1993 /* Ensure that B follows A. */
1994 move_block_after (b, a);
1996 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1997 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1999 /* Remove labels from B and set gimple_bb to A for other statements. */
2000 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2002 gimple stmt = gsi_stmt (gsi);
2003 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2005 tree label = gimple_label_label (label_stmt);
2006 int lp_nr;
2008 gsi_remove (&gsi, false);
2010 /* Now that we can thread computed gotos, we might have
2011 a situation where we have a forced label in block B
2012 However, the label at the start of block B might still be
2013 used in other ways (think about the runtime checking for
2014 Fortran assigned gotos). So we can not just delete the
2015 label. Instead we move the label to the start of block A. */
2016 if (FORCED_LABEL (label))
2018 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2019 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2021 /* Other user labels keep around in a form of a debug stmt. */
2022 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2024 gimple dbg = gimple_build_debug_bind (label,
2025 integer_zero_node,
2026 stmt);
2027 gimple_debug_bind_reset_value (dbg);
2028 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2031 lp_nr = EH_LANDING_PAD_NR (label);
2032 if (lp_nr)
2034 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2035 lp->post_landing_pad = NULL;
2038 else
2040 gimple_set_bb (stmt, a);
2041 gsi_next (&gsi);
2045 /* When merging two BBs, if their counts are different, the larger count
2046 is selected as the new bb count. This is to handle inconsistent
2047 profiles. */
2048 if (a->loop_father == b->loop_father)
2050 a->count = MAX (a->count, b->count);
2051 a->frequency = MAX (a->frequency, b->frequency);
2054 /* Merge the sequences. */
2055 last = gsi_last_bb (a);
2056 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2057 set_bb_seq (b, NULL);
2059 if (cfgcleanup_altered_bbs)
2060 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2064 /* Return the one of two successors of BB that is not reachable by a
2065 complex edge, if there is one. Else, return BB. We use
2066 this in optimizations that use post-dominators for their heuristics,
2067 to catch the cases in C++ where function calls are involved. */
2069 basic_block
2070 single_noncomplex_succ (basic_block bb)
2072 edge e0, e1;
2073 if (EDGE_COUNT (bb->succs) != 2)
2074 return bb;
2076 e0 = EDGE_SUCC (bb, 0);
2077 e1 = EDGE_SUCC (bb, 1);
2078 if (e0->flags & EDGE_COMPLEX)
2079 return e1->dest;
2080 if (e1->flags & EDGE_COMPLEX)
2081 return e0->dest;
2083 return bb;
2086 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2088 void
2089 notice_special_calls (gcall *call)
2091 int flags = gimple_call_flags (call);
2093 if (flags & ECF_MAY_BE_ALLOCA)
2094 cfun->calls_alloca = true;
2095 if (flags & ECF_RETURNS_TWICE)
2096 cfun->calls_setjmp = true;
2100 /* Clear flags set by notice_special_calls. Used by dead code removal
2101 to update the flags. */
2103 void
2104 clear_special_calls (void)
2106 cfun->calls_alloca = false;
2107 cfun->calls_setjmp = false;
2110 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2112 static void
2113 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2115 /* Since this block is no longer reachable, we can just delete all
2116 of its PHI nodes. */
2117 remove_phi_nodes (bb);
2119 /* Remove edges to BB's successors. */
2120 while (EDGE_COUNT (bb->succs) > 0)
2121 remove_edge (EDGE_SUCC (bb, 0));
2125 /* Remove statements of basic block BB. */
2127 static void
2128 remove_bb (basic_block bb)
2130 gimple_stmt_iterator i;
2132 if (dump_file)
2134 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2135 if (dump_flags & TDF_DETAILS)
2137 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2138 fprintf (dump_file, "\n");
2142 if (current_loops)
2144 struct loop *loop = bb->loop_father;
2146 /* If a loop gets removed, clean up the information associated
2147 with it. */
2148 if (loop->latch == bb
2149 || loop->header == bb)
2150 free_numbers_of_iterations_estimates_loop (loop);
2153 /* Remove all the instructions in the block. */
2154 if (bb_seq (bb) != NULL)
2156 /* Walk backwards so as to get a chance to substitute all
2157 released DEFs into debug stmts. See
2158 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2159 details. */
2160 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2162 gimple stmt = gsi_stmt (i);
2163 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2164 if (label_stmt
2165 && (FORCED_LABEL (gimple_label_label (label_stmt))
2166 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2168 basic_block new_bb;
2169 gimple_stmt_iterator new_gsi;
2171 /* A non-reachable non-local label may still be referenced.
2172 But it no longer needs to carry the extra semantics of
2173 non-locality. */
2174 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2176 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2177 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2180 new_bb = bb->prev_bb;
2181 new_gsi = gsi_start_bb (new_bb);
2182 gsi_remove (&i, false);
2183 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2185 else
2187 /* Release SSA definitions if we are in SSA. Note that we
2188 may be called when not in SSA. For example,
2189 final_cleanup calls this function via
2190 cleanup_tree_cfg. */
2191 if (gimple_in_ssa_p (cfun))
2192 release_defs (stmt);
2194 gsi_remove (&i, true);
2197 if (gsi_end_p (i))
2198 i = gsi_last_bb (bb);
2199 else
2200 gsi_prev (&i);
2204 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2205 bb->il.gimple.seq = NULL;
2206 bb->il.gimple.phi_nodes = NULL;
2210 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2211 predicate VAL, return the edge that will be taken out of the block.
2212 If VAL does not match a unique edge, NULL is returned. */
2214 edge
2215 find_taken_edge (basic_block bb, tree val)
2217 gimple stmt;
2219 stmt = last_stmt (bb);
2221 gcc_assert (stmt);
2222 gcc_assert (is_ctrl_stmt (stmt));
2224 if (val == NULL)
2225 return NULL;
2227 if (!is_gimple_min_invariant (val))
2228 return NULL;
2230 if (gimple_code (stmt) == GIMPLE_COND)
2231 return find_taken_edge_cond_expr (bb, val);
2233 if (gimple_code (stmt) == GIMPLE_SWITCH)
2234 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2236 if (computed_goto_p (stmt))
2238 /* Only optimize if the argument is a label, if the argument is
2239 not a label then we can not construct a proper CFG.
2241 It may be the case that we only need to allow the LABEL_REF to
2242 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2243 appear inside a LABEL_EXPR just to be safe. */
2244 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2245 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2246 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2247 return NULL;
2250 gcc_unreachable ();
2253 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2254 statement, determine which of the outgoing edges will be taken out of the
2255 block. Return NULL if either edge may be taken. */
2257 static edge
2258 find_taken_edge_computed_goto (basic_block bb, tree val)
2260 basic_block dest;
2261 edge e = NULL;
2263 dest = label_to_block (val);
2264 if (dest)
2266 e = find_edge (bb, dest);
2267 gcc_assert (e != NULL);
2270 return e;
2273 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2274 statement, determine which of the two edges will be taken out of the
2275 block. Return NULL if either edge may be taken. */
2277 static edge
2278 find_taken_edge_cond_expr (basic_block bb, tree val)
2280 edge true_edge, false_edge;
2282 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2284 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2285 return (integer_zerop (val) ? false_edge : true_edge);
2288 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2289 statement, determine which edge will be taken out of the block. Return
2290 NULL if any edge may be taken. */
2292 static edge
2293 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2294 tree val)
2296 basic_block dest_bb;
2297 edge e;
2298 tree taken_case;
2300 taken_case = find_case_label_for_value (switch_stmt, val);
2301 dest_bb = label_to_block (CASE_LABEL (taken_case));
2303 e = find_edge (bb, dest_bb);
2304 gcc_assert (e);
2305 return e;
2309 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2310 We can make optimal use here of the fact that the case labels are
2311 sorted: We can do a binary search for a case matching VAL. */
2313 static tree
2314 find_case_label_for_value (gswitch *switch_stmt, tree val)
2316 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2317 tree default_case = gimple_switch_default_label (switch_stmt);
2319 for (low = 0, high = n; high - low > 1; )
2321 size_t i = (high + low) / 2;
2322 tree t = gimple_switch_label (switch_stmt, i);
2323 int cmp;
2325 /* Cache the result of comparing CASE_LOW and val. */
2326 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2328 if (cmp > 0)
2329 high = i;
2330 else
2331 low = i;
2333 if (CASE_HIGH (t) == NULL)
2335 /* A singe-valued case label. */
2336 if (cmp == 0)
2337 return t;
2339 else
2341 /* A case range. We can only handle integer ranges. */
2342 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2343 return t;
2347 return default_case;
2351 /* Dump a basic block on stderr. */
2353 void
2354 gimple_debug_bb (basic_block bb)
2356 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2360 /* Dump basic block with index N on stderr. */
2362 basic_block
2363 gimple_debug_bb_n (int n)
2365 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2366 return BASIC_BLOCK_FOR_FN (cfun, n);
2370 /* Dump the CFG on stderr.
2372 FLAGS are the same used by the tree dumping functions
2373 (see TDF_* in dumpfile.h). */
2375 void
2376 gimple_debug_cfg (int flags)
2378 gimple_dump_cfg (stderr, flags);
2382 /* Dump the program showing basic block boundaries on the given FILE.
2384 FLAGS are the same used by the tree dumping functions (see TDF_* in
2385 tree.h). */
2387 void
2388 gimple_dump_cfg (FILE *file, int flags)
2390 if (flags & TDF_DETAILS)
2392 dump_function_header (file, current_function_decl, flags);
2393 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2394 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2395 last_basic_block_for_fn (cfun));
2397 brief_dump_cfg (file, flags | TDF_COMMENT);
2398 fprintf (file, "\n");
2401 if (flags & TDF_STATS)
2402 dump_cfg_stats (file);
2404 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2408 /* Dump CFG statistics on FILE. */
2410 void
2411 dump_cfg_stats (FILE *file)
2413 static long max_num_merged_labels = 0;
2414 unsigned long size, total = 0;
2415 long num_edges;
2416 basic_block bb;
2417 const char * const fmt_str = "%-30s%-13s%12s\n";
2418 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2419 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2420 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2421 const char *funcname = current_function_name ();
2423 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2425 fprintf (file, "---------------------------------------------------------\n");
2426 fprintf (file, fmt_str, "", " Number of ", "Memory");
2427 fprintf (file, fmt_str, "", " instances ", "used ");
2428 fprintf (file, "---------------------------------------------------------\n");
2430 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2431 total += size;
2432 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2433 SCALE (size), LABEL (size));
2435 num_edges = 0;
2436 FOR_EACH_BB_FN (bb, cfun)
2437 num_edges += EDGE_COUNT (bb->succs);
2438 size = num_edges * sizeof (struct edge_def);
2439 total += size;
2440 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2442 fprintf (file, "---------------------------------------------------------\n");
2443 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2444 LABEL (total));
2445 fprintf (file, "---------------------------------------------------------\n");
2446 fprintf (file, "\n");
2448 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2449 max_num_merged_labels = cfg_stats.num_merged_labels;
2451 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2452 cfg_stats.num_merged_labels, max_num_merged_labels);
2454 fprintf (file, "\n");
2458 /* Dump CFG statistics on stderr. Keep extern so that it's always
2459 linked in the final executable. */
2461 DEBUG_FUNCTION void
2462 debug_cfg_stats (void)
2464 dump_cfg_stats (stderr);
2467 /*---------------------------------------------------------------------------
2468 Miscellaneous helpers
2469 ---------------------------------------------------------------------------*/
2471 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2472 flow. Transfers of control flow associated with EH are excluded. */
2474 static bool
2475 call_can_make_abnormal_goto (gimple t)
2477 /* If the function has no non-local labels, then a call cannot make an
2478 abnormal transfer of control. */
2479 if (!cfun->has_nonlocal_label
2480 && !cfun->calls_setjmp)
2481 return false;
2483 /* Likewise if the call has no side effects. */
2484 if (!gimple_has_side_effects (t))
2485 return false;
2487 /* Likewise if the called function is leaf. */
2488 if (gimple_call_flags (t) & ECF_LEAF)
2489 return false;
2491 return true;
2495 /* Return true if T can make an abnormal transfer of control flow.
2496 Transfers of control flow associated with EH are excluded. */
2498 bool
2499 stmt_can_make_abnormal_goto (gimple t)
2501 if (computed_goto_p (t))
2502 return true;
2503 if (is_gimple_call (t))
2504 return call_can_make_abnormal_goto (t);
2505 return false;
2509 /* Return true if T represents a stmt that always transfers control. */
2511 bool
2512 is_ctrl_stmt (gimple t)
2514 switch (gimple_code (t))
2516 case GIMPLE_COND:
2517 case GIMPLE_SWITCH:
2518 case GIMPLE_GOTO:
2519 case GIMPLE_RETURN:
2520 case GIMPLE_RESX:
2521 return true;
2522 default:
2523 return false;
2528 /* Return true if T is a statement that may alter the flow of control
2529 (e.g., a call to a non-returning function). */
2531 bool
2532 is_ctrl_altering_stmt (gimple t)
2534 gcc_assert (t);
2536 switch (gimple_code (t))
2538 case GIMPLE_CALL:
2539 /* Per stmt call flag indicates whether the call could alter
2540 controlflow. */
2541 if (gimple_call_ctrl_altering_p (t))
2542 return true;
2543 break;
2545 case GIMPLE_EH_DISPATCH:
2546 /* EH_DISPATCH branches to the individual catch handlers at
2547 this level of a try or allowed-exceptions region. It can
2548 fallthru to the next statement as well. */
2549 return true;
2551 case GIMPLE_ASM:
2552 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2553 return true;
2554 break;
2556 CASE_GIMPLE_OMP:
2557 /* OpenMP directives alter control flow. */
2558 return true;
2560 case GIMPLE_TRANSACTION:
2561 /* A transaction start alters control flow. */
2562 return true;
2564 default:
2565 break;
2568 /* If a statement can throw, it alters control flow. */
2569 return stmt_can_throw_internal (t);
2573 /* Return true if T is a simple local goto. */
2575 bool
2576 simple_goto_p (gimple t)
2578 return (gimple_code (t) == GIMPLE_GOTO
2579 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2583 /* Return true if STMT should start a new basic block. PREV_STMT is
2584 the statement preceding STMT. It is used when STMT is a label or a
2585 case label. Labels should only start a new basic block if their
2586 previous statement wasn't a label. Otherwise, sequence of labels
2587 would generate unnecessary basic blocks that only contain a single
2588 label. */
2590 static inline bool
2591 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2593 if (stmt == NULL)
2594 return false;
2596 /* Labels start a new basic block only if the preceding statement
2597 wasn't a label of the same type. This prevents the creation of
2598 consecutive blocks that have nothing but a single label. */
2599 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2601 /* Nonlocal and computed GOTO targets always start a new block. */
2602 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2603 || FORCED_LABEL (gimple_label_label (label_stmt)))
2604 return true;
2606 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2608 if (DECL_NONLOCAL (gimple_label_label (
2609 as_a <glabel *> (prev_stmt))))
2610 return true;
2612 cfg_stats.num_merged_labels++;
2613 return false;
2615 else
2616 return true;
2618 else if (gimple_code (stmt) == GIMPLE_CALL
2619 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2620 /* setjmp acts similar to a nonlocal GOTO target and thus should
2621 start a new block. */
2622 return true;
2624 return false;
2628 /* Return true if T should end a basic block. */
2630 bool
2631 stmt_ends_bb_p (gimple t)
2633 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2636 /* Remove block annotations and other data structures. */
2638 void
2639 delete_tree_cfg_annotations (void)
2641 vec_free (label_to_block_map_for_fn (cfun));
2645 /* Return the first statement in basic block BB. */
2647 gimple
2648 first_stmt (basic_block bb)
2650 gimple_stmt_iterator i = gsi_start_bb (bb);
2651 gimple stmt = NULL;
2653 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2655 gsi_next (&i);
2656 stmt = NULL;
2658 return stmt;
2661 /* Return the first non-label statement in basic block BB. */
2663 static gimple
2664 first_non_label_stmt (basic_block bb)
2666 gimple_stmt_iterator i = gsi_start_bb (bb);
2667 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2668 gsi_next (&i);
2669 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2672 /* Return the last statement in basic block BB. */
2674 gimple
2675 last_stmt (basic_block bb)
2677 gimple_stmt_iterator i = gsi_last_bb (bb);
2678 gimple stmt = NULL;
2680 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2682 gsi_prev (&i);
2683 stmt = NULL;
2685 return stmt;
2688 /* Return the last statement of an otherwise empty block. Return NULL
2689 if the block is totally empty, or if it contains more than one
2690 statement. */
2692 gimple
2693 last_and_only_stmt (basic_block bb)
2695 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2696 gimple last, prev;
2698 if (gsi_end_p (i))
2699 return NULL;
2701 last = gsi_stmt (i);
2702 gsi_prev_nondebug (&i);
2703 if (gsi_end_p (i))
2704 return last;
2706 /* Empty statements should no longer appear in the instruction stream.
2707 Everything that might have appeared before should be deleted by
2708 remove_useless_stmts, and the optimizers should just gsi_remove
2709 instead of smashing with build_empty_stmt.
2711 Thus the only thing that should appear here in a block containing
2712 one executable statement is a label. */
2713 prev = gsi_stmt (i);
2714 if (gimple_code (prev) == GIMPLE_LABEL)
2715 return last;
2716 else
2717 return NULL;
2720 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2722 static void
2723 reinstall_phi_args (edge new_edge, edge old_edge)
2725 edge_var_map *vm;
2726 int i;
2727 gphi_iterator phis;
2729 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2730 if (!v)
2731 return;
2733 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2734 v->iterate (i, &vm) && !gsi_end_p (phis);
2735 i++, gsi_next (&phis))
2737 gphi *phi = phis.phi ();
2738 tree result = redirect_edge_var_map_result (vm);
2739 tree arg = redirect_edge_var_map_def (vm);
2741 gcc_assert (result == gimple_phi_result (phi));
2743 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2746 redirect_edge_var_map_clear (old_edge);
2749 /* Returns the basic block after which the new basic block created
2750 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2751 near its "logical" location. This is of most help to humans looking
2752 at debugging dumps. */
2754 basic_block
2755 split_edge_bb_loc (edge edge_in)
2757 basic_block dest = edge_in->dest;
2758 basic_block dest_prev = dest->prev_bb;
2760 if (dest_prev)
2762 edge e = find_edge (dest_prev, dest);
2763 if (e && !(e->flags & EDGE_COMPLEX))
2764 return edge_in->src;
2766 return dest_prev;
2769 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2770 Abort on abnormal edges. */
2772 static basic_block
2773 gimple_split_edge (edge edge_in)
2775 basic_block new_bb, after_bb, dest;
2776 edge new_edge, e;
2778 /* Abnormal edges cannot be split. */
2779 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2781 dest = edge_in->dest;
2783 after_bb = split_edge_bb_loc (edge_in);
2785 new_bb = create_empty_bb (after_bb);
2786 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2787 new_bb->count = edge_in->count;
2788 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2789 new_edge->probability = REG_BR_PROB_BASE;
2790 new_edge->count = edge_in->count;
2792 e = redirect_edge_and_branch (edge_in, new_bb);
2793 gcc_assert (e == edge_in);
2794 reinstall_phi_args (new_edge, e);
2796 return new_bb;
2800 /* Verify properties of the address expression T with base object BASE. */
2802 static tree
2803 verify_address (tree t, tree base)
2805 bool old_constant;
2806 bool old_side_effects;
2807 bool new_constant;
2808 bool new_side_effects;
2810 old_constant = TREE_CONSTANT (t);
2811 old_side_effects = TREE_SIDE_EFFECTS (t);
2813 recompute_tree_invariant_for_addr_expr (t);
2814 new_side_effects = TREE_SIDE_EFFECTS (t);
2815 new_constant = TREE_CONSTANT (t);
2817 if (old_constant != new_constant)
2819 error ("constant not recomputed when ADDR_EXPR changed");
2820 return t;
2822 if (old_side_effects != new_side_effects)
2824 error ("side effects not recomputed when ADDR_EXPR changed");
2825 return t;
2828 if (!(TREE_CODE (base) == VAR_DECL
2829 || TREE_CODE (base) == PARM_DECL
2830 || TREE_CODE (base) == RESULT_DECL))
2831 return NULL_TREE;
2833 if (DECL_GIMPLE_REG_P (base))
2835 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2836 return base;
2839 return NULL_TREE;
2842 /* Callback for walk_tree, check that all elements with address taken are
2843 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2844 inside a PHI node. */
2846 static tree
2847 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2849 tree t = *tp, x;
2851 if (TYPE_P (t))
2852 *walk_subtrees = 0;
2854 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2855 #define CHECK_OP(N, MSG) \
2856 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2857 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2859 switch (TREE_CODE (t))
2861 case SSA_NAME:
2862 if (SSA_NAME_IN_FREE_LIST (t))
2864 error ("SSA name in freelist but still referenced");
2865 return *tp;
2867 break;
2869 case INDIRECT_REF:
2870 error ("INDIRECT_REF in gimple IL");
2871 return t;
2873 case MEM_REF:
2874 x = TREE_OPERAND (t, 0);
2875 if (!POINTER_TYPE_P (TREE_TYPE (x))
2876 || !is_gimple_mem_ref_addr (x))
2878 error ("invalid first operand of MEM_REF");
2879 return x;
2881 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2882 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2884 error ("invalid offset operand of MEM_REF");
2885 return TREE_OPERAND (t, 1);
2887 if (TREE_CODE (x) == ADDR_EXPR
2888 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2889 return x;
2890 *walk_subtrees = 0;
2891 break;
2893 case ASSERT_EXPR:
2894 x = fold (ASSERT_EXPR_COND (t));
2895 if (x == boolean_false_node)
2897 error ("ASSERT_EXPR with an always-false condition");
2898 return *tp;
2900 break;
2902 case MODIFY_EXPR:
2903 error ("MODIFY_EXPR not expected while having tuples");
2904 return *tp;
2906 case ADDR_EXPR:
2908 tree tem;
2910 gcc_assert (is_gimple_address (t));
2912 /* Skip any references (they will be checked when we recurse down the
2913 tree) and ensure that any variable used as a prefix is marked
2914 addressable. */
2915 for (x = TREE_OPERAND (t, 0);
2916 handled_component_p (x);
2917 x = TREE_OPERAND (x, 0))
2920 if ((tem = verify_address (t, x)))
2921 return tem;
2923 if (!(TREE_CODE (x) == VAR_DECL
2924 || TREE_CODE (x) == PARM_DECL
2925 || TREE_CODE (x) == RESULT_DECL))
2926 return NULL;
2928 if (!TREE_ADDRESSABLE (x))
2930 error ("address taken, but ADDRESSABLE bit not set");
2931 return x;
2934 break;
2937 case COND_EXPR:
2938 x = COND_EXPR_COND (t);
2939 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2941 error ("non-integral used in condition");
2942 return x;
2944 if (!is_gimple_condexpr (x))
2946 error ("invalid conditional operand");
2947 return x;
2949 break;
2951 case NON_LVALUE_EXPR:
2952 case TRUTH_NOT_EXPR:
2953 gcc_unreachable ();
2955 CASE_CONVERT:
2956 case FIX_TRUNC_EXPR:
2957 case FLOAT_EXPR:
2958 case NEGATE_EXPR:
2959 case ABS_EXPR:
2960 case BIT_NOT_EXPR:
2961 CHECK_OP (0, "invalid operand to unary operator");
2962 break;
2964 case REALPART_EXPR:
2965 case IMAGPART_EXPR:
2966 case BIT_FIELD_REF:
2967 if (!is_gimple_reg_type (TREE_TYPE (t)))
2969 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2970 return t;
2973 if (TREE_CODE (t) == BIT_FIELD_REF)
2975 tree t0 = TREE_OPERAND (t, 0);
2976 tree t1 = TREE_OPERAND (t, 1);
2977 tree t2 = TREE_OPERAND (t, 2);
2978 if (!tree_fits_uhwi_p (t1)
2979 || !tree_fits_uhwi_p (t2))
2981 error ("invalid position or size operand to BIT_FIELD_REF");
2982 return t;
2984 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2985 && (TYPE_PRECISION (TREE_TYPE (t))
2986 != tree_to_uhwi (t1)))
2988 error ("integral result type precision does not match "
2989 "field size of BIT_FIELD_REF");
2990 return t;
2992 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2993 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2994 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2995 != tree_to_uhwi (t1)))
2997 error ("mode precision of non-integral result does not "
2998 "match field size of BIT_FIELD_REF");
2999 return t;
3001 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3002 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3003 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3005 error ("position plus size exceeds size of referenced object in "
3006 "BIT_FIELD_REF");
3007 return t;
3010 t = TREE_OPERAND (t, 0);
3012 /* Fall-through. */
3013 case COMPONENT_REF:
3014 case ARRAY_REF:
3015 case ARRAY_RANGE_REF:
3016 case VIEW_CONVERT_EXPR:
3017 /* We have a nest of references. Verify that each of the operands
3018 that determine where to reference is either a constant or a variable,
3019 verify that the base is valid, and then show we've already checked
3020 the subtrees. */
3021 while (handled_component_p (t))
3023 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3024 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3025 else if (TREE_CODE (t) == ARRAY_REF
3026 || TREE_CODE (t) == ARRAY_RANGE_REF)
3028 CHECK_OP (1, "invalid array index");
3029 if (TREE_OPERAND (t, 2))
3030 CHECK_OP (2, "invalid array lower bound");
3031 if (TREE_OPERAND (t, 3))
3032 CHECK_OP (3, "invalid array stride");
3034 else if (TREE_CODE (t) == BIT_FIELD_REF
3035 || TREE_CODE (t) == REALPART_EXPR
3036 || TREE_CODE (t) == IMAGPART_EXPR)
3038 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3039 "REALPART_EXPR");
3040 return t;
3043 t = TREE_OPERAND (t, 0);
3046 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3048 error ("invalid reference prefix");
3049 return t;
3051 *walk_subtrees = 0;
3052 break;
3053 case PLUS_EXPR:
3054 case MINUS_EXPR:
3055 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3056 POINTER_PLUS_EXPR. */
3057 if (POINTER_TYPE_P (TREE_TYPE (t)))
3059 error ("invalid operand to plus/minus, type is a pointer");
3060 return t;
3062 CHECK_OP (0, "invalid operand to binary operator");
3063 CHECK_OP (1, "invalid operand to binary operator");
3064 break;
3066 case POINTER_PLUS_EXPR:
3067 /* Check to make sure the first operand is a pointer or reference type. */
3068 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3070 error ("invalid operand to pointer plus, first operand is not a pointer");
3071 return t;
3073 /* Check to make sure the second operand is a ptrofftype. */
3074 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3076 error ("invalid operand to pointer plus, second operand is not an "
3077 "integer type of appropriate width");
3078 return t;
3080 /* FALLTHROUGH */
3081 case LT_EXPR:
3082 case LE_EXPR:
3083 case GT_EXPR:
3084 case GE_EXPR:
3085 case EQ_EXPR:
3086 case NE_EXPR:
3087 case UNORDERED_EXPR:
3088 case ORDERED_EXPR:
3089 case UNLT_EXPR:
3090 case UNLE_EXPR:
3091 case UNGT_EXPR:
3092 case UNGE_EXPR:
3093 case UNEQ_EXPR:
3094 case LTGT_EXPR:
3095 case MULT_EXPR:
3096 case TRUNC_DIV_EXPR:
3097 case CEIL_DIV_EXPR:
3098 case FLOOR_DIV_EXPR:
3099 case ROUND_DIV_EXPR:
3100 case TRUNC_MOD_EXPR:
3101 case CEIL_MOD_EXPR:
3102 case FLOOR_MOD_EXPR:
3103 case ROUND_MOD_EXPR:
3104 case RDIV_EXPR:
3105 case EXACT_DIV_EXPR:
3106 case MIN_EXPR:
3107 case MAX_EXPR:
3108 case LSHIFT_EXPR:
3109 case RSHIFT_EXPR:
3110 case LROTATE_EXPR:
3111 case RROTATE_EXPR:
3112 case BIT_IOR_EXPR:
3113 case BIT_XOR_EXPR:
3114 case BIT_AND_EXPR:
3115 CHECK_OP (0, "invalid operand to binary operator");
3116 CHECK_OP (1, "invalid operand to binary operator");
3117 break;
3119 case CONSTRUCTOR:
3120 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3121 *walk_subtrees = 0;
3122 break;
3124 case CASE_LABEL_EXPR:
3125 if (CASE_CHAIN (t))
3127 error ("invalid CASE_CHAIN");
3128 return t;
3130 break;
3132 default:
3133 break;
3135 return NULL;
3137 #undef CHECK_OP
3141 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3142 Returns true if there is an error, otherwise false. */
3144 static bool
3145 verify_types_in_gimple_min_lval (tree expr)
3147 tree op;
3149 if (is_gimple_id (expr))
3150 return false;
3152 if (TREE_CODE (expr) != TARGET_MEM_REF
3153 && TREE_CODE (expr) != MEM_REF)
3155 error ("invalid expression for min lvalue");
3156 return true;
3159 /* TARGET_MEM_REFs are strange beasts. */
3160 if (TREE_CODE (expr) == TARGET_MEM_REF)
3161 return false;
3163 op = TREE_OPERAND (expr, 0);
3164 if (!is_gimple_val (op))
3166 error ("invalid operand in indirect reference");
3167 debug_generic_stmt (op);
3168 return true;
3170 /* Memory references now generally can involve a value conversion. */
3172 return false;
3175 /* Verify if EXPR is a valid GIMPLE reference expression. If
3176 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3177 if there is an error, otherwise false. */
3179 static bool
3180 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3182 while (handled_component_p (expr))
3184 tree op = TREE_OPERAND (expr, 0);
3186 if (TREE_CODE (expr) == ARRAY_REF
3187 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3189 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3190 || (TREE_OPERAND (expr, 2)
3191 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3192 || (TREE_OPERAND (expr, 3)
3193 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3195 error ("invalid operands to array reference");
3196 debug_generic_stmt (expr);
3197 return true;
3201 /* Verify if the reference array element types are compatible. */
3202 if (TREE_CODE (expr) == ARRAY_REF
3203 && !useless_type_conversion_p (TREE_TYPE (expr),
3204 TREE_TYPE (TREE_TYPE (op))))
3206 error ("type mismatch in array reference");
3207 debug_generic_stmt (TREE_TYPE (expr));
3208 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3209 return true;
3211 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3212 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3213 TREE_TYPE (TREE_TYPE (op))))
3215 error ("type mismatch in array range reference");
3216 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3217 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3218 return true;
3221 if ((TREE_CODE (expr) == REALPART_EXPR
3222 || TREE_CODE (expr) == IMAGPART_EXPR)
3223 && !useless_type_conversion_p (TREE_TYPE (expr),
3224 TREE_TYPE (TREE_TYPE (op))))
3226 error ("type mismatch in real/imagpart reference");
3227 debug_generic_stmt (TREE_TYPE (expr));
3228 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3229 return true;
3232 if (TREE_CODE (expr) == COMPONENT_REF
3233 && !useless_type_conversion_p (TREE_TYPE (expr),
3234 TREE_TYPE (TREE_OPERAND (expr, 1))))
3236 error ("type mismatch in component reference");
3237 debug_generic_stmt (TREE_TYPE (expr));
3238 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3239 return true;
3242 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3244 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3245 that their operand is not an SSA name or an invariant when
3246 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3247 bug). Otherwise there is nothing to verify, gross mismatches at
3248 most invoke undefined behavior. */
3249 if (require_lvalue
3250 && (TREE_CODE (op) == SSA_NAME
3251 || is_gimple_min_invariant (op)))
3253 error ("conversion of an SSA_NAME on the left hand side");
3254 debug_generic_stmt (expr);
3255 return true;
3257 else if (TREE_CODE (op) == SSA_NAME
3258 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3260 error ("conversion of register to a different size");
3261 debug_generic_stmt (expr);
3262 return true;
3264 else if (!handled_component_p (op))
3265 return false;
3268 expr = op;
3271 if (TREE_CODE (expr) == MEM_REF)
3273 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3275 error ("invalid address operand in MEM_REF");
3276 debug_generic_stmt (expr);
3277 return true;
3279 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3280 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3282 error ("invalid offset operand in MEM_REF");
3283 debug_generic_stmt (expr);
3284 return true;
3287 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3289 if (!TMR_BASE (expr)
3290 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3292 error ("invalid address operand in TARGET_MEM_REF");
3293 return true;
3295 if (!TMR_OFFSET (expr)
3296 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3297 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3299 error ("invalid offset operand in TARGET_MEM_REF");
3300 debug_generic_stmt (expr);
3301 return true;
3305 return ((require_lvalue || !is_gimple_min_invariant (expr))
3306 && verify_types_in_gimple_min_lval (expr));
3309 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3310 list of pointer-to types that is trivially convertible to DEST. */
3312 static bool
3313 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3315 tree src;
3317 if (!TYPE_POINTER_TO (src_obj))
3318 return true;
3320 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3321 if (useless_type_conversion_p (dest, src))
3322 return true;
3324 return false;
3327 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3328 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3330 static bool
3331 valid_fixed_convert_types_p (tree type1, tree type2)
3333 return (FIXED_POINT_TYPE_P (type1)
3334 && (INTEGRAL_TYPE_P (type2)
3335 || SCALAR_FLOAT_TYPE_P (type2)
3336 || FIXED_POINT_TYPE_P (type2)));
3339 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3340 is a problem, otherwise false. */
3342 static bool
3343 verify_gimple_call (gcall *stmt)
3345 tree fn = gimple_call_fn (stmt);
3346 tree fntype, fndecl;
3347 unsigned i;
3349 if (gimple_call_internal_p (stmt))
3351 if (fn)
3353 error ("gimple call has two targets");
3354 debug_generic_stmt (fn);
3355 return true;
3358 else
3360 if (!fn)
3362 error ("gimple call has no target");
3363 return true;
3367 if (fn && !is_gimple_call_addr (fn))
3369 error ("invalid function in gimple call");
3370 debug_generic_stmt (fn);
3371 return true;
3374 if (fn
3375 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3376 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3377 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3379 error ("non-function in gimple call");
3380 return true;
3383 fndecl = gimple_call_fndecl (stmt);
3384 if (fndecl
3385 && TREE_CODE (fndecl) == FUNCTION_DECL
3386 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3387 && !DECL_PURE_P (fndecl)
3388 && !TREE_READONLY (fndecl))
3390 error ("invalid pure const state for function");
3391 return true;
3394 if (gimple_call_lhs (stmt)
3395 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3396 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3398 error ("invalid LHS in gimple call");
3399 return true;
3402 if (gimple_call_ctrl_altering_p (stmt)
3403 && gimple_call_lhs (stmt)
3404 && gimple_call_noreturn_p (stmt))
3406 error ("LHS in noreturn call");
3407 return true;
3410 fntype = gimple_call_fntype (stmt);
3411 if (fntype
3412 && gimple_call_lhs (stmt)
3413 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3414 TREE_TYPE (fntype))
3415 /* ??? At least C++ misses conversions at assignments from
3416 void * call results.
3417 ??? Java is completely off. Especially with functions
3418 returning java.lang.Object.
3419 For now simply allow arbitrary pointer type conversions. */
3420 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3421 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3423 error ("invalid conversion in gimple call");
3424 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3425 debug_generic_stmt (TREE_TYPE (fntype));
3426 return true;
3429 if (gimple_call_chain (stmt)
3430 && !is_gimple_val (gimple_call_chain (stmt)))
3432 error ("invalid static chain in gimple call");
3433 debug_generic_stmt (gimple_call_chain (stmt));
3434 return true;
3437 /* If there is a static chain argument, the call should either be
3438 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3439 if (gimple_call_chain (stmt)
3440 && fndecl
3441 && !DECL_STATIC_CHAIN (fndecl))
3443 error ("static chain with function that doesn%'t use one");
3444 return true;
3447 /* ??? The C frontend passes unpromoted arguments in case it
3448 didn't see a function declaration before the call. So for now
3449 leave the call arguments mostly unverified. Once we gimplify
3450 unit-at-a-time we have a chance to fix this. */
3452 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3454 tree arg = gimple_call_arg (stmt, i);
3455 if ((is_gimple_reg_type (TREE_TYPE (arg))
3456 && !is_gimple_val (arg))
3457 || (!is_gimple_reg_type (TREE_TYPE (arg))
3458 && !is_gimple_lvalue (arg)))
3460 error ("invalid argument to gimple call");
3461 debug_generic_expr (arg);
3462 return true;
3466 return false;
3469 /* Verifies the gimple comparison with the result type TYPE and
3470 the operands OP0 and OP1. */
3472 static bool
3473 verify_gimple_comparison (tree type, tree op0, tree op1)
3475 tree op0_type = TREE_TYPE (op0);
3476 tree op1_type = TREE_TYPE (op1);
3478 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3480 error ("invalid operands in gimple comparison");
3481 return true;
3484 /* For comparisons we do not have the operations type as the
3485 effective type the comparison is carried out in. Instead
3486 we require that either the first operand is trivially
3487 convertible into the second, or the other way around.
3488 Because we special-case pointers to void we allow
3489 comparisons of pointers with the same mode as well. */
3490 if (!useless_type_conversion_p (op0_type, op1_type)
3491 && !useless_type_conversion_p (op1_type, op0_type)
3492 && (!POINTER_TYPE_P (op0_type)
3493 || !POINTER_TYPE_P (op1_type)
3494 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3496 error ("mismatching comparison operand types");
3497 debug_generic_expr (op0_type);
3498 debug_generic_expr (op1_type);
3499 return true;
3502 /* The resulting type of a comparison may be an effective boolean type. */
3503 if (INTEGRAL_TYPE_P (type)
3504 && (TREE_CODE (type) == BOOLEAN_TYPE
3505 || TYPE_PRECISION (type) == 1))
3507 if (TREE_CODE (op0_type) == VECTOR_TYPE
3508 || TREE_CODE (op1_type) == VECTOR_TYPE)
3510 error ("vector comparison returning a boolean");
3511 debug_generic_expr (op0_type);
3512 debug_generic_expr (op1_type);
3513 return true;
3516 /* Or an integer vector type with the same size and element count
3517 as the comparison operand types. */
3518 else if (TREE_CODE (type) == VECTOR_TYPE
3519 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3521 if (TREE_CODE (op0_type) != VECTOR_TYPE
3522 || TREE_CODE (op1_type) != VECTOR_TYPE)
3524 error ("non-vector operands in vector comparison");
3525 debug_generic_expr (op0_type);
3526 debug_generic_expr (op1_type);
3527 return true;
3530 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3531 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3532 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3533 /* The result of a vector comparison is of signed
3534 integral type. */
3535 || TYPE_UNSIGNED (TREE_TYPE (type)))
3537 error ("invalid vector comparison resulting type");
3538 debug_generic_expr (type);
3539 return true;
3542 else
3544 error ("bogus comparison result type");
3545 debug_generic_expr (type);
3546 return true;
3549 return false;
3552 /* Verify a gimple assignment statement STMT with an unary rhs.
3553 Returns true if anything is wrong. */
3555 static bool
3556 verify_gimple_assign_unary (gassign *stmt)
3558 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3559 tree lhs = gimple_assign_lhs (stmt);
3560 tree lhs_type = TREE_TYPE (lhs);
3561 tree rhs1 = gimple_assign_rhs1 (stmt);
3562 tree rhs1_type = TREE_TYPE (rhs1);
3564 if (!is_gimple_reg (lhs))
3566 error ("non-register as LHS of unary operation");
3567 return true;
3570 if (!is_gimple_val (rhs1))
3572 error ("invalid operand in unary operation");
3573 return true;
3576 /* First handle conversions. */
3577 switch (rhs_code)
3579 CASE_CONVERT:
3581 /* Allow conversions from pointer type to integral type only if
3582 there is no sign or zero extension involved.
3583 For targets were the precision of ptrofftype doesn't match that
3584 of pointers we need to allow arbitrary conversions to ptrofftype. */
3585 if ((POINTER_TYPE_P (lhs_type)
3586 && INTEGRAL_TYPE_P (rhs1_type))
3587 || (POINTER_TYPE_P (rhs1_type)
3588 && INTEGRAL_TYPE_P (lhs_type)
3589 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3590 || ptrofftype_p (sizetype))))
3591 return false;
3593 /* Allow conversion from integral to offset type and vice versa. */
3594 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3595 && INTEGRAL_TYPE_P (rhs1_type))
3596 || (INTEGRAL_TYPE_P (lhs_type)
3597 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3598 return false;
3600 /* Otherwise assert we are converting between types of the
3601 same kind. */
3602 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3604 error ("invalid types in nop conversion");
3605 debug_generic_expr (lhs_type);
3606 debug_generic_expr (rhs1_type);
3607 return true;
3610 return false;
3613 case ADDR_SPACE_CONVERT_EXPR:
3615 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3616 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3617 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3619 error ("invalid types in address space conversion");
3620 debug_generic_expr (lhs_type);
3621 debug_generic_expr (rhs1_type);
3622 return true;
3625 return false;
3628 case FIXED_CONVERT_EXPR:
3630 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3631 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3633 error ("invalid types in fixed-point conversion");
3634 debug_generic_expr (lhs_type);
3635 debug_generic_expr (rhs1_type);
3636 return true;
3639 return false;
3642 case FLOAT_EXPR:
3644 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3645 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3646 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3648 error ("invalid types in conversion to floating point");
3649 debug_generic_expr (lhs_type);
3650 debug_generic_expr (rhs1_type);
3651 return true;
3654 return false;
3657 case FIX_TRUNC_EXPR:
3659 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3660 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3661 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3663 error ("invalid types in conversion to integer");
3664 debug_generic_expr (lhs_type);
3665 debug_generic_expr (rhs1_type);
3666 return true;
3669 return false;
3671 case REDUC_MAX_EXPR:
3672 case REDUC_MIN_EXPR:
3673 case REDUC_PLUS_EXPR:
3674 if (!VECTOR_TYPE_P (rhs1_type)
3675 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3677 error ("reduction should convert from vector to element type");
3678 debug_generic_expr (lhs_type);
3679 debug_generic_expr (rhs1_type);
3680 return true;
3682 return false;
3684 case VEC_UNPACK_HI_EXPR:
3685 case VEC_UNPACK_LO_EXPR:
3686 case VEC_UNPACK_FLOAT_HI_EXPR:
3687 case VEC_UNPACK_FLOAT_LO_EXPR:
3688 /* FIXME. */
3689 return false;
3691 case NEGATE_EXPR:
3692 case ABS_EXPR:
3693 case BIT_NOT_EXPR:
3694 case PAREN_EXPR:
3695 case CONJ_EXPR:
3696 break;
3698 default:
3699 gcc_unreachable ();
3702 /* For the remaining codes assert there is no conversion involved. */
3703 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3705 error ("non-trivial conversion in unary operation");
3706 debug_generic_expr (lhs_type);
3707 debug_generic_expr (rhs1_type);
3708 return true;
3711 return false;
3714 /* Verify a gimple assignment statement STMT with a binary rhs.
3715 Returns true if anything is wrong. */
3717 static bool
3718 verify_gimple_assign_binary (gassign *stmt)
3720 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3721 tree lhs = gimple_assign_lhs (stmt);
3722 tree lhs_type = TREE_TYPE (lhs);
3723 tree rhs1 = gimple_assign_rhs1 (stmt);
3724 tree rhs1_type = TREE_TYPE (rhs1);
3725 tree rhs2 = gimple_assign_rhs2 (stmt);
3726 tree rhs2_type = TREE_TYPE (rhs2);
3728 if (!is_gimple_reg (lhs))
3730 error ("non-register as LHS of binary operation");
3731 return true;
3734 if (!is_gimple_val (rhs1)
3735 || !is_gimple_val (rhs2))
3737 error ("invalid operands in binary operation");
3738 return true;
3741 /* First handle operations that involve different types. */
3742 switch (rhs_code)
3744 case COMPLEX_EXPR:
3746 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3747 || !(INTEGRAL_TYPE_P (rhs1_type)
3748 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3749 || !(INTEGRAL_TYPE_P (rhs2_type)
3750 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3752 error ("type mismatch in complex expression");
3753 debug_generic_expr (lhs_type);
3754 debug_generic_expr (rhs1_type);
3755 debug_generic_expr (rhs2_type);
3756 return true;
3759 return false;
3762 case LSHIFT_EXPR:
3763 case RSHIFT_EXPR:
3764 case LROTATE_EXPR:
3765 case RROTATE_EXPR:
3767 /* Shifts and rotates are ok on integral types, fixed point
3768 types and integer vector types. */
3769 if ((!INTEGRAL_TYPE_P (rhs1_type)
3770 && !FIXED_POINT_TYPE_P (rhs1_type)
3771 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3772 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3773 || (!INTEGRAL_TYPE_P (rhs2_type)
3774 /* Vector shifts of vectors are also ok. */
3775 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3776 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3777 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3778 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3779 || !useless_type_conversion_p (lhs_type, rhs1_type))
3781 error ("type mismatch in shift expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 debug_generic_expr (rhs2_type);
3785 return true;
3788 return false;
3791 case WIDEN_LSHIFT_EXPR:
3793 if (!INTEGRAL_TYPE_P (lhs_type)
3794 || !INTEGRAL_TYPE_P (rhs1_type)
3795 || TREE_CODE (rhs2) != INTEGER_CST
3796 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3798 error ("type mismatch in widening vector shift expression");
3799 debug_generic_expr (lhs_type);
3800 debug_generic_expr (rhs1_type);
3801 debug_generic_expr (rhs2_type);
3802 return true;
3805 return false;
3808 case VEC_WIDEN_LSHIFT_HI_EXPR:
3809 case VEC_WIDEN_LSHIFT_LO_EXPR:
3811 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3812 || TREE_CODE (lhs_type) != VECTOR_TYPE
3813 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3814 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3815 || TREE_CODE (rhs2) != INTEGER_CST
3816 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3817 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3819 error ("type mismatch in widening vector shift expression");
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 debug_generic_expr (rhs2_type);
3823 return true;
3826 return false;
3829 case PLUS_EXPR:
3830 case MINUS_EXPR:
3832 tree lhs_etype = lhs_type;
3833 tree rhs1_etype = rhs1_type;
3834 tree rhs2_etype = rhs2_type;
3835 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3837 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3838 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3840 error ("invalid non-vector operands to vector valued plus");
3841 return true;
3843 lhs_etype = TREE_TYPE (lhs_type);
3844 rhs1_etype = TREE_TYPE (rhs1_type);
3845 rhs2_etype = TREE_TYPE (rhs2_type);
3847 if (POINTER_TYPE_P (lhs_etype)
3848 || POINTER_TYPE_P (rhs1_etype)
3849 || POINTER_TYPE_P (rhs2_etype))
3851 error ("invalid (pointer) operands to plus/minus");
3852 return true;
3855 /* Continue with generic binary expression handling. */
3856 break;
3859 case POINTER_PLUS_EXPR:
3861 if (!POINTER_TYPE_P (rhs1_type)
3862 || !useless_type_conversion_p (lhs_type, rhs1_type)
3863 || !ptrofftype_p (rhs2_type))
3865 error ("type mismatch in pointer plus expression");
3866 debug_generic_stmt (lhs_type);
3867 debug_generic_stmt (rhs1_type);
3868 debug_generic_stmt (rhs2_type);
3869 return true;
3872 return false;
3875 case TRUTH_ANDIF_EXPR:
3876 case TRUTH_ORIF_EXPR:
3877 case TRUTH_AND_EXPR:
3878 case TRUTH_OR_EXPR:
3879 case TRUTH_XOR_EXPR:
3881 gcc_unreachable ();
3883 case LT_EXPR:
3884 case LE_EXPR:
3885 case GT_EXPR:
3886 case GE_EXPR:
3887 case EQ_EXPR:
3888 case NE_EXPR:
3889 case UNORDERED_EXPR:
3890 case ORDERED_EXPR:
3891 case UNLT_EXPR:
3892 case UNLE_EXPR:
3893 case UNGT_EXPR:
3894 case UNGE_EXPR:
3895 case UNEQ_EXPR:
3896 case LTGT_EXPR:
3897 /* Comparisons are also binary, but the result type is not
3898 connected to the operand types. */
3899 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3901 case WIDEN_MULT_EXPR:
3902 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3903 return true;
3904 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3905 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3907 case WIDEN_SUM_EXPR:
3908 case VEC_WIDEN_MULT_HI_EXPR:
3909 case VEC_WIDEN_MULT_LO_EXPR:
3910 case VEC_WIDEN_MULT_EVEN_EXPR:
3911 case VEC_WIDEN_MULT_ODD_EXPR:
3912 case VEC_PACK_TRUNC_EXPR:
3913 case VEC_PACK_SAT_EXPR:
3914 case VEC_PACK_FIX_TRUNC_EXPR:
3915 /* FIXME. */
3916 return false;
3918 case MULT_EXPR:
3919 case MULT_HIGHPART_EXPR:
3920 case TRUNC_DIV_EXPR:
3921 case CEIL_DIV_EXPR:
3922 case FLOOR_DIV_EXPR:
3923 case ROUND_DIV_EXPR:
3924 case TRUNC_MOD_EXPR:
3925 case CEIL_MOD_EXPR:
3926 case FLOOR_MOD_EXPR:
3927 case ROUND_MOD_EXPR:
3928 case RDIV_EXPR:
3929 case EXACT_DIV_EXPR:
3930 case MIN_EXPR:
3931 case MAX_EXPR:
3932 case BIT_IOR_EXPR:
3933 case BIT_XOR_EXPR:
3934 case BIT_AND_EXPR:
3935 /* Continue with generic binary expression handling. */
3936 break;
3938 default:
3939 gcc_unreachable ();
3942 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3943 || !useless_type_conversion_p (lhs_type, rhs2_type))
3945 error ("type mismatch in binary expression");
3946 debug_generic_stmt (lhs_type);
3947 debug_generic_stmt (rhs1_type);
3948 debug_generic_stmt (rhs2_type);
3949 return true;
3952 return false;
3955 /* Verify a gimple assignment statement STMT with a ternary rhs.
3956 Returns true if anything is wrong. */
3958 static bool
3959 verify_gimple_assign_ternary (gassign *stmt)
3961 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3962 tree lhs = gimple_assign_lhs (stmt);
3963 tree lhs_type = TREE_TYPE (lhs);
3964 tree rhs1 = gimple_assign_rhs1 (stmt);
3965 tree rhs1_type = TREE_TYPE (rhs1);
3966 tree rhs2 = gimple_assign_rhs2 (stmt);
3967 tree rhs2_type = TREE_TYPE (rhs2);
3968 tree rhs3 = gimple_assign_rhs3 (stmt);
3969 tree rhs3_type = TREE_TYPE (rhs3);
3971 if (!is_gimple_reg (lhs))
3973 error ("non-register as LHS of ternary operation");
3974 return true;
3977 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3978 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3979 || !is_gimple_val (rhs2)
3980 || !is_gimple_val (rhs3))
3982 error ("invalid operands in ternary operation");
3983 return true;
3986 /* First handle operations that involve different types. */
3987 switch (rhs_code)
3989 case WIDEN_MULT_PLUS_EXPR:
3990 case WIDEN_MULT_MINUS_EXPR:
3991 if ((!INTEGRAL_TYPE_P (rhs1_type)
3992 && !FIXED_POINT_TYPE_P (rhs1_type))
3993 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3994 || !useless_type_conversion_p (lhs_type, rhs3_type)
3995 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3996 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3998 error ("type mismatch in widening multiply-accumulate expression");
3999 debug_generic_expr (lhs_type);
4000 debug_generic_expr (rhs1_type);
4001 debug_generic_expr (rhs2_type);
4002 debug_generic_expr (rhs3_type);
4003 return true;
4005 break;
4007 case FMA_EXPR:
4008 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4009 || !useless_type_conversion_p (lhs_type, rhs2_type)
4010 || !useless_type_conversion_p (lhs_type, rhs3_type))
4012 error ("type mismatch in fused multiply-add expression");
4013 debug_generic_expr (lhs_type);
4014 debug_generic_expr (rhs1_type);
4015 debug_generic_expr (rhs2_type);
4016 debug_generic_expr (rhs3_type);
4017 return true;
4019 break;
4021 case COND_EXPR:
4022 case VEC_COND_EXPR:
4023 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4024 || !useless_type_conversion_p (lhs_type, rhs3_type))
4026 error ("type mismatch in conditional expression");
4027 debug_generic_expr (lhs_type);
4028 debug_generic_expr (rhs2_type);
4029 debug_generic_expr (rhs3_type);
4030 return true;
4032 break;
4034 case VEC_PERM_EXPR:
4035 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4036 || !useless_type_conversion_p (lhs_type, rhs2_type))
4038 error ("type mismatch in vector permute expression");
4039 debug_generic_expr (lhs_type);
4040 debug_generic_expr (rhs1_type);
4041 debug_generic_expr (rhs2_type);
4042 debug_generic_expr (rhs3_type);
4043 return true;
4046 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4047 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4048 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4050 error ("vector types expected in vector permute expression");
4051 debug_generic_expr (lhs_type);
4052 debug_generic_expr (rhs1_type);
4053 debug_generic_expr (rhs2_type);
4054 debug_generic_expr (rhs3_type);
4055 return true;
4058 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4059 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4060 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4061 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4062 != TYPE_VECTOR_SUBPARTS (lhs_type))
4064 error ("vectors with different element number found "
4065 "in vector permute expression");
4066 debug_generic_expr (lhs_type);
4067 debug_generic_expr (rhs1_type);
4068 debug_generic_expr (rhs2_type);
4069 debug_generic_expr (rhs3_type);
4070 return true;
4073 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4074 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4075 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4077 error ("invalid mask type in vector permute expression");
4078 debug_generic_expr (lhs_type);
4079 debug_generic_expr (rhs1_type);
4080 debug_generic_expr (rhs2_type);
4081 debug_generic_expr (rhs3_type);
4082 return true;
4085 return false;
4087 case SAD_EXPR:
4088 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4089 || !useless_type_conversion_p (lhs_type, rhs3_type)
4090 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4091 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4092 > GET_MODE_BITSIZE (GET_MODE_INNER
4093 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4095 error ("type mismatch in sad expression");
4096 debug_generic_expr (lhs_type);
4097 debug_generic_expr (rhs1_type);
4098 debug_generic_expr (rhs2_type);
4099 debug_generic_expr (rhs3_type);
4100 return true;
4103 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4104 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4105 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4107 error ("vector types expected in sad expression");
4108 debug_generic_expr (lhs_type);
4109 debug_generic_expr (rhs1_type);
4110 debug_generic_expr (rhs2_type);
4111 debug_generic_expr (rhs3_type);
4112 return true;
4115 return false;
4117 case DOT_PROD_EXPR:
4118 case REALIGN_LOAD_EXPR:
4119 /* FIXME. */
4120 return false;
4122 default:
4123 gcc_unreachable ();
4125 return false;
4128 /* Verify a gimple assignment statement STMT with a single rhs.
4129 Returns true if anything is wrong. */
4131 static bool
4132 verify_gimple_assign_single (gassign *stmt)
4134 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4135 tree lhs = gimple_assign_lhs (stmt);
4136 tree lhs_type = TREE_TYPE (lhs);
4137 tree rhs1 = gimple_assign_rhs1 (stmt);
4138 tree rhs1_type = TREE_TYPE (rhs1);
4139 bool res = false;
4141 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4143 error ("non-trivial conversion at assignment");
4144 debug_generic_expr (lhs_type);
4145 debug_generic_expr (rhs1_type);
4146 return true;
4149 if (gimple_clobber_p (stmt)
4150 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4152 error ("non-decl/MEM_REF LHS in clobber statement");
4153 debug_generic_expr (lhs);
4154 return true;
4157 if (handled_component_p (lhs)
4158 || TREE_CODE (lhs) == MEM_REF
4159 || TREE_CODE (lhs) == TARGET_MEM_REF)
4160 res |= verify_types_in_gimple_reference (lhs, true);
4162 /* Special codes we cannot handle via their class. */
4163 switch (rhs_code)
4165 case ADDR_EXPR:
4167 tree op = TREE_OPERAND (rhs1, 0);
4168 if (!is_gimple_addressable (op))
4170 error ("invalid operand in unary expression");
4171 return true;
4174 /* Technically there is no longer a need for matching types, but
4175 gimple hygiene asks for this check. In LTO we can end up
4176 combining incompatible units and thus end up with addresses
4177 of globals that change their type to a common one. */
4178 if (!in_lto_p
4179 && !types_compatible_p (TREE_TYPE (op),
4180 TREE_TYPE (TREE_TYPE (rhs1)))
4181 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4182 TREE_TYPE (op)))
4184 error ("type mismatch in address expression");
4185 debug_generic_stmt (TREE_TYPE (rhs1));
4186 debug_generic_stmt (TREE_TYPE (op));
4187 return true;
4190 return verify_types_in_gimple_reference (op, true);
4193 /* tcc_reference */
4194 case INDIRECT_REF:
4195 error ("INDIRECT_REF in gimple IL");
4196 return true;
4198 case COMPONENT_REF:
4199 case BIT_FIELD_REF:
4200 case ARRAY_REF:
4201 case ARRAY_RANGE_REF:
4202 case VIEW_CONVERT_EXPR:
4203 case REALPART_EXPR:
4204 case IMAGPART_EXPR:
4205 case TARGET_MEM_REF:
4206 case MEM_REF:
4207 if (!is_gimple_reg (lhs)
4208 && is_gimple_reg_type (TREE_TYPE (lhs)))
4210 error ("invalid rhs for gimple memory store");
4211 debug_generic_stmt (lhs);
4212 debug_generic_stmt (rhs1);
4213 return true;
4215 return res || verify_types_in_gimple_reference (rhs1, false);
4217 /* tcc_constant */
4218 case SSA_NAME:
4219 case INTEGER_CST:
4220 case REAL_CST:
4221 case FIXED_CST:
4222 case COMPLEX_CST:
4223 case VECTOR_CST:
4224 case STRING_CST:
4225 return res;
4227 /* tcc_declaration */
4228 case CONST_DECL:
4229 return res;
4230 case VAR_DECL:
4231 case PARM_DECL:
4232 if (!is_gimple_reg (lhs)
4233 && !is_gimple_reg (rhs1)
4234 && is_gimple_reg_type (TREE_TYPE (lhs)))
4236 error ("invalid rhs for gimple memory store");
4237 debug_generic_stmt (lhs);
4238 debug_generic_stmt (rhs1);
4239 return true;
4241 return res;
4243 case CONSTRUCTOR:
4244 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4246 unsigned int i;
4247 tree elt_i, elt_v, elt_t = NULL_TREE;
4249 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4250 return res;
4251 /* For vector CONSTRUCTORs we require that either it is empty
4252 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4253 (then the element count must be correct to cover the whole
4254 outer vector and index must be NULL on all elements, or it is
4255 a CONSTRUCTOR of scalar elements, where we as an exception allow
4256 smaller number of elements (assuming zero filling) and
4257 consecutive indexes as compared to NULL indexes (such
4258 CONSTRUCTORs can appear in the IL from FEs). */
4259 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4261 if (elt_t == NULL_TREE)
4263 elt_t = TREE_TYPE (elt_v);
4264 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4266 tree elt_t = TREE_TYPE (elt_v);
4267 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4268 TREE_TYPE (elt_t)))
4270 error ("incorrect type of vector CONSTRUCTOR"
4271 " elements");
4272 debug_generic_stmt (rhs1);
4273 return true;
4275 else if (CONSTRUCTOR_NELTS (rhs1)
4276 * TYPE_VECTOR_SUBPARTS (elt_t)
4277 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4279 error ("incorrect number of vector CONSTRUCTOR"
4280 " elements");
4281 debug_generic_stmt (rhs1);
4282 return true;
4285 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4286 elt_t))
4288 error ("incorrect type of vector CONSTRUCTOR elements");
4289 debug_generic_stmt (rhs1);
4290 return true;
4292 else if (CONSTRUCTOR_NELTS (rhs1)
4293 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4295 error ("incorrect number of vector CONSTRUCTOR elements");
4296 debug_generic_stmt (rhs1);
4297 return true;
4300 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4302 error ("incorrect type of vector CONSTRUCTOR elements");
4303 debug_generic_stmt (rhs1);
4304 return true;
4306 if (elt_i != NULL_TREE
4307 && (TREE_CODE (elt_t) == VECTOR_TYPE
4308 || TREE_CODE (elt_i) != INTEGER_CST
4309 || compare_tree_int (elt_i, i) != 0))
4311 error ("vector CONSTRUCTOR with non-NULL element index");
4312 debug_generic_stmt (rhs1);
4313 return true;
4315 if (!is_gimple_val (elt_v))
4317 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4318 debug_generic_stmt (rhs1);
4319 return true;
4323 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4325 error ("non-vector CONSTRUCTOR with elements");
4326 debug_generic_stmt (rhs1);
4327 return true;
4329 return res;
4330 case OBJ_TYPE_REF:
4331 case ASSERT_EXPR:
4332 case WITH_SIZE_EXPR:
4333 /* FIXME. */
4334 return res;
4336 default:;
4339 return res;
4342 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4343 is a problem, otherwise false. */
4345 static bool
4346 verify_gimple_assign (gassign *stmt)
4348 switch (gimple_assign_rhs_class (stmt))
4350 case GIMPLE_SINGLE_RHS:
4351 return verify_gimple_assign_single (stmt);
4353 case GIMPLE_UNARY_RHS:
4354 return verify_gimple_assign_unary (stmt);
4356 case GIMPLE_BINARY_RHS:
4357 return verify_gimple_assign_binary (stmt);
4359 case GIMPLE_TERNARY_RHS:
4360 return verify_gimple_assign_ternary (stmt);
4362 default:
4363 gcc_unreachable ();
4367 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4368 is a problem, otherwise false. */
4370 static bool
4371 verify_gimple_return (greturn *stmt)
4373 tree op = gimple_return_retval (stmt);
4374 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4376 /* We cannot test for present return values as we do not fix up missing
4377 return values from the original source. */
4378 if (op == NULL)
4379 return false;
4381 if (!is_gimple_val (op)
4382 && TREE_CODE (op) != RESULT_DECL)
4384 error ("invalid operand in return statement");
4385 debug_generic_stmt (op);
4386 return true;
4389 if ((TREE_CODE (op) == RESULT_DECL
4390 && DECL_BY_REFERENCE (op))
4391 || (TREE_CODE (op) == SSA_NAME
4392 && SSA_NAME_VAR (op)
4393 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4394 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4395 op = TREE_TYPE (op);
4397 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4399 error ("invalid conversion in return statement");
4400 debug_generic_stmt (restype);
4401 debug_generic_stmt (TREE_TYPE (op));
4402 return true;
4405 return false;
4409 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4410 is a problem, otherwise false. */
4412 static bool
4413 verify_gimple_goto (ggoto *stmt)
4415 tree dest = gimple_goto_dest (stmt);
4417 /* ??? We have two canonical forms of direct goto destinations, a
4418 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4419 if (TREE_CODE (dest) != LABEL_DECL
4420 && (!is_gimple_val (dest)
4421 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4423 error ("goto destination is neither a label nor a pointer");
4424 return true;
4427 return false;
4430 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4431 is a problem, otherwise false. */
4433 static bool
4434 verify_gimple_switch (gswitch *stmt)
4436 unsigned int i, n;
4437 tree elt, prev_upper_bound = NULL_TREE;
4438 tree index_type, elt_type = NULL_TREE;
4440 if (!is_gimple_val (gimple_switch_index (stmt)))
4442 error ("invalid operand to switch statement");
4443 debug_generic_stmt (gimple_switch_index (stmt));
4444 return true;
4447 index_type = TREE_TYPE (gimple_switch_index (stmt));
4448 if (! INTEGRAL_TYPE_P (index_type))
4450 error ("non-integral type switch statement");
4451 debug_generic_expr (index_type);
4452 return true;
4455 elt = gimple_switch_label (stmt, 0);
4456 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4458 error ("invalid default case label in switch statement");
4459 debug_generic_expr (elt);
4460 return true;
4463 n = gimple_switch_num_labels (stmt);
4464 for (i = 1; i < n; i++)
4466 elt = gimple_switch_label (stmt, i);
4468 if (! CASE_LOW (elt))
4470 error ("invalid case label in switch statement");
4471 debug_generic_expr (elt);
4472 return true;
4474 if (CASE_HIGH (elt)
4475 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4477 error ("invalid case range in switch statement");
4478 debug_generic_expr (elt);
4479 return true;
4482 if (elt_type)
4484 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4485 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4487 error ("type mismatch for case label in switch statement");
4488 debug_generic_expr (elt);
4489 return true;
4492 else
4494 elt_type = TREE_TYPE (CASE_LOW (elt));
4495 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4497 error ("type precision mismatch in switch statement");
4498 return true;
4502 if (prev_upper_bound)
4504 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4506 error ("case labels not sorted in switch statement");
4507 return true;
4511 prev_upper_bound = CASE_HIGH (elt);
4512 if (! prev_upper_bound)
4513 prev_upper_bound = CASE_LOW (elt);
4516 return false;
4519 /* Verify a gimple debug statement STMT.
4520 Returns true if anything is wrong. */
4522 static bool
4523 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4525 /* There isn't much that could be wrong in a gimple debug stmt. A
4526 gimple debug bind stmt, for example, maps a tree, that's usually
4527 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4528 component or member of an aggregate type, to another tree, that
4529 can be an arbitrary expression. These stmts expand into debug
4530 insns, and are converted to debug notes by var-tracking.c. */
4531 return false;
4534 /* Verify a gimple label statement STMT.
4535 Returns true if anything is wrong. */
4537 static bool
4538 verify_gimple_label (glabel *stmt)
4540 tree decl = gimple_label_label (stmt);
4541 int uid;
4542 bool err = false;
4544 if (TREE_CODE (decl) != LABEL_DECL)
4545 return true;
4546 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4547 && DECL_CONTEXT (decl) != current_function_decl)
4549 error ("label's context is not the current function decl");
4550 err |= true;
4553 uid = LABEL_DECL_UID (decl);
4554 if (cfun->cfg
4555 && (uid == -1
4556 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4558 error ("incorrect entry in label_to_block_map");
4559 err |= true;
4562 uid = EH_LANDING_PAD_NR (decl);
4563 if (uid)
4565 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4566 if (decl != lp->post_landing_pad)
4568 error ("incorrect setting of landing pad number");
4569 err |= true;
4573 return err;
4576 /* Verify a gimple cond statement STMT.
4577 Returns true if anything is wrong. */
4579 static bool
4580 verify_gimple_cond (gcond *stmt)
4582 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4584 error ("invalid comparison code in gimple cond");
4585 return true;
4587 if (!(!gimple_cond_true_label (stmt)
4588 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4589 || !(!gimple_cond_false_label (stmt)
4590 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4592 error ("invalid labels in gimple cond");
4593 return true;
4596 return verify_gimple_comparison (boolean_type_node,
4597 gimple_cond_lhs (stmt),
4598 gimple_cond_rhs (stmt));
4601 /* Verify the GIMPLE statement STMT. Returns true if there is an
4602 error, otherwise false. */
4604 static bool
4605 verify_gimple_stmt (gimple stmt)
4607 switch (gimple_code (stmt))
4609 case GIMPLE_ASSIGN:
4610 return verify_gimple_assign (as_a <gassign *> (stmt));
4612 case GIMPLE_LABEL:
4613 return verify_gimple_label (as_a <glabel *> (stmt));
4615 case GIMPLE_CALL:
4616 return verify_gimple_call (as_a <gcall *> (stmt));
4618 case GIMPLE_COND:
4619 return verify_gimple_cond (as_a <gcond *> (stmt));
4621 case GIMPLE_GOTO:
4622 return verify_gimple_goto (as_a <ggoto *> (stmt));
4624 case GIMPLE_SWITCH:
4625 return verify_gimple_switch (as_a <gswitch *> (stmt));
4627 case GIMPLE_RETURN:
4628 return verify_gimple_return (as_a <greturn *> (stmt));
4630 case GIMPLE_ASM:
4631 return false;
4633 case GIMPLE_TRANSACTION:
4634 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4636 /* Tuples that do not have tree operands. */
4637 case GIMPLE_NOP:
4638 case GIMPLE_PREDICT:
4639 case GIMPLE_RESX:
4640 case GIMPLE_EH_DISPATCH:
4641 case GIMPLE_EH_MUST_NOT_THROW:
4642 return false;
4644 CASE_GIMPLE_OMP:
4645 /* OpenMP directives are validated by the FE and never operated
4646 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4647 non-gimple expressions when the main index variable has had
4648 its address taken. This does not affect the loop itself
4649 because the header of an GIMPLE_OMP_FOR is merely used to determine
4650 how to setup the parallel iteration. */
4651 return false;
4653 case GIMPLE_DEBUG:
4654 return verify_gimple_debug (stmt);
4656 default:
4657 gcc_unreachable ();
4661 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4662 and false otherwise. */
4664 static bool
4665 verify_gimple_phi (gimple phi)
4667 bool err = false;
4668 unsigned i;
4669 tree phi_result = gimple_phi_result (phi);
4670 bool virtual_p;
4672 if (!phi_result)
4674 error ("invalid PHI result");
4675 return true;
4678 virtual_p = virtual_operand_p (phi_result);
4679 if (TREE_CODE (phi_result) != SSA_NAME
4680 || (virtual_p
4681 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4683 error ("invalid PHI result");
4684 err = true;
4687 for (i = 0; i < gimple_phi_num_args (phi); i++)
4689 tree t = gimple_phi_arg_def (phi, i);
4691 if (!t)
4693 error ("missing PHI def");
4694 err |= true;
4695 continue;
4697 /* Addressable variables do have SSA_NAMEs but they
4698 are not considered gimple values. */
4699 else if ((TREE_CODE (t) == SSA_NAME
4700 && virtual_p != virtual_operand_p (t))
4701 || (virtual_p
4702 && (TREE_CODE (t) != SSA_NAME
4703 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4704 || (!virtual_p
4705 && !is_gimple_val (t)))
4707 error ("invalid PHI argument");
4708 debug_generic_expr (t);
4709 err |= true;
4711 #ifdef ENABLE_TYPES_CHECKING
4712 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4714 error ("incompatible types in PHI argument %u", i);
4715 debug_generic_stmt (TREE_TYPE (phi_result));
4716 debug_generic_stmt (TREE_TYPE (t));
4717 err |= true;
4719 #endif
4722 return err;
4725 /* Verify the GIMPLE statements inside the sequence STMTS. */
4727 static bool
4728 verify_gimple_in_seq_2 (gimple_seq stmts)
4730 gimple_stmt_iterator ittr;
4731 bool err = false;
4733 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4735 gimple stmt = gsi_stmt (ittr);
4737 switch (gimple_code (stmt))
4739 case GIMPLE_BIND:
4740 err |= verify_gimple_in_seq_2 (
4741 gimple_bind_body (as_a <gbind *> (stmt)));
4742 break;
4744 case GIMPLE_TRY:
4745 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4746 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4747 break;
4749 case GIMPLE_EH_FILTER:
4750 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4751 break;
4753 case GIMPLE_EH_ELSE:
4755 geh_else *eh_else = as_a <geh_else *> (stmt);
4756 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4757 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4759 break;
4761 case GIMPLE_CATCH:
4762 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4763 as_a <gcatch *> (stmt)));
4764 break;
4766 case GIMPLE_TRANSACTION:
4767 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4768 break;
4770 default:
4772 bool err2 = verify_gimple_stmt (stmt);
4773 if (err2)
4774 debug_gimple_stmt (stmt);
4775 err |= err2;
4780 return err;
4783 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4784 is a problem, otherwise false. */
4786 static bool
4787 verify_gimple_transaction (gtransaction *stmt)
4789 tree lab = gimple_transaction_label (stmt);
4790 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4791 return true;
4792 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4796 /* Verify the GIMPLE statements inside the statement list STMTS. */
4798 DEBUG_FUNCTION void
4799 verify_gimple_in_seq (gimple_seq stmts)
4801 timevar_push (TV_TREE_STMT_VERIFY);
4802 if (verify_gimple_in_seq_2 (stmts))
4803 internal_error ("verify_gimple failed");
4804 timevar_pop (TV_TREE_STMT_VERIFY);
4807 /* Return true when the T can be shared. */
4809 static bool
4810 tree_node_can_be_shared (tree t)
4812 if (IS_TYPE_OR_DECL_P (t)
4813 || is_gimple_min_invariant (t)
4814 || TREE_CODE (t) == SSA_NAME
4815 || t == error_mark_node
4816 || TREE_CODE (t) == IDENTIFIER_NODE)
4817 return true;
4819 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4820 return true;
4822 if (DECL_P (t))
4823 return true;
4825 return false;
4828 /* Called via walk_tree. Verify tree sharing. */
4830 static tree
4831 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4833 hash_set<void *> *visited = (hash_set<void *> *) data;
4835 if (tree_node_can_be_shared (*tp))
4837 *walk_subtrees = false;
4838 return NULL;
4841 if (visited->add (*tp))
4842 return *tp;
4844 return NULL;
4847 /* Called via walk_gimple_stmt. Verify tree sharing. */
4849 static tree
4850 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4852 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4853 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4856 static bool eh_error_found;
4857 bool
4858 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4859 hash_set<gimple> *visited)
4861 if (!visited->contains (stmt))
4863 error ("dead STMT in EH table");
4864 debug_gimple_stmt (stmt);
4865 eh_error_found = true;
4867 return true;
4870 /* Verify if the location LOCs block is in BLOCKS. */
4872 static bool
4873 verify_location (hash_set<tree> *blocks, location_t loc)
4875 tree block = LOCATION_BLOCK (loc);
4876 if (block != NULL_TREE
4877 && !blocks->contains (block))
4879 error ("location references block not in block tree");
4880 return true;
4882 if (block != NULL_TREE)
4883 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4884 return false;
4887 /* Called via walk_tree. Verify that expressions have no blocks. */
4889 static tree
4890 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4892 if (!EXPR_P (*tp))
4894 *walk_subtrees = false;
4895 return NULL;
4898 location_t loc = EXPR_LOCATION (*tp);
4899 if (LOCATION_BLOCK (loc) != NULL)
4900 return *tp;
4902 return NULL;
4905 /* Called via walk_tree. Verify locations of expressions. */
4907 static tree
4908 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4910 hash_set<tree> *blocks = (hash_set<tree> *) data;
4912 if (TREE_CODE (*tp) == VAR_DECL
4913 && DECL_HAS_DEBUG_EXPR_P (*tp))
4915 tree t = DECL_DEBUG_EXPR (*tp);
4916 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4917 if (addr)
4918 return addr;
4920 if ((TREE_CODE (*tp) == VAR_DECL
4921 || TREE_CODE (*tp) == PARM_DECL
4922 || TREE_CODE (*tp) == RESULT_DECL)
4923 && DECL_HAS_VALUE_EXPR_P (*tp))
4925 tree t = DECL_VALUE_EXPR (*tp);
4926 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4927 if (addr)
4928 return addr;
4931 if (!EXPR_P (*tp))
4933 *walk_subtrees = false;
4934 return NULL;
4937 location_t loc = EXPR_LOCATION (*tp);
4938 if (verify_location (blocks, loc))
4939 return *tp;
4941 return NULL;
4944 /* Called via walk_gimple_op. Verify locations of expressions. */
4946 static tree
4947 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4949 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4950 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4953 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4955 static void
4956 collect_subblocks (hash_set<tree> *blocks, tree block)
4958 tree t;
4959 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4961 blocks->add (t);
4962 collect_subblocks (blocks, t);
4966 /* Verify the GIMPLE statements in the CFG of FN. */
4968 DEBUG_FUNCTION void
4969 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4971 basic_block bb;
4972 bool err = false;
4974 timevar_push (TV_TREE_STMT_VERIFY);
4975 hash_set<void *> visited;
4976 hash_set<gimple> visited_stmts;
4978 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4979 hash_set<tree> blocks;
4980 if (DECL_INITIAL (fn->decl))
4982 blocks.add (DECL_INITIAL (fn->decl));
4983 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4986 FOR_EACH_BB_FN (bb, fn)
4988 gimple_stmt_iterator gsi;
4990 for (gphi_iterator gpi = gsi_start_phis (bb);
4991 !gsi_end_p (gpi);
4992 gsi_next (&gpi))
4994 gphi *phi = gpi.phi ();
4995 bool err2 = false;
4996 unsigned i;
4998 visited_stmts.add (phi);
5000 if (gimple_bb (phi) != bb)
5002 error ("gimple_bb (phi) is set to a wrong basic block");
5003 err2 = true;
5006 err2 |= verify_gimple_phi (phi);
5008 /* Only PHI arguments have locations. */
5009 if (gimple_location (phi) != UNKNOWN_LOCATION)
5011 error ("PHI node with location");
5012 err2 = true;
5015 for (i = 0; i < gimple_phi_num_args (phi); i++)
5017 tree arg = gimple_phi_arg_def (phi, i);
5018 tree addr = walk_tree (&arg, verify_node_sharing_1,
5019 &visited, NULL);
5020 if (addr)
5022 error ("incorrect sharing of tree nodes");
5023 debug_generic_expr (addr);
5024 err2 |= true;
5026 location_t loc = gimple_phi_arg_location (phi, i);
5027 if (virtual_operand_p (gimple_phi_result (phi))
5028 && loc != UNKNOWN_LOCATION)
5030 error ("virtual PHI with argument locations");
5031 err2 = true;
5033 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5034 if (addr)
5036 debug_generic_expr (addr);
5037 err2 = true;
5039 err2 |= verify_location (&blocks, loc);
5042 if (err2)
5043 debug_gimple_stmt (phi);
5044 err |= err2;
5047 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5049 gimple stmt = gsi_stmt (gsi);
5050 bool err2 = false;
5051 struct walk_stmt_info wi;
5052 tree addr;
5053 int lp_nr;
5055 visited_stmts.add (stmt);
5057 if (gimple_bb (stmt) != bb)
5059 error ("gimple_bb (stmt) is set to a wrong basic block");
5060 err2 = true;
5063 err2 |= verify_gimple_stmt (stmt);
5064 err2 |= verify_location (&blocks, gimple_location (stmt));
5066 memset (&wi, 0, sizeof (wi));
5067 wi.info = (void *) &visited;
5068 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5069 if (addr)
5071 error ("incorrect sharing of tree nodes");
5072 debug_generic_expr (addr);
5073 err2 |= true;
5076 memset (&wi, 0, sizeof (wi));
5077 wi.info = (void *) &blocks;
5078 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5079 if (addr)
5081 debug_generic_expr (addr);
5082 err2 |= true;
5085 /* ??? Instead of not checking these stmts at all the walker
5086 should know its context via wi. */
5087 if (!is_gimple_debug (stmt)
5088 && !is_gimple_omp (stmt))
5090 memset (&wi, 0, sizeof (wi));
5091 addr = walk_gimple_op (stmt, verify_expr, &wi);
5092 if (addr)
5094 debug_generic_expr (addr);
5095 inform (gimple_location (stmt), "in statement");
5096 err2 |= true;
5100 /* If the statement is marked as part of an EH region, then it is
5101 expected that the statement could throw. Verify that when we
5102 have optimizations that simplify statements such that we prove
5103 that they cannot throw, that we update other data structures
5104 to match. */
5105 lp_nr = lookup_stmt_eh_lp (stmt);
5106 if (lp_nr > 0)
5108 if (!stmt_could_throw_p (stmt))
5110 if (verify_nothrow)
5112 error ("statement marked for throw, but doesn%'t");
5113 err2 |= true;
5116 else if (!gsi_one_before_end_p (gsi))
5118 error ("statement marked for throw in middle of block");
5119 err2 |= true;
5123 if (err2)
5124 debug_gimple_stmt (stmt);
5125 err |= err2;
5129 eh_error_found = false;
5130 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5131 if (eh_table)
5132 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5133 (&visited_stmts);
5135 if (err || eh_error_found)
5136 internal_error ("verify_gimple failed");
5138 verify_histograms ();
5139 timevar_pop (TV_TREE_STMT_VERIFY);
5143 /* Verifies that the flow information is OK. */
5145 static int
5146 gimple_verify_flow_info (void)
5148 int err = 0;
5149 basic_block bb;
5150 gimple_stmt_iterator gsi;
5151 gimple stmt;
5152 edge e;
5153 edge_iterator ei;
5155 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5156 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5158 error ("ENTRY_BLOCK has IL associated with it");
5159 err = 1;
5162 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5163 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5165 error ("EXIT_BLOCK has IL associated with it");
5166 err = 1;
5169 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5170 if (e->flags & EDGE_FALLTHRU)
5172 error ("fallthru to exit from bb %d", e->src->index);
5173 err = 1;
5176 FOR_EACH_BB_FN (bb, cfun)
5178 bool found_ctrl_stmt = false;
5180 stmt = NULL;
5182 /* Skip labels on the start of basic block. */
5183 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5185 tree label;
5186 gimple prev_stmt = stmt;
5188 stmt = gsi_stmt (gsi);
5190 if (gimple_code (stmt) != GIMPLE_LABEL)
5191 break;
5193 label = gimple_label_label (as_a <glabel *> (stmt));
5194 if (prev_stmt && DECL_NONLOCAL (label))
5196 error ("nonlocal label ");
5197 print_generic_expr (stderr, label, 0);
5198 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5199 bb->index);
5200 err = 1;
5203 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5205 error ("EH landing pad label ");
5206 print_generic_expr (stderr, label, 0);
5207 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5208 bb->index);
5209 err = 1;
5212 if (label_to_block (label) != bb)
5214 error ("label ");
5215 print_generic_expr (stderr, label, 0);
5216 fprintf (stderr, " to block does not match in bb %d",
5217 bb->index);
5218 err = 1;
5221 if (decl_function_context (label) != current_function_decl)
5223 error ("label ");
5224 print_generic_expr (stderr, label, 0);
5225 fprintf (stderr, " has incorrect context in bb %d",
5226 bb->index);
5227 err = 1;
5231 /* Verify that body of basic block BB is free of control flow. */
5232 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5234 gimple stmt = gsi_stmt (gsi);
5236 if (found_ctrl_stmt)
5238 error ("control flow in the middle of basic block %d",
5239 bb->index);
5240 err = 1;
5243 if (stmt_ends_bb_p (stmt))
5244 found_ctrl_stmt = true;
5246 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5248 error ("label ");
5249 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5250 fprintf (stderr, " in the middle of basic block %d", bb->index);
5251 err = 1;
5255 gsi = gsi_last_bb (bb);
5256 if (gsi_end_p (gsi))
5257 continue;
5259 stmt = gsi_stmt (gsi);
5261 if (gimple_code (stmt) == GIMPLE_LABEL)
5262 continue;
5264 err |= verify_eh_edges (stmt);
5266 if (is_ctrl_stmt (stmt))
5268 FOR_EACH_EDGE (e, ei, bb->succs)
5269 if (e->flags & EDGE_FALLTHRU)
5271 error ("fallthru edge after a control statement in bb %d",
5272 bb->index);
5273 err = 1;
5277 if (gimple_code (stmt) != GIMPLE_COND)
5279 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5280 after anything else but if statement. */
5281 FOR_EACH_EDGE (e, ei, bb->succs)
5282 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5284 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5285 bb->index);
5286 err = 1;
5290 switch (gimple_code (stmt))
5292 case GIMPLE_COND:
5294 edge true_edge;
5295 edge false_edge;
5297 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5299 if (!true_edge
5300 || !false_edge
5301 || !(true_edge->flags & EDGE_TRUE_VALUE)
5302 || !(false_edge->flags & EDGE_FALSE_VALUE)
5303 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5304 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5305 || EDGE_COUNT (bb->succs) >= 3)
5307 error ("wrong outgoing edge flags at end of bb %d",
5308 bb->index);
5309 err = 1;
5312 break;
5314 case GIMPLE_GOTO:
5315 if (simple_goto_p (stmt))
5317 error ("explicit goto at end of bb %d", bb->index);
5318 err = 1;
5320 else
5322 /* FIXME. We should double check that the labels in the
5323 destination blocks have their address taken. */
5324 FOR_EACH_EDGE (e, ei, bb->succs)
5325 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5326 | EDGE_FALSE_VALUE))
5327 || !(e->flags & EDGE_ABNORMAL))
5329 error ("wrong outgoing edge flags at end of bb %d",
5330 bb->index);
5331 err = 1;
5334 break;
5336 case GIMPLE_CALL:
5337 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5338 break;
5339 /* ... fallthru ... */
5340 case GIMPLE_RETURN:
5341 if (!single_succ_p (bb)
5342 || (single_succ_edge (bb)->flags
5343 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5344 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5346 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5347 err = 1;
5349 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5351 error ("return edge does not point to exit in bb %d",
5352 bb->index);
5353 err = 1;
5355 break;
5357 case GIMPLE_SWITCH:
5359 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5360 tree prev;
5361 edge e;
5362 size_t i, n;
5364 n = gimple_switch_num_labels (switch_stmt);
5366 /* Mark all the destination basic blocks. */
5367 for (i = 0; i < n; ++i)
5369 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5370 basic_block label_bb = label_to_block (lab);
5371 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5372 label_bb->aux = (void *)1;
5375 /* Verify that the case labels are sorted. */
5376 prev = gimple_switch_label (switch_stmt, 0);
5377 for (i = 1; i < n; ++i)
5379 tree c = gimple_switch_label (switch_stmt, i);
5380 if (!CASE_LOW (c))
5382 error ("found default case not at the start of "
5383 "case vector");
5384 err = 1;
5385 continue;
5387 if (CASE_LOW (prev)
5388 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5390 error ("case labels not sorted: ");
5391 print_generic_expr (stderr, prev, 0);
5392 fprintf (stderr," is greater than ");
5393 print_generic_expr (stderr, c, 0);
5394 fprintf (stderr," but comes before it.\n");
5395 err = 1;
5397 prev = c;
5399 /* VRP will remove the default case if it can prove it will
5400 never be executed. So do not verify there always exists
5401 a default case here. */
5403 FOR_EACH_EDGE (e, ei, bb->succs)
5405 if (!e->dest->aux)
5407 error ("extra outgoing edge %d->%d",
5408 bb->index, e->dest->index);
5409 err = 1;
5412 e->dest->aux = (void *)2;
5413 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5414 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5416 error ("wrong outgoing edge flags at end of bb %d",
5417 bb->index);
5418 err = 1;
5422 /* Check that we have all of them. */
5423 for (i = 0; i < n; ++i)
5425 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5426 basic_block label_bb = label_to_block (lab);
5428 if (label_bb->aux != (void *)2)
5430 error ("missing edge %i->%i", bb->index, label_bb->index);
5431 err = 1;
5435 FOR_EACH_EDGE (e, ei, bb->succs)
5436 e->dest->aux = (void *)0;
5438 break;
5440 case GIMPLE_EH_DISPATCH:
5441 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5442 break;
5444 default:
5445 break;
5449 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5450 verify_dominators (CDI_DOMINATORS);
5452 return err;
5456 /* Updates phi nodes after creating a forwarder block joined
5457 by edge FALLTHRU. */
5459 static void
5460 gimple_make_forwarder_block (edge fallthru)
5462 edge e;
5463 edge_iterator ei;
5464 basic_block dummy, bb;
5465 tree var;
5466 gphi_iterator gsi;
5468 dummy = fallthru->src;
5469 bb = fallthru->dest;
5471 if (single_pred_p (bb))
5472 return;
5474 /* If we redirected a branch we must create new PHI nodes at the
5475 start of BB. */
5476 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5478 gphi *phi, *new_phi;
5480 phi = gsi.phi ();
5481 var = gimple_phi_result (phi);
5482 new_phi = create_phi_node (var, bb);
5483 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5484 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5485 UNKNOWN_LOCATION);
5488 /* Add the arguments we have stored on edges. */
5489 FOR_EACH_EDGE (e, ei, bb->preds)
5491 if (e == fallthru)
5492 continue;
5494 flush_pending_stmts (e);
5499 /* Return a non-special label in the head of basic block BLOCK.
5500 Create one if it doesn't exist. */
5502 tree
5503 gimple_block_label (basic_block bb)
5505 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5506 bool first = true;
5507 tree label;
5508 glabel *stmt;
5510 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5512 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5513 if (!stmt)
5514 break;
5515 label = gimple_label_label (stmt);
5516 if (!DECL_NONLOCAL (label))
5518 if (!first)
5519 gsi_move_before (&i, &s);
5520 return label;
5524 label = create_artificial_label (UNKNOWN_LOCATION);
5525 stmt = gimple_build_label (label);
5526 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5527 return label;
5531 /* Attempt to perform edge redirection by replacing a possibly complex
5532 jump instruction by a goto or by removing the jump completely.
5533 This can apply only if all edges now point to the same block. The
5534 parameters and return values are equivalent to
5535 redirect_edge_and_branch. */
5537 static edge
5538 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5540 basic_block src = e->src;
5541 gimple_stmt_iterator i;
5542 gimple stmt;
5544 /* We can replace or remove a complex jump only when we have exactly
5545 two edges. */
5546 if (EDGE_COUNT (src->succs) != 2
5547 /* Verify that all targets will be TARGET. Specifically, the
5548 edge that is not E must also go to TARGET. */
5549 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5550 return NULL;
5552 i = gsi_last_bb (src);
5553 if (gsi_end_p (i))
5554 return NULL;
5556 stmt = gsi_stmt (i);
5558 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5560 gsi_remove (&i, true);
5561 e = ssa_redirect_edge (e, target);
5562 e->flags = EDGE_FALLTHRU;
5563 return e;
5566 return NULL;
5570 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5571 edge representing the redirected branch. */
5573 static edge
5574 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5576 basic_block bb = e->src;
5577 gimple_stmt_iterator gsi;
5578 edge ret;
5579 gimple stmt;
5581 if (e->flags & EDGE_ABNORMAL)
5582 return NULL;
5584 if (e->dest == dest)
5585 return NULL;
5587 if (e->flags & EDGE_EH)
5588 return redirect_eh_edge (e, dest);
5590 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5592 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5593 if (ret)
5594 return ret;
5597 gsi = gsi_last_bb (bb);
5598 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5600 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5602 case GIMPLE_COND:
5603 /* For COND_EXPR, we only need to redirect the edge. */
5604 break;
5606 case GIMPLE_GOTO:
5607 /* No non-abnormal edges should lead from a non-simple goto, and
5608 simple ones should be represented implicitly. */
5609 gcc_unreachable ();
5611 case GIMPLE_SWITCH:
5613 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5614 tree label = gimple_block_label (dest);
5615 tree cases = get_cases_for_edge (e, switch_stmt);
5617 /* If we have a list of cases associated with E, then use it
5618 as it's a lot faster than walking the entire case vector. */
5619 if (cases)
5621 edge e2 = find_edge (e->src, dest);
5622 tree last, first;
5624 first = cases;
5625 while (cases)
5627 last = cases;
5628 CASE_LABEL (cases) = label;
5629 cases = CASE_CHAIN (cases);
5632 /* If there was already an edge in the CFG, then we need
5633 to move all the cases associated with E to E2. */
5634 if (e2)
5636 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5638 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5639 CASE_CHAIN (cases2) = first;
5641 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5643 else
5645 size_t i, n = gimple_switch_num_labels (switch_stmt);
5647 for (i = 0; i < n; i++)
5649 tree elt = gimple_switch_label (switch_stmt, i);
5650 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5651 CASE_LABEL (elt) = label;
5655 break;
5657 case GIMPLE_ASM:
5659 gasm *asm_stmt = as_a <gasm *> (stmt);
5660 int i, n = gimple_asm_nlabels (asm_stmt);
5661 tree label = NULL;
5663 for (i = 0; i < n; ++i)
5665 tree cons = gimple_asm_label_op (asm_stmt, i);
5666 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5668 if (!label)
5669 label = gimple_block_label (dest);
5670 TREE_VALUE (cons) = label;
5674 /* If we didn't find any label matching the former edge in the
5675 asm labels, we must be redirecting the fallthrough
5676 edge. */
5677 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5679 break;
5681 case GIMPLE_RETURN:
5682 gsi_remove (&gsi, true);
5683 e->flags |= EDGE_FALLTHRU;
5684 break;
5686 case GIMPLE_OMP_RETURN:
5687 case GIMPLE_OMP_CONTINUE:
5688 case GIMPLE_OMP_SECTIONS_SWITCH:
5689 case GIMPLE_OMP_FOR:
5690 /* The edges from OMP constructs can be simply redirected. */
5691 break;
5693 case GIMPLE_EH_DISPATCH:
5694 if (!(e->flags & EDGE_FALLTHRU))
5695 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5696 break;
5698 case GIMPLE_TRANSACTION:
5699 /* The ABORT edge has a stored label associated with it, otherwise
5700 the edges are simply redirectable. */
5701 if (e->flags == 0)
5702 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5703 gimple_block_label (dest));
5704 break;
5706 default:
5707 /* Otherwise it must be a fallthru edge, and we don't need to
5708 do anything besides redirecting it. */
5709 gcc_assert (e->flags & EDGE_FALLTHRU);
5710 break;
5713 /* Update/insert PHI nodes as necessary. */
5715 /* Now update the edges in the CFG. */
5716 e = ssa_redirect_edge (e, dest);
5718 return e;
5721 /* Returns true if it is possible to remove edge E by redirecting
5722 it to the destination of the other edge from E->src. */
5724 static bool
5725 gimple_can_remove_branch_p (const_edge e)
5727 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5728 return false;
5730 return true;
5733 /* Simple wrapper, as we can always redirect fallthru edges. */
5735 static basic_block
5736 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5738 e = gimple_redirect_edge_and_branch (e, dest);
5739 gcc_assert (e);
5741 return NULL;
5745 /* Splits basic block BB after statement STMT (but at least after the
5746 labels). If STMT is NULL, BB is split just after the labels. */
5748 static basic_block
5749 gimple_split_block (basic_block bb, void *stmt)
5751 gimple_stmt_iterator gsi;
5752 gimple_stmt_iterator gsi_tgt;
5753 gimple_seq list;
5754 basic_block new_bb;
5755 edge e;
5756 edge_iterator ei;
5758 new_bb = create_empty_bb (bb);
5760 /* Redirect the outgoing edges. */
5761 new_bb->succs = bb->succs;
5762 bb->succs = NULL;
5763 FOR_EACH_EDGE (e, ei, new_bb->succs)
5764 e->src = new_bb;
5766 /* Get a stmt iterator pointing to the first stmt to move. */
5767 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5768 gsi = gsi_after_labels (bb);
5769 else
5771 gsi = gsi_for_stmt ((gimple) stmt);
5772 gsi_next (&gsi);
5775 /* Move everything from GSI to the new basic block. */
5776 if (gsi_end_p (gsi))
5777 return new_bb;
5779 /* Split the statement list - avoid re-creating new containers as this
5780 brings ugly quadratic memory consumption in the inliner.
5781 (We are still quadratic since we need to update stmt BB pointers,
5782 sadly.) */
5783 gsi_split_seq_before (&gsi, &list);
5784 set_bb_seq (new_bb, list);
5785 for (gsi_tgt = gsi_start (list);
5786 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5787 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5789 return new_bb;
5793 /* Moves basic block BB after block AFTER. */
5795 static bool
5796 gimple_move_block_after (basic_block bb, basic_block after)
5798 if (bb->prev_bb == after)
5799 return true;
5801 unlink_block (bb);
5802 link_block (bb, after);
5804 return true;
5808 /* Return TRUE if block BB has no executable statements, otherwise return
5809 FALSE. */
5811 static bool
5812 gimple_empty_block_p (basic_block bb)
5814 /* BB must have no executable statements. */
5815 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5816 if (phi_nodes (bb))
5817 return false;
5818 if (gsi_end_p (gsi))
5819 return true;
5820 if (is_gimple_debug (gsi_stmt (gsi)))
5821 gsi_next_nondebug (&gsi);
5822 return gsi_end_p (gsi);
5826 /* Split a basic block if it ends with a conditional branch and if the
5827 other part of the block is not empty. */
5829 static basic_block
5830 gimple_split_block_before_cond_jump (basic_block bb)
5832 gimple last, split_point;
5833 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5834 if (gsi_end_p (gsi))
5835 return NULL;
5836 last = gsi_stmt (gsi);
5837 if (gimple_code (last) != GIMPLE_COND
5838 && gimple_code (last) != GIMPLE_SWITCH)
5839 return NULL;
5840 gsi_prev_nondebug (&gsi);
5841 split_point = gsi_stmt (gsi);
5842 return split_block (bb, split_point)->dest;
5846 /* Return true if basic_block can be duplicated. */
5848 static bool
5849 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5851 return true;
5854 /* Create a duplicate of the basic block BB. NOTE: This does not
5855 preserve SSA form. */
5857 static basic_block
5858 gimple_duplicate_bb (basic_block bb)
5860 basic_block new_bb;
5861 gimple_stmt_iterator gsi_tgt;
5863 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5865 /* Copy the PHI nodes. We ignore PHI node arguments here because
5866 the incoming edges have not been setup yet. */
5867 for (gphi_iterator gpi = gsi_start_phis (bb);
5868 !gsi_end_p (gpi);
5869 gsi_next (&gpi))
5871 gphi *phi, *copy;
5872 phi = gpi.phi ();
5873 copy = create_phi_node (NULL_TREE, new_bb);
5874 create_new_def_for (gimple_phi_result (phi), copy,
5875 gimple_phi_result_ptr (copy));
5876 gimple_set_uid (copy, gimple_uid (phi));
5879 gsi_tgt = gsi_start_bb (new_bb);
5880 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5881 !gsi_end_p (gsi);
5882 gsi_next (&gsi))
5884 def_operand_p def_p;
5885 ssa_op_iter op_iter;
5886 tree lhs;
5887 gimple stmt, copy;
5889 stmt = gsi_stmt (gsi);
5890 if (gimple_code (stmt) == GIMPLE_LABEL)
5891 continue;
5893 /* Don't duplicate label debug stmts. */
5894 if (gimple_debug_bind_p (stmt)
5895 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5896 == LABEL_DECL)
5897 continue;
5899 /* Create a new copy of STMT and duplicate STMT's virtual
5900 operands. */
5901 copy = gimple_copy (stmt);
5902 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5904 maybe_duplicate_eh_stmt (copy, stmt);
5905 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5907 /* When copying around a stmt writing into a local non-user
5908 aggregate, make sure it won't share stack slot with other
5909 vars. */
5910 lhs = gimple_get_lhs (stmt);
5911 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5913 tree base = get_base_address (lhs);
5914 if (base
5915 && (TREE_CODE (base) == VAR_DECL
5916 || TREE_CODE (base) == RESULT_DECL)
5917 && DECL_IGNORED_P (base)
5918 && !TREE_STATIC (base)
5919 && !DECL_EXTERNAL (base)
5920 && (TREE_CODE (base) != VAR_DECL
5921 || !DECL_HAS_VALUE_EXPR_P (base)))
5922 DECL_NONSHAREABLE (base) = 1;
5925 /* Create new names for all the definitions created by COPY and
5926 add replacement mappings for each new name. */
5927 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5928 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5931 return new_bb;
5934 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5936 static void
5937 add_phi_args_after_copy_edge (edge e_copy)
5939 basic_block bb, bb_copy = e_copy->src, dest;
5940 edge e;
5941 edge_iterator ei;
5942 gphi *phi, *phi_copy;
5943 tree def;
5944 gphi_iterator psi, psi_copy;
5946 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5947 return;
5949 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5951 if (e_copy->dest->flags & BB_DUPLICATED)
5952 dest = get_bb_original (e_copy->dest);
5953 else
5954 dest = e_copy->dest;
5956 e = find_edge (bb, dest);
5957 if (!e)
5959 /* During loop unrolling the target of the latch edge is copied.
5960 In this case we are not looking for edge to dest, but to
5961 duplicated block whose original was dest. */
5962 FOR_EACH_EDGE (e, ei, bb->succs)
5964 if ((e->dest->flags & BB_DUPLICATED)
5965 && get_bb_original (e->dest) == dest)
5966 break;
5969 gcc_assert (e != NULL);
5972 for (psi = gsi_start_phis (e->dest),
5973 psi_copy = gsi_start_phis (e_copy->dest);
5974 !gsi_end_p (psi);
5975 gsi_next (&psi), gsi_next (&psi_copy))
5977 phi = psi.phi ();
5978 phi_copy = psi_copy.phi ();
5979 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5980 add_phi_arg (phi_copy, def, e_copy,
5981 gimple_phi_arg_location_from_edge (phi, e));
5986 /* Basic block BB_COPY was created by code duplication. Add phi node
5987 arguments for edges going out of BB_COPY. The blocks that were
5988 duplicated have BB_DUPLICATED set. */
5990 void
5991 add_phi_args_after_copy_bb (basic_block bb_copy)
5993 edge e_copy;
5994 edge_iterator ei;
5996 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5998 add_phi_args_after_copy_edge (e_copy);
6002 /* Blocks in REGION_COPY array of length N_REGION were created by
6003 duplication of basic blocks. Add phi node arguments for edges
6004 going from these blocks. If E_COPY is not NULL, also add
6005 phi node arguments for its destination.*/
6007 void
6008 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6009 edge e_copy)
6011 unsigned i;
6013 for (i = 0; i < n_region; i++)
6014 region_copy[i]->flags |= BB_DUPLICATED;
6016 for (i = 0; i < n_region; i++)
6017 add_phi_args_after_copy_bb (region_copy[i]);
6018 if (e_copy)
6019 add_phi_args_after_copy_edge (e_copy);
6021 for (i = 0; i < n_region; i++)
6022 region_copy[i]->flags &= ~BB_DUPLICATED;
6025 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6026 important exit edge EXIT. By important we mean that no SSA name defined
6027 inside region is live over the other exit edges of the region. All entry
6028 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6029 to the duplicate of the region. Dominance and loop information is
6030 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6031 UPDATE_DOMINANCE is false then we assume that the caller will update the
6032 dominance information after calling this function. The new basic
6033 blocks are stored to REGION_COPY in the same order as they had in REGION,
6034 provided that REGION_COPY is not NULL.
6035 The function returns false if it is unable to copy the region,
6036 true otherwise. */
6038 bool
6039 gimple_duplicate_sese_region (edge entry, edge exit,
6040 basic_block *region, unsigned n_region,
6041 basic_block *region_copy,
6042 bool update_dominance)
6044 unsigned i;
6045 bool free_region_copy = false, copying_header = false;
6046 struct loop *loop = entry->dest->loop_father;
6047 edge exit_copy;
6048 vec<basic_block> doms;
6049 edge redirected;
6050 int total_freq = 0, entry_freq = 0;
6051 gcov_type total_count = 0, entry_count = 0;
6053 if (!can_copy_bbs_p (region, n_region))
6054 return false;
6056 /* Some sanity checking. Note that we do not check for all possible
6057 missuses of the functions. I.e. if you ask to copy something weird,
6058 it will work, but the state of structures probably will not be
6059 correct. */
6060 for (i = 0; i < n_region; i++)
6062 /* We do not handle subloops, i.e. all the blocks must belong to the
6063 same loop. */
6064 if (region[i]->loop_father != loop)
6065 return false;
6067 if (region[i] != entry->dest
6068 && region[i] == loop->header)
6069 return false;
6072 /* In case the function is used for loop header copying (which is the primary
6073 use), ensure that EXIT and its copy will be new latch and entry edges. */
6074 if (loop->header == entry->dest)
6076 copying_header = true;
6078 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6079 return false;
6081 for (i = 0; i < n_region; i++)
6082 if (region[i] != exit->src
6083 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6084 return false;
6087 initialize_original_copy_tables ();
6089 if (copying_header)
6090 set_loop_copy (loop, loop_outer (loop));
6091 else
6092 set_loop_copy (loop, loop);
6094 if (!region_copy)
6096 region_copy = XNEWVEC (basic_block, n_region);
6097 free_region_copy = true;
6100 /* Record blocks outside the region that are dominated by something
6101 inside. */
6102 if (update_dominance)
6104 doms.create (0);
6105 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6108 if (entry->dest->count)
6110 total_count = entry->dest->count;
6111 entry_count = entry->count;
6112 /* Fix up corner cases, to avoid division by zero or creation of negative
6113 frequencies. */
6114 if (entry_count > total_count)
6115 entry_count = total_count;
6117 else
6119 total_freq = entry->dest->frequency;
6120 entry_freq = EDGE_FREQUENCY (entry);
6121 /* Fix up corner cases, to avoid division by zero or creation of negative
6122 frequencies. */
6123 if (total_freq == 0)
6124 total_freq = 1;
6125 else if (entry_freq > total_freq)
6126 entry_freq = total_freq;
6129 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6130 split_edge_bb_loc (entry), update_dominance);
6131 if (total_count)
6133 scale_bbs_frequencies_gcov_type (region, n_region,
6134 total_count - entry_count,
6135 total_count);
6136 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6137 total_count);
6139 else
6141 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6142 total_freq);
6143 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6146 if (copying_header)
6148 loop->header = exit->dest;
6149 loop->latch = exit->src;
6152 /* Redirect the entry and add the phi node arguments. */
6153 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6154 gcc_assert (redirected != NULL);
6155 flush_pending_stmts (entry);
6157 /* Concerning updating of dominators: We must recount dominators
6158 for entry block and its copy. Anything that is outside of the
6159 region, but was dominated by something inside needs recounting as
6160 well. */
6161 if (update_dominance)
6163 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6164 doms.safe_push (get_bb_original (entry->dest));
6165 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6166 doms.release ();
6169 /* Add the other PHI node arguments. */
6170 add_phi_args_after_copy (region_copy, n_region, NULL);
6172 if (free_region_copy)
6173 free (region_copy);
6175 free_original_copy_tables ();
6176 return true;
6179 /* Checks if BB is part of the region defined by N_REGION BBS. */
6180 static bool
6181 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6183 unsigned int n;
6185 for (n = 0; n < n_region; n++)
6187 if (bb == bbs[n])
6188 return true;
6190 return false;
6193 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6194 are stored to REGION_COPY in the same order in that they appear
6195 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6196 the region, EXIT an exit from it. The condition guarding EXIT
6197 is moved to ENTRY. Returns true if duplication succeeds, false
6198 otherwise.
6200 For example,
6202 some_code;
6203 if (cond)
6205 else
6208 is transformed to
6210 if (cond)
6212 some_code;
6215 else
6217 some_code;
6222 bool
6223 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6224 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6225 basic_block *region_copy ATTRIBUTE_UNUSED)
6227 unsigned i;
6228 bool free_region_copy = false;
6229 struct loop *loop = exit->dest->loop_father;
6230 struct loop *orig_loop = entry->dest->loop_father;
6231 basic_block switch_bb, entry_bb, nentry_bb;
6232 vec<basic_block> doms;
6233 int total_freq = 0, exit_freq = 0;
6234 gcov_type total_count = 0, exit_count = 0;
6235 edge exits[2], nexits[2], e;
6236 gimple_stmt_iterator gsi;
6237 gimple cond_stmt;
6238 edge sorig, snew;
6239 basic_block exit_bb;
6240 gphi_iterator psi;
6241 gphi *phi;
6242 tree def;
6243 struct loop *target, *aloop, *cloop;
6245 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6246 exits[0] = exit;
6247 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6249 if (!can_copy_bbs_p (region, n_region))
6250 return false;
6252 initialize_original_copy_tables ();
6253 set_loop_copy (orig_loop, loop);
6255 target= loop;
6256 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6258 if (bb_part_of_region_p (aloop->header, region, n_region))
6260 cloop = duplicate_loop (aloop, target);
6261 duplicate_subloops (aloop, cloop);
6265 if (!region_copy)
6267 region_copy = XNEWVEC (basic_block, n_region);
6268 free_region_copy = true;
6271 gcc_assert (!need_ssa_update_p (cfun));
6273 /* Record blocks outside the region that are dominated by something
6274 inside. */
6275 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6277 if (exit->src->count)
6279 total_count = exit->src->count;
6280 exit_count = exit->count;
6281 /* Fix up corner cases, to avoid division by zero or creation of negative
6282 frequencies. */
6283 if (exit_count > total_count)
6284 exit_count = total_count;
6286 else
6288 total_freq = exit->src->frequency;
6289 exit_freq = EDGE_FREQUENCY (exit);
6290 /* Fix up corner cases, to avoid division by zero or creation of negative
6291 frequencies. */
6292 if (total_freq == 0)
6293 total_freq = 1;
6294 if (exit_freq > total_freq)
6295 exit_freq = total_freq;
6298 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6299 split_edge_bb_loc (exit), true);
6300 if (total_count)
6302 scale_bbs_frequencies_gcov_type (region, n_region,
6303 total_count - exit_count,
6304 total_count);
6305 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6306 total_count);
6308 else
6310 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6311 total_freq);
6312 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6315 /* Create the switch block, and put the exit condition to it. */
6316 entry_bb = entry->dest;
6317 nentry_bb = get_bb_copy (entry_bb);
6318 if (!last_stmt (entry->src)
6319 || !stmt_ends_bb_p (last_stmt (entry->src)))
6320 switch_bb = entry->src;
6321 else
6322 switch_bb = split_edge (entry);
6323 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6325 gsi = gsi_last_bb (switch_bb);
6326 cond_stmt = last_stmt (exit->src);
6327 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6328 cond_stmt = gimple_copy (cond_stmt);
6330 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6332 sorig = single_succ_edge (switch_bb);
6333 sorig->flags = exits[1]->flags;
6334 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6336 /* Register the new edge from SWITCH_BB in loop exit lists. */
6337 rescan_loop_exit (snew, true, false);
6339 /* Add the PHI node arguments. */
6340 add_phi_args_after_copy (region_copy, n_region, snew);
6342 /* Get rid of now superfluous conditions and associated edges (and phi node
6343 arguments). */
6344 exit_bb = exit->dest;
6346 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6347 PENDING_STMT (e) = NULL;
6349 /* The latch of ORIG_LOOP was copied, and so was the backedge
6350 to the original header. We redirect this backedge to EXIT_BB. */
6351 for (i = 0; i < n_region; i++)
6352 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6354 gcc_assert (single_succ_edge (region_copy[i]));
6355 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6356 PENDING_STMT (e) = NULL;
6357 for (psi = gsi_start_phis (exit_bb);
6358 !gsi_end_p (psi);
6359 gsi_next (&psi))
6361 phi = psi.phi ();
6362 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6363 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6366 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6367 PENDING_STMT (e) = NULL;
6369 /* Anything that is outside of the region, but was dominated by something
6370 inside needs to update dominance info. */
6371 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6372 doms.release ();
6373 /* Update the SSA web. */
6374 update_ssa (TODO_update_ssa);
6376 if (free_region_copy)
6377 free (region_copy);
6379 free_original_copy_tables ();
6380 return true;
6383 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6384 adding blocks when the dominator traversal reaches EXIT. This
6385 function silently assumes that ENTRY strictly dominates EXIT. */
6387 void
6388 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6389 vec<basic_block> *bbs_p)
6391 basic_block son;
6393 for (son = first_dom_son (CDI_DOMINATORS, entry);
6394 son;
6395 son = next_dom_son (CDI_DOMINATORS, son))
6397 bbs_p->safe_push (son);
6398 if (son != exit)
6399 gather_blocks_in_sese_region (son, exit, bbs_p);
6403 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6404 The duplicates are recorded in VARS_MAP. */
6406 static void
6407 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6408 tree to_context)
6410 tree t = *tp, new_t;
6411 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6413 if (DECL_CONTEXT (t) == to_context)
6414 return;
6416 bool existed;
6417 tree &loc = vars_map->get_or_insert (t, &existed);
6419 if (!existed)
6421 if (SSA_VAR_P (t))
6423 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6424 add_local_decl (f, new_t);
6426 else
6428 gcc_assert (TREE_CODE (t) == CONST_DECL);
6429 new_t = copy_node (t);
6431 DECL_CONTEXT (new_t) = to_context;
6433 loc = new_t;
6435 else
6436 new_t = loc;
6438 *tp = new_t;
6442 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6443 VARS_MAP maps old ssa names and var_decls to the new ones. */
6445 static tree
6446 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6447 tree to_context)
6449 tree new_name;
6451 gcc_assert (!virtual_operand_p (name));
6453 tree *loc = vars_map->get (name);
6455 if (!loc)
6457 tree decl = SSA_NAME_VAR (name);
6458 if (decl)
6460 replace_by_duplicate_decl (&decl, vars_map, to_context);
6461 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6462 decl, SSA_NAME_DEF_STMT (name));
6463 if (SSA_NAME_IS_DEFAULT_DEF (name))
6464 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6465 decl, new_name);
6467 else
6468 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6469 name, SSA_NAME_DEF_STMT (name));
6471 vars_map->put (name, new_name);
6473 else
6474 new_name = *loc;
6476 return new_name;
6479 struct move_stmt_d
6481 tree orig_block;
6482 tree new_block;
6483 tree from_context;
6484 tree to_context;
6485 hash_map<tree, tree> *vars_map;
6486 htab_t new_label_map;
6487 hash_map<void *, void *> *eh_map;
6488 bool remap_decls_p;
6491 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6492 contained in *TP if it has been ORIG_BLOCK previously and change the
6493 DECL_CONTEXT of every local variable referenced in *TP. */
6495 static tree
6496 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6498 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6499 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6500 tree t = *tp;
6502 if (EXPR_P (t))
6504 tree block = TREE_BLOCK (t);
6505 if (block == p->orig_block
6506 || (p->orig_block == NULL_TREE
6507 && block != NULL_TREE))
6508 TREE_SET_BLOCK (t, p->new_block);
6509 #ifdef ENABLE_CHECKING
6510 else if (block != NULL_TREE)
6512 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6513 block = BLOCK_SUPERCONTEXT (block);
6514 gcc_assert (block == p->orig_block);
6516 #endif
6518 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6520 if (TREE_CODE (t) == SSA_NAME)
6521 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6522 else if (TREE_CODE (t) == LABEL_DECL)
6524 if (p->new_label_map)
6526 struct tree_map in, *out;
6527 in.base.from = t;
6528 out = (struct tree_map *)
6529 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6530 if (out)
6531 *tp = t = out->to;
6534 DECL_CONTEXT (t) = p->to_context;
6536 else if (p->remap_decls_p)
6538 /* Replace T with its duplicate. T should no longer appear in the
6539 parent function, so this looks wasteful; however, it may appear
6540 in referenced_vars, and more importantly, as virtual operands of
6541 statements, and in alias lists of other variables. It would be
6542 quite difficult to expunge it from all those places. ??? It might
6543 suffice to do this for addressable variables. */
6544 if ((TREE_CODE (t) == VAR_DECL
6545 && !is_global_var (t))
6546 || TREE_CODE (t) == CONST_DECL)
6547 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6549 *walk_subtrees = 0;
6551 else if (TYPE_P (t))
6552 *walk_subtrees = 0;
6554 return NULL_TREE;
6557 /* Helper for move_stmt_r. Given an EH region number for the source
6558 function, map that to the duplicate EH regio number in the dest. */
6560 static int
6561 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6563 eh_region old_r, new_r;
6565 old_r = get_eh_region_from_number (old_nr);
6566 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6568 return new_r->index;
6571 /* Similar, but operate on INTEGER_CSTs. */
6573 static tree
6574 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6576 int old_nr, new_nr;
6578 old_nr = tree_to_shwi (old_t_nr);
6579 new_nr = move_stmt_eh_region_nr (old_nr, p);
6581 return build_int_cst (integer_type_node, new_nr);
6584 /* Like move_stmt_op, but for gimple statements.
6586 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6587 contained in the current statement in *GSI_P and change the
6588 DECL_CONTEXT of every local variable referenced in the current
6589 statement. */
6591 static tree
6592 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6593 struct walk_stmt_info *wi)
6595 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6596 gimple stmt = gsi_stmt (*gsi_p);
6597 tree block = gimple_block (stmt);
6599 if (block == p->orig_block
6600 || (p->orig_block == NULL_TREE
6601 && block != NULL_TREE))
6602 gimple_set_block (stmt, p->new_block);
6604 switch (gimple_code (stmt))
6606 case GIMPLE_CALL:
6607 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6609 tree r, fndecl = gimple_call_fndecl (stmt);
6610 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6611 switch (DECL_FUNCTION_CODE (fndecl))
6613 case BUILT_IN_EH_COPY_VALUES:
6614 r = gimple_call_arg (stmt, 1);
6615 r = move_stmt_eh_region_tree_nr (r, p);
6616 gimple_call_set_arg (stmt, 1, r);
6617 /* FALLTHRU */
6619 case BUILT_IN_EH_POINTER:
6620 case BUILT_IN_EH_FILTER:
6621 r = gimple_call_arg (stmt, 0);
6622 r = move_stmt_eh_region_tree_nr (r, p);
6623 gimple_call_set_arg (stmt, 0, r);
6624 break;
6626 default:
6627 break;
6630 break;
6632 case GIMPLE_RESX:
6634 gresx *resx_stmt = as_a <gresx *> (stmt);
6635 int r = gimple_resx_region (resx_stmt);
6636 r = move_stmt_eh_region_nr (r, p);
6637 gimple_resx_set_region (resx_stmt, r);
6639 break;
6641 case GIMPLE_EH_DISPATCH:
6643 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6644 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6645 r = move_stmt_eh_region_nr (r, p);
6646 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6648 break;
6650 case GIMPLE_OMP_RETURN:
6651 case GIMPLE_OMP_CONTINUE:
6652 break;
6653 default:
6654 if (is_gimple_omp (stmt))
6656 /* Do not remap variables inside OMP directives. Variables
6657 referenced in clauses and directive header belong to the
6658 parent function and should not be moved into the child
6659 function. */
6660 bool save_remap_decls_p = p->remap_decls_p;
6661 p->remap_decls_p = false;
6662 *handled_ops_p = true;
6664 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6665 move_stmt_op, wi);
6667 p->remap_decls_p = save_remap_decls_p;
6669 break;
6672 return NULL_TREE;
6675 /* Move basic block BB from function CFUN to function DEST_FN. The
6676 block is moved out of the original linked list and placed after
6677 block AFTER in the new list. Also, the block is removed from the
6678 original array of blocks and placed in DEST_FN's array of blocks.
6679 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6680 updated to reflect the moved edges.
6682 The local variables are remapped to new instances, VARS_MAP is used
6683 to record the mapping. */
6685 static void
6686 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6687 basic_block after, bool update_edge_count_p,
6688 struct move_stmt_d *d)
6690 struct control_flow_graph *cfg;
6691 edge_iterator ei;
6692 edge e;
6693 gimple_stmt_iterator si;
6694 unsigned old_len, new_len;
6696 /* Remove BB from dominance structures. */
6697 delete_from_dominance_info (CDI_DOMINATORS, bb);
6699 /* Move BB from its current loop to the copy in the new function. */
6700 if (current_loops)
6702 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6703 if (new_loop)
6704 bb->loop_father = new_loop;
6707 /* Link BB to the new linked list. */
6708 move_block_after (bb, after);
6710 /* Update the edge count in the corresponding flowgraphs. */
6711 if (update_edge_count_p)
6712 FOR_EACH_EDGE (e, ei, bb->succs)
6714 cfun->cfg->x_n_edges--;
6715 dest_cfun->cfg->x_n_edges++;
6718 /* Remove BB from the original basic block array. */
6719 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6720 cfun->cfg->x_n_basic_blocks--;
6722 /* Grow DEST_CFUN's basic block array if needed. */
6723 cfg = dest_cfun->cfg;
6724 cfg->x_n_basic_blocks++;
6725 if (bb->index >= cfg->x_last_basic_block)
6726 cfg->x_last_basic_block = bb->index + 1;
6728 old_len = vec_safe_length (cfg->x_basic_block_info);
6729 if ((unsigned) cfg->x_last_basic_block >= old_len)
6731 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6732 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6735 (*cfg->x_basic_block_info)[bb->index] = bb;
6737 /* Remap the variables in phi nodes. */
6738 for (gphi_iterator psi = gsi_start_phis (bb);
6739 !gsi_end_p (psi); )
6741 gphi *phi = psi.phi ();
6742 use_operand_p use;
6743 tree op = PHI_RESULT (phi);
6744 ssa_op_iter oi;
6745 unsigned i;
6747 if (virtual_operand_p (op))
6749 /* Remove the phi nodes for virtual operands (alias analysis will be
6750 run for the new function, anyway). */
6751 remove_phi_node (&psi, true);
6752 continue;
6755 SET_PHI_RESULT (phi,
6756 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6757 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6759 op = USE_FROM_PTR (use);
6760 if (TREE_CODE (op) == SSA_NAME)
6761 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6764 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6766 location_t locus = gimple_phi_arg_location (phi, i);
6767 tree block = LOCATION_BLOCK (locus);
6769 if (locus == UNKNOWN_LOCATION)
6770 continue;
6771 if (d->orig_block == NULL_TREE || block == d->orig_block)
6773 if (d->new_block == NULL_TREE)
6774 locus = LOCATION_LOCUS (locus);
6775 else
6776 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6777 gimple_phi_arg_set_location (phi, i, locus);
6781 gsi_next (&psi);
6784 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6786 gimple stmt = gsi_stmt (si);
6787 struct walk_stmt_info wi;
6789 memset (&wi, 0, sizeof (wi));
6790 wi.info = d;
6791 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6793 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6795 tree label = gimple_label_label (label_stmt);
6796 int uid = LABEL_DECL_UID (label);
6798 gcc_assert (uid > -1);
6800 old_len = vec_safe_length (cfg->x_label_to_block_map);
6801 if (old_len <= (unsigned) uid)
6803 new_len = 3 * uid / 2 + 1;
6804 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6807 (*cfg->x_label_to_block_map)[uid] = bb;
6808 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6810 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6812 if (uid >= dest_cfun->cfg->last_label_uid)
6813 dest_cfun->cfg->last_label_uid = uid + 1;
6816 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6817 remove_stmt_from_eh_lp_fn (cfun, stmt);
6819 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6820 gimple_remove_stmt_histograms (cfun, stmt);
6822 /* We cannot leave any operands allocated from the operand caches of
6823 the current function. */
6824 free_stmt_operands (cfun, stmt);
6825 push_cfun (dest_cfun);
6826 update_stmt (stmt);
6827 pop_cfun ();
6830 FOR_EACH_EDGE (e, ei, bb->succs)
6831 if (e->goto_locus != UNKNOWN_LOCATION)
6833 tree block = LOCATION_BLOCK (e->goto_locus);
6834 if (d->orig_block == NULL_TREE
6835 || block == d->orig_block)
6836 e->goto_locus = d->new_block ?
6837 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6838 LOCATION_LOCUS (e->goto_locus);
6842 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6843 the outermost EH region. Use REGION as the incoming base EH region. */
6845 static eh_region
6846 find_outermost_region_in_block (struct function *src_cfun,
6847 basic_block bb, eh_region region)
6849 gimple_stmt_iterator si;
6851 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6853 gimple stmt = gsi_stmt (si);
6854 eh_region stmt_region;
6855 int lp_nr;
6857 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6858 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6859 if (stmt_region)
6861 if (region == NULL)
6862 region = stmt_region;
6863 else if (stmt_region != region)
6865 region = eh_region_outermost (src_cfun, stmt_region, region);
6866 gcc_assert (region != NULL);
6871 return region;
6874 static tree
6875 new_label_mapper (tree decl, void *data)
6877 htab_t hash = (htab_t) data;
6878 struct tree_map *m;
6879 void **slot;
6881 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6883 m = XNEW (struct tree_map);
6884 m->hash = DECL_UID (decl);
6885 m->base.from = decl;
6886 m->to = create_artificial_label (UNKNOWN_LOCATION);
6887 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6888 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6889 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6891 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6892 gcc_assert (*slot == NULL);
6894 *slot = m;
6896 return m->to;
6899 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6900 subblocks. */
6902 static void
6903 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6904 tree to_context)
6906 tree *tp, t;
6908 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6910 t = *tp;
6911 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6912 continue;
6913 replace_by_duplicate_decl (&t, vars_map, to_context);
6914 if (t != *tp)
6916 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6918 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6919 DECL_HAS_VALUE_EXPR_P (t) = 1;
6921 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6922 *tp = t;
6926 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6927 replace_block_vars_by_duplicates (block, vars_map, to_context);
6930 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6931 from FN1 to FN2. */
6933 static void
6934 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6935 struct loop *loop)
6937 /* Discard it from the old loop array. */
6938 (*get_loops (fn1))[loop->num] = NULL;
6940 /* Place it in the new loop array, assigning it a new number. */
6941 loop->num = number_of_loops (fn2);
6942 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6944 /* Recurse to children. */
6945 for (loop = loop->inner; loop; loop = loop->next)
6946 fixup_loop_arrays_after_move (fn1, fn2, loop);
6949 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6950 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6952 DEBUG_FUNCTION void
6953 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6955 basic_block bb;
6956 edge_iterator ei;
6957 edge e;
6958 bitmap bbs = BITMAP_ALLOC (NULL);
6959 int i;
6961 gcc_assert (entry != NULL);
6962 gcc_assert (entry != exit);
6963 gcc_assert (bbs_p != NULL);
6965 gcc_assert (bbs_p->length () > 0);
6967 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6968 bitmap_set_bit (bbs, bb->index);
6970 gcc_assert (bitmap_bit_p (bbs, entry->index));
6971 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6973 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6975 if (bb == entry)
6977 gcc_assert (single_pred_p (entry));
6978 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6980 else
6981 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6983 e = ei_edge (ei);
6984 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6987 if (bb == exit)
6989 gcc_assert (single_succ_p (exit));
6990 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6992 else
6993 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6995 e = ei_edge (ei);
6996 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7000 BITMAP_FREE (bbs);
7004 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7005 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7006 single basic block in the original CFG and the new basic block is
7007 returned. DEST_CFUN must not have a CFG yet.
7009 Note that the region need not be a pure SESE region. Blocks inside
7010 the region may contain calls to abort/exit. The only restriction
7011 is that ENTRY_BB should be the only entry point and it must
7012 dominate EXIT_BB.
7014 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7015 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7016 to the new function.
7018 All local variables referenced in the region are assumed to be in
7019 the corresponding BLOCK_VARS and unexpanded variable lists
7020 associated with DEST_CFUN. */
7022 basic_block
7023 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7024 basic_block exit_bb, tree orig_block)
7026 vec<basic_block> bbs, dom_bbs;
7027 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7028 basic_block after, bb, *entry_pred, *exit_succ, abb;
7029 struct function *saved_cfun = cfun;
7030 int *entry_flag, *exit_flag;
7031 unsigned *entry_prob, *exit_prob;
7032 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7033 edge e;
7034 edge_iterator ei;
7035 htab_t new_label_map;
7036 hash_map<void *, void *> *eh_map;
7037 struct loop *loop = entry_bb->loop_father;
7038 struct loop *loop0 = get_loop (saved_cfun, 0);
7039 struct move_stmt_d d;
7041 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7042 region. */
7043 gcc_assert (entry_bb != exit_bb
7044 && (!exit_bb
7045 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7047 /* Collect all the blocks in the region. Manually add ENTRY_BB
7048 because it won't be added by dfs_enumerate_from. */
7049 bbs.create (0);
7050 bbs.safe_push (entry_bb);
7051 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7052 #ifdef ENABLE_CHECKING
7053 verify_sese (entry_bb, exit_bb, &bbs);
7054 #endif
7056 /* The blocks that used to be dominated by something in BBS will now be
7057 dominated by the new block. */
7058 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7059 bbs.address (),
7060 bbs.length ());
7062 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7063 the predecessor edges to ENTRY_BB and the successor edges to
7064 EXIT_BB so that we can re-attach them to the new basic block that
7065 will replace the region. */
7066 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7067 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7068 entry_flag = XNEWVEC (int, num_entry_edges);
7069 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7070 i = 0;
7071 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7073 entry_prob[i] = e->probability;
7074 entry_flag[i] = e->flags;
7075 entry_pred[i++] = e->src;
7076 remove_edge (e);
7079 if (exit_bb)
7081 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7082 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7083 exit_flag = XNEWVEC (int, num_exit_edges);
7084 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7085 i = 0;
7086 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7088 exit_prob[i] = e->probability;
7089 exit_flag[i] = e->flags;
7090 exit_succ[i++] = e->dest;
7091 remove_edge (e);
7094 else
7096 num_exit_edges = 0;
7097 exit_succ = NULL;
7098 exit_flag = NULL;
7099 exit_prob = NULL;
7102 /* Switch context to the child function to initialize DEST_FN's CFG. */
7103 gcc_assert (dest_cfun->cfg == NULL);
7104 push_cfun (dest_cfun);
7106 init_empty_tree_cfg ();
7108 /* Initialize EH information for the new function. */
7109 eh_map = NULL;
7110 new_label_map = NULL;
7111 if (saved_cfun->eh)
7113 eh_region region = NULL;
7115 FOR_EACH_VEC_ELT (bbs, i, bb)
7116 region = find_outermost_region_in_block (saved_cfun, bb, region);
7118 init_eh_for_function ();
7119 if (region != NULL)
7121 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7122 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7123 new_label_mapper, new_label_map);
7127 /* Initialize an empty loop tree. */
7128 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7129 init_loops_structure (dest_cfun, loops, 1);
7130 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7131 set_loops_for_fn (dest_cfun, loops);
7133 /* Move the outlined loop tree part. */
7134 num_nodes = bbs.length ();
7135 FOR_EACH_VEC_ELT (bbs, i, bb)
7137 if (bb->loop_father->header == bb)
7139 struct loop *this_loop = bb->loop_father;
7140 struct loop *outer = loop_outer (this_loop);
7141 if (outer == loop
7142 /* If the SESE region contains some bbs ending with
7143 a noreturn call, those are considered to belong
7144 to the outermost loop in saved_cfun, rather than
7145 the entry_bb's loop_father. */
7146 || outer == loop0)
7148 if (outer != loop)
7149 num_nodes -= this_loop->num_nodes;
7150 flow_loop_tree_node_remove (bb->loop_father);
7151 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7152 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7155 else if (bb->loop_father == loop0 && loop0 != loop)
7156 num_nodes--;
7158 /* Remove loop exits from the outlined region. */
7159 if (loops_for_fn (saved_cfun)->exits)
7160 FOR_EACH_EDGE (e, ei, bb->succs)
7162 struct loops *l = loops_for_fn (saved_cfun);
7163 loop_exit **slot
7164 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7165 NO_INSERT);
7166 if (slot)
7167 l->exits->clear_slot (slot);
7172 /* Adjust the number of blocks in the tree root of the outlined part. */
7173 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7175 /* Setup a mapping to be used by move_block_to_fn. */
7176 loop->aux = current_loops->tree_root;
7177 loop0->aux = current_loops->tree_root;
7179 pop_cfun ();
7181 /* Move blocks from BBS into DEST_CFUN. */
7182 gcc_assert (bbs.length () >= 2);
7183 after = dest_cfun->cfg->x_entry_block_ptr;
7184 hash_map<tree, tree> vars_map;
7186 memset (&d, 0, sizeof (d));
7187 d.orig_block = orig_block;
7188 d.new_block = DECL_INITIAL (dest_cfun->decl);
7189 d.from_context = cfun->decl;
7190 d.to_context = dest_cfun->decl;
7191 d.vars_map = &vars_map;
7192 d.new_label_map = new_label_map;
7193 d.eh_map = eh_map;
7194 d.remap_decls_p = true;
7196 FOR_EACH_VEC_ELT (bbs, i, bb)
7198 /* No need to update edge counts on the last block. It has
7199 already been updated earlier when we detached the region from
7200 the original CFG. */
7201 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7202 after = bb;
7205 loop->aux = NULL;
7206 loop0->aux = NULL;
7207 /* Loop sizes are no longer correct, fix them up. */
7208 loop->num_nodes -= num_nodes;
7209 for (struct loop *outer = loop_outer (loop);
7210 outer; outer = loop_outer (outer))
7211 outer->num_nodes -= num_nodes;
7212 loop0->num_nodes -= bbs.length () - num_nodes;
7214 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7216 struct loop *aloop;
7217 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7218 if (aloop != NULL)
7220 if (aloop->simduid)
7222 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7223 d.to_context);
7224 dest_cfun->has_simduid_loops = true;
7226 if (aloop->force_vectorize)
7227 dest_cfun->has_force_vectorize_loops = true;
7231 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7232 if (orig_block)
7234 tree block;
7235 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7236 == NULL_TREE);
7237 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7238 = BLOCK_SUBBLOCKS (orig_block);
7239 for (block = BLOCK_SUBBLOCKS (orig_block);
7240 block; block = BLOCK_CHAIN (block))
7241 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7242 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7245 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7246 &vars_map, dest_cfun->decl);
7248 if (new_label_map)
7249 htab_delete (new_label_map);
7250 if (eh_map)
7251 delete eh_map;
7253 /* Rewire the entry and exit blocks. The successor to the entry
7254 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7255 the child function. Similarly, the predecessor of DEST_FN's
7256 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7257 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7258 various CFG manipulation function get to the right CFG.
7260 FIXME, this is silly. The CFG ought to become a parameter to
7261 these helpers. */
7262 push_cfun (dest_cfun);
7263 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7264 if (exit_bb)
7265 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7266 pop_cfun ();
7268 /* Back in the original function, the SESE region has disappeared,
7269 create a new basic block in its place. */
7270 bb = create_empty_bb (entry_pred[0]);
7271 if (current_loops)
7272 add_bb_to_loop (bb, loop);
7273 for (i = 0; i < num_entry_edges; i++)
7275 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7276 e->probability = entry_prob[i];
7279 for (i = 0; i < num_exit_edges; i++)
7281 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7282 e->probability = exit_prob[i];
7285 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7286 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7287 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7288 dom_bbs.release ();
7290 if (exit_bb)
7292 free (exit_prob);
7293 free (exit_flag);
7294 free (exit_succ);
7296 free (entry_prob);
7297 free (entry_flag);
7298 free (entry_pred);
7299 bbs.release ();
7301 return bb;
7305 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7308 void
7309 dump_function_to_file (tree fndecl, FILE *file, int flags)
7311 tree arg, var, old_current_fndecl = current_function_decl;
7312 struct function *dsf;
7313 bool ignore_topmost_bind = false, any_var = false;
7314 basic_block bb;
7315 tree chain;
7316 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7317 && decl_is_tm_clone (fndecl));
7318 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7320 current_function_decl = fndecl;
7321 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7323 arg = DECL_ARGUMENTS (fndecl);
7324 while (arg)
7326 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7327 fprintf (file, " ");
7328 print_generic_expr (file, arg, dump_flags);
7329 if (flags & TDF_VERBOSE)
7330 print_node (file, "", arg, 4);
7331 if (DECL_CHAIN (arg))
7332 fprintf (file, ", ");
7333 arg = DECL_CHAIN (arg);
7335 fprintf (file, ")\n");
7337 if (flags & TDF_VERBOSE)
7338 print_node (file, "", fndecl, 2);
7340 dsf = DECL_STRUCT_FUNCTION (fndecl);
7341 if (dsf && (flags & TDF_EH))
7342 dump_eh_tree (file, dsf);
7344 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7346 dump_node (fndecl, TDF_SLIM | flags, file);
7347 current_function_decl = old_current_fndecl;
7348 return;
7351 /* When GIMPLE is lowered, the variables are no longer available in
7352 BIND_EXPRs, so display them separately. */
7353 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7355 unsigned ix;
7356 ignore_topmost_bind = true;
7358 fprintf (file, "{\n");
7359 if (!vec_safe_is_empty (fun->local_decls))
7360 FOR_EACH_LOCAL_DECL (fun, ix, var)
7362 print_generic_decl (file, var, flags);
7363 if (flags & TDF_VERBOSE)
7364 print_node (file, "", var, 4);
7365 fprintf (file, "\n");
7367 any_var = true;
7369 if (gimple_in_ssa_p (cfun))
7370 for (ix = 1; ix < num_ssa_names; ++ix)
7372 tree name = ssa_name (ix);
7373 if (name && !SSA_NAME_VAR (name))
7375 fprintf (file, " ");
7376 print_generic_expr (file, TREE_TYPE (name), flags);
7377 fprintf (file, " ");
7378 print_generic_expr (file, name, flags);
7379 fprintf (file, ";\n");
7381 any_var = true;
7386 if (fun && fun->decl == fndecl
7387 && fun->cfg
7388 && basic_block_info_for_fn (fun))
7390 /* If the CFG has been built, emit a CFG-based dump. */
7391 if (!ignore_topmost_bind)
7392 fprintf (file, "{\n");
7394 if (any_var && n_basic_blocks_for_fn (fun))
7395 fprintf (file, "\n");
7397 FOR_EACH_BB_FN (bb, fun)
7398 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7400 fprintf (file, "}\n");
7402 else if (DECL_SAVED_TREE (fndecl) == NULL)
7404 /* The function is now in GIMPLE form but the CFG has not been
7405 built yet. Emit the single sequence of GIMPLE statements
7406 that make up its body. */
7407 gimple_seq body = gimple_body (fndecl);
7409 if (gimple_seq_first_stmt (body)
7410 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7411 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7412 print_gimple_seq (file, body, 0, flags);
7413 else
7415 if (!ignore_topmost_bind)
7416 fprintf (file, "{\n");
7418 if (any_var)
7419 fprintf (file, "\n");
7421 print_gimple_seq (file, body, 2, flags);
7422 fprintf (file, "}\n");
7425 else
7427 int indent;
7429 /* Make a tree based dump. */
7430 chain = DECL_SAVED_TREE (fndecl);
7431 if (chain && TREE_CODE (chain) == BIND_EXPR)
7433 if (ignore_topmost_bind)
7435 chain = BIND_EXPR_BODY (chain);
7436 indent = 2;
7438 else
7439 indent = 0;
7441 else
7443 if (!ignore_topmost_bind)
7445 fprintf (file, "{\n");
7446 /* No topmost bind, pretend it's ignored for later. */
7447 ignore_topmost_bind = true;
7449 indent = 2;
7452 if (any_var)
7453 fprintf (file, "\n");
7455 print_generic_stmt_indented (file, chain, flags, indent);
7456 if (ignore_topmost_bind)
7457 fprintf (file, "}\n");
7460 if (flags & TDF_ENUMERATE_LOCALS)
7461 dump_enumerated_decls (file, flags);
7462 fprintf (file, "\n\n");
7464 current_function_decl = old_current_fndecl;
7467 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7469 DEBUG_FUNCTION void
7470 debug_function (tree fn, int flags)
7472 dump_function_to_file (fn, stderr, flags);
7476 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7478 static void
7479 print_pred_bbs (FILE *file, basic_block bb)
7481 edge e;
7482 edge_iterator ei;
7484 FOR_EACH_EDGE (e, ei, bb->preds)
7485 fprintf (file, "bb_%d ", e->src->index);
7489 /* Print on FILE the indexes for the successors of basic_block BB. */
7491 static void
7492 print_succ_bbs (FILE *file, basic_block bb)
7494 edge e;
7495 edge_iterator ei;
7497 FOR_EACH_EDGE (e, ei, bb->succs)
7498 fprintf (file, "bb_%d ", e->dest->index);
7501 /* Print to FILE the basic block BB following the VERBOSITY level. */
7503 void
7504 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7506 char *s_indent = (char *) alloca ((size_t) indent + 1);
7507 memset ((void *) s_indent, ' ', (size_t) indent);
7508 s_indent[indent] = '\0';
7510 /* Print basic_block's header. */
7511 if (verbosity >= 2)
7513 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7514 print_pred_bbs (file, bb);
7515 fprintf (file, "}, succs = {");
7516 print_succ_bbs (file, bb);
7517 fprintf (file, "})\n");
7520 /* Print basic_block's body. */
7521 if (verbosity >= 3)
7523 fprintf (file, "%s {\n", s_indent);
7524 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7525 fprintf (file, "%s }\n", s_indent);
7529 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7531 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7532 VERBOSITY level this outputs the contents of the loop, or just its
7533 structure. */
7535 static void
7536 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7538 char *s_indent;
7539 basic_block bb;
7541 if (loop == NULL)
7542 return;
7544 s_indent = (char *) alloca ((size_t) indent + 1);
7545 memset ((void *) s_indent, ' ', (size_t) indent);
7546 s_indent[indent] = '\0';
7548 /* Print loop's header. */
7549 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7550 if (loop->header)
7551 fprintf (file, "header = %d", loop->header->index);
7552 else
7554 fprintf (file, "deleted)\n");
7555 return;
7557 if (loop->latch)
7558 fprintf (file, ", latch = %d", loop->latch->index);
7559 else
7560 fprintf (file, ", multiple latches");
7561 fprintf (file, ", niter = ");
7562 print_generic_expr (file, loop->nb_iterations, 0);
7564 if (loop->any_upper_bound)
7566 fprintf (file, ", upper_bound = ");
7567 print_decu (loop->nb_iterations_upper_bound, file);
7570 if (loop->any_estimate)
7572 fprintf (file, ", estimate = ");
7573 print_decu (loop->nb_iterations_estimate, file);
7575 fprintf (file, ")\n");
7577 /* Print loop's body. */
7578 if (verbosity >= 1)
7580 fprintf (file, "%s{\n", s_indent);
7581 FOR_EACH_BB_FN (bb, cfun)
7582 if (bb->loop_father == loop)
7583 print_loops_bb (file, bb, indent, verbosity);
7585 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7586 fprintf (file, "%s}\n", s_indent);
7590 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7591 spaces. Following VERBOSITY level this outputs the contents of the
7592 loop, or just its structure. */
7594 static void
7595 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7596 int verbosity)
7598 if (loop == NULL)
7599 return;
7601 print_loop (file, loop, indent, verbosity);
7602 print_loop_and_siblings (file, loop->next, indent, verbosity);
7605 /* Follow a CFG edge from the entry point of the program, and on entry
7606 of a loop, pretty print the loop structure on FILE. */
7608 void
7609 print_loops (FILE *file, int verbosity)
7611 basic_block bb;
7613 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7614 if (bb && bb->loop_father)
7615 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7618 /* Dump a loop. */
7620 DEBUG_FUNCTION void
7621 debug (struct loop &ref)
7623 print_loop (stderr, &ref, 0, /*verbosity*/0);
7626 DEBUG_FUNCTION void
7627 debug (struct loop *ptr)
7629 if (ptr)
7630 debug (*ptr);
7631 else
7632 fprintf (stderr, "<nil>\n");
7635 /* Dump a loop verbosely. */
7637 DEBUG_FUNCTION void
7638 debug_verbose (struct loop &ref)
7640 print_loop (stderr, &ref, 0, /*verbosity*/3);
7643 DEBUG_FUNCTION void
7644 debug_verbose (struct loop *ptr)
7646 if (ptr)
7647 debug (*ptr);
7648 else
7649 fprintf (stderr, "<nil>\n");
7653 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7655 DEBUG_FUNCTION void
7656 debug_loops (int verbosity)
7658 print_loops (stderr, verbosity);
7661 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7663 DEBUG_FUNCTION void
7664 debug_loop (struct loop *loop, int verbosity)
7666 print_loop (stderr, loop, 0, verbosity);
7669 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7670 level. */
7672 DEBUG_FUNCTION void
7673 debug_loop_num (unsigned num, int verbosity)
7675 debug_loop (get_loop (cfun, num), verbosity);
7678 /* Return true if BB ends with a call, possibly followed by some
7679 instructions that must stay with the call. Return false,
7680 otherwise. */
7682 static bool
7683 gimple_block_ends_with_call_p (basic_block bb)
7685 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7686 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7690 /* Return true if BB ends with a conditional branch. Return false,
7691 otherwise. */
7693 static bool
7694 gimple_block_ends_with_condjump_p (const_basic_block bb)
7696 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7697 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7701 /* Return true if we need to add fake edge to exit at statement T.
7702 Helper function for gimple_flow_call_edges_add. */
7704 static bool
7705 need_fake_edge_p (gimple t)
7707 tree fndecl = NULL_TREE;
7708 int call_flags = 0;
7710 /* NORETURN and LONGJMP calls already have an edge to exit.
7711 CONST and PURE calls do not need one.
7712 We don't currently check for CONST and PURE here, although
7713 it would be a good idea, because those attributes are
7714 figured out from the RTL in mark_constant_function, and
7715 the counter incrementation code from -fprofile-arcs
7716 leads to different results from -fbranch-probabilities. */
7717 if (is_gimple_call (t))
7719 fndecl = gimple_call_fndecl (t);
7720 call_flags = gimple_call_flags (t);
7723 if (is_gimple_call (t)
7724 && fndecl
7725 && DECL_BUILT_IN (fndecl)
7726 && (call_flags & ECF_NOTHROW)
7727 && !(call_flags & ECF_RETURNS_TWICE)
7728 /* fork() doesn't really return twice, but the effect of
7729 wrapping it in __gcov_fork() which calls __gcov_flush()
7730 and clears the counters before forking has the same
7731 effect as returning twice. Force a fake edge. */
7732 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7733 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7734 return false;
7736 if (is_gimple_call (t))
7738 edge_iterator ei;
7739 edge e;
7740 basic_block bb;
7742 if (!(call_flags & ECF_NORETURN))
7743 return true;
7745 bb = gimple_bb (t);
7746 FOR_EACH_EDGE (e, ei, bb->succs)
7747 if ((e->flags & EDGE_FAKE) == 0)
7748 return true;
7751 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7752 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7753 return true;
7755 return false;
7759 /* Add fake edges to the function exit for any non constant and non
7760 noreturn calls (or noreturn calls with EH/abnormal edges),
7761 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7762 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7763 that were split.
7765 The goal is to expose cases in which entering a basic block does
7766 not imply that all subsequent instructions must be executed. */
7768 static int
7769 gimple_flow_call_edges_add (sbitmap blocks)
7771 int i;
7772 int blocks_split = 0;
7773 int last_bb = last_basic_block_for_fn (cfun);
7774 bool check_last_block = false;
7776 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7777 return 0;
7779 if (! blocks)
7780 check_last_block = true;
7781 else
7782 check_last_block = bitmap_bit_p (blocks,
7783 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7785 /* In the last basic block, before epilogue generation, there will be
7786 a fallthru edge to EXIT. Special care is required if the last insn
7787 of the last basic block is a call because make_edge folds duplicate
7788 edges, which would result in the fallthru edge also being marked
7789 fake, which would result in the fallthru edge being removed by
7790 remove_fake_edges, which would result in an invalid CFG.
7792 Moreover, we can't elide the outgoing fake edge, since the block
7793 profiler needs to take this into account in order to solve the minimal
7794 spanning tree in the case that the call doesn't return.
7796 Handle this by adding a dummy instruction in a new last basic block. */
7797 if (check_last_block)
7799 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7800 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7801 gimple t = NULL;
7803 if (!gsi_end_p (gsi))
7804 t = gsi_stmt (gsi);
7806 if (t && need_fake_edge_p (t))
7808 edge e;
7810 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7811 if (e)
7813 gsi_insert_on_edge (e, gimple_build_nop ());
7814 gsi_commit_edge_inserts ();
7819 /* Now add fake edges to the function exit for any non constant
7820 calls since there is no way that we can determine if they will
7821 return or not... */
7822 for (i = 0; i < last_bb; i++)
7824 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7825 gimple_stmt_iterator gsi;
7826 gimple stmt, last_stmt;
7828 if (!bb)
7829 continue;
7831 if (blocks && !bitmap_bit_p (blocks, i))
7832 continue;
7834 gsi = gsi_last_nondebug_bb (bb);
7835 if (!gsi_end_p (gsi))
7837 last_stmt = gsi_stmt (gsi);
7840 stmt = gsi_stmt (gsi);
7841 if (need_fake_edge_p (stmt))
7843 edge e;
7845 /* The handling above of the final block before the
7846 epilogue should be enough to verify that there is
7847 no edge to the exit block in CFG already.
7848 Calling make_edge in such case would cause us to
7849 mark that edge as fake and remove it later. */
7850 #ifdef ENABLE_CHECKING
7851 if (stmt == last_stmt)
7853 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7854 gcc_assert (e == NULL);
7856 #endif
7858 /* Note that the following may create a new basic block
7859 and renumber the existing basic blocks. */
7860 if (stmt != last_stmt)
7862 e = split_block (bb, stmt);
7863 if (e)
7864 blocks_split++;
7866 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7868 gsi_prev (&gsi);
7870 while (!gsi_end_p (gsi));
7874 if (blocks_split)
7875 verify_flow_info ();
7877 return blocks_split;
7880 /* Removes edge E and all the blocks dominated by it, and updates dominance
7881 information. The IL in E->src needs to be updated separately.
7882 If dominance info is not available, only the edge E is removed.*/
7884 void
7885 remove_edge_and_dominated_blocks (edge e)
7887 vec<basic_block> bbs_to_remove = vNULL;
7888 vec<basic_block> bbs_to_fix_dom = vNULL;
7889 bitmap df, df_idom;
7890 edge f;
7891 edge_iterator ei;
7892 bool none_removed = false;
7893 unsigned i;
7894 basic_block bb, dbb;
7895 bitmap_iterator bi;
7897 /* If we are removing a path inside a non-root loop that may change
7898 loop ownership of blocks or remove loops. Mark loops for fixup. */
7899 if (current_loops
7900 && loop_outer (e->src->loop_father) != NULL
7901 && e->src->loop_father == e->dest->loop_father)
7902 loops_state_set (LOOPS_NEED_FIXUP);
7904 if (!dom_info_available_p (CDI_DOMINATORS))
7906 remove_edge (e);
7907 return;
7910 /* No updating is needed for edges to exit. */
7911 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7913 if (cfgcleanup_altered_bbs)
7914 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7915 remove_edge (e);
7916 return;
7919 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7920 that is not dominated by E->dest, then this set is empty. Otherwise,
7921 all the basic blocks dominated by E->dest are removed.
7923 Also, to DF_IDOM we store the immediate dominators of the blocks in
7924 the dominance frontier of E (i.e., of the successors of the
7925 removed blocks, if there are any, and of E->dest otherwise). */
7926 FOR_EACH_EDGE (f, ei, e->dest->preds)
7928 if (f == e)
7929 continue;
7931 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7933 none_removed = true;
7934 break;
7938 df = BITMAP_ALLOC (NULL);
7939 df_idom = BITMAP_ALLOC (NULL);
7941 if (none_removed)
7942 bitmap_set_bit (df_idom,
7943 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7944 else
7946 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7947 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7949 FOR_EACH_EDGE (f, ei, bb->succs)
7951 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7952 bitmap_set_bit (df, f->dest->index);
7955 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7956 bitmap_clear_bit (df, bb->index);
7958 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7960 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7961 bitmap_set_bit (df_idom,
7962 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7966 if (cfgcleanup_altered_bbs)
7968 /* Record the set of the altered basic blocks. */
7969 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7970 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7973 /* Remove E and the cancelled blocks. */
7974 if (none_removed)
7975 remove_edge (e);
7976 else
7978 /* Walk backwards so as to get a chance to substitute all
7979 released DEFs into debug stmts. See
7980 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7981 details. */
7982 for (i = bbs_to_remove.length (); i-- > 0; )
7983 delete_basic_block (bbs_to_remove[i]);
7986 /* Update the dominance information. The immediate dominator may change only
7987 for blocks whose immediate dominator belongs to DF_IDOM:
7989 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7990 removal. Let Z the arbitrary block such that idom(Z) = Y and
7991 Z dominates X after the removal. Before removal, there exists a path P
7992 from Y to X that avoids Z. Let F be the last edge on P that is
7993 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7994 dominates W, and because of P, Z does not dominate W), and W belongs to
7995 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7996 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7998 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7999 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8000 dbb;
8001 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8002 bbs_to_fix_dom.safe_push (dbb);
8005 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8007 BITMAP_FREE (df);
8008 BITMAP_FREE (df_idom);
8009 bbs_to_remove.release ();
8010 bbs_to_fix_dom.release ();
8013 /* Purge dead EH edges from basic block BB. */
8015 bool
8016 gimple_purge_dead_eh_edges (basic_block bb)
8018 bool changed = false;
8019 edge e;
8020 edge_iterator ei;
8021 gimple stmt = last_stmt (bb);
8023 if (stmt && stmt_can_throw_internal (stmt))
8024 return false;
8026 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8028 if (e->flags & EDGE_EH)
8030 remove_edge_and_dominated_blocks (e);
8031 changed = true;
8033 else
8034 ei_next (&ei);
8037 return changed;
8040 /* Purge dead EH edges from basic block listed in BLOCKS. */
8042 bool
8043 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8045 bool changed = false;
8046 unsigned i;
8047 bitmap_iterator bi;
8049 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8051 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8053 /* Earlier gimple_purge_dead_eh_edges could have removed
8054 this basic block already. */
8055 gcc_assert (bb || changed);
8056 if (bb != NULL)
8057 changed |= gimple_purge_dead_eh_edges (bb);
8060 return changed;
8063 /* Purge dead abnormal call edges from basic block BB. */
8065 bool
8066 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8068 bool changed = false;
8069 edge e;
8070 edge_iterator ei;
8071 gimple stmt = last_stmt (bb);
8073 if (!cfun->has_nonlocal_label
8074 && !cfun->calls_setjmp)
8075 return false;
8077 if (stmt && stmt_can_make_abnormal_goto (stmt))
8078 return false;
8080 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8082 if (e->flags & EDGE_ABNORMAL)
8084 if (e->flags & EDGE_FALLTHRU)
8085 e->flags &= ~EDGE_ABNORMAL;
8086 else
8087 remove_edge_and_dominated_blocks (e);
8088 changed = true;
8090 else
8091 ei_next (&ei);
8094 return changed;
8097 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8099 bool
8100 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8102 bool changed = false;
8103 unsigned i;
8104 bitmap_iterator bi;
8106 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8108 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8110 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8111 this basic block already. */
8112 gcc_assert (bb || changed);
8113 if (bb != NULL)
8114 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8117 return changed;
8120 /* This function is called whenever a new edge is created or
8121 redirected. */
8123 static void
8124 gimple_execute_on_growing_pred (edge e)
8126 basic_block bb = e->dest;
8128 if (!gimple_seq_empty_p (phi_nodes (bb)))
8129 reserve_phi_args_for_new_edge (bb);
8132 /* This function is called immediately before edge E is removed from
8133 the edge vector E->dest->preds. */
8135 static void
8136 gimple_execute_on_shrinking_pred (edge e)
8138 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8139 remove_phi_args (e);
8142 /*---------------------------------------------------------------------------
8143 Helper functions for Loop versioning
8144 ---------------------------------------------------------------------------*/
8146 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8147 of 'first'. Both of them are dominated by 'new_head' basic block. When
8148 'new_head' was created by 'second's incoming edge it received phi arguments
8149 on the edge by split_edge(). Later, additional edge 'e' was created to
8150 connect 'new_head' and 'first'. Now this routine adds phi args on this
8151 additional edge 'e' that new_head to second edge received as part of edge
8152 splitting. */
8154 static void
8155 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8156 basic_block new_head, edge e)
8158 gphi *phi1, *phi2;
8159 gphi_iterator psi1, psi2;
8160 tree def;
8161 edge e2 = find_edge (new_head, second);
8163 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8164 edge, we should always have an edge from NEW_HEAD to SECOND. */
8165 gcc_assert (e2 != NULL);
8167 /* Browse all 'second' basic block phi nodes and add phi args to
8168 edge 'e' for 'first' head. PHI args are always in correct order. */
8170 for (psi2 = gsi_start_phis (second),
8171 psi1 = gsi_start_phis (first);
8172 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8173 gsi_next (&psi2), gsi_next (&psi1))
8175 phi1 = psi1.phi ();
8176 phi2 = psi2.phi ();
8177 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8178 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8183 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8184 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8185 the destination of the ELSE part. */
8187 static void
8188 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8189 basic_block second_head ATTRIBUTE_UNUSED,
8190 basic_block cond_bb, void *cond_e)
8192 gimple_stmt_iterator gsi;
8193 gimple new_cond_expr;
8194 tree cond_expr = (tree) cond_e;
8195 edge e0;
8197 /* Build new conditional expr */
8198 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8199 NULL_TREE, NULL_TREE);
8201 /* Add new cond in cond_bb. */
8202 gsi = gsi_last_bb (cond_bb);
8203 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8205 /* Adjust edges appropriately to connect new head with first head
8206 as well as second head. */
8207 e0 = single_succ_edge (cond_bb);
8208 e0->flags &= ~EDGE_FALLTHRU;
8209 e0->flags |= EDGE_FALSE_VALUE;
8213 /* Do book-keeping of basic block BB for the profile consistency checker.
8214 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8215 then do post-pass accounting. Store the counting in RECORD. */
8216 static void
8217 gimple_account_profile_record (basic_block bb, int after_pass,
8218 struct profile_record *record)
8220 gimple_stmt_iterator i;
8221 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8223 record->size[after_pass]
8224 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8225 if (profile_status_for_fn (cfun) == PROFILE_READ)
8226 record->time[after_pass]
8227 += estimate_num_insns (gsi_stmt (i),
8228 &eni_time_weights) * bb->count;
8229 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8230 record->time[after_pass]
8231 += estimate_num_insns (gsi_stmt (i),
8232 &eni_time_weights) * bb->frequency;
8236 struct cfg_hooks gimple_cfg_hooks = {
8237 "gimple",
8238 gimple_verify_flow_info,
8239 gimple_dump_bb, /* dump_bb */
8240 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8241 create_bb, /* create_basic_block */
8242 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8243 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8244 gimple_can_remove_branch_p, /* can_remove_branch_p */
8245 remove_bb, /* delete_basic_block */
8246 gimple_split_block, /* split_block */
8247 gimple_move_block_after, /* move_block_after */
8248 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8249 gimple_merge_blocks, /* merge_blocks */
8250 gimple_predict_edge, /* predict_edge */
8251 gimple_predicted_by_p, /* predicted_by_p */
8252 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8253 gimple_duplicate_bb, /* duplicate_block */
8254 gimple_split_edge, /* split_edge */
8255 gimple_make_forwarder_block, /* make_forward_block */
8256 NULL, /* tidy_fallthru_edge */
8257 NULL, /* force_nonfallthru */
8258 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8259 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8260 gimple_flow_call_edges_add, /* flow_call_edges_add */
8261 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8262 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8263 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8264 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8265 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8266 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8267 flush_pending_stmts, /* flush_pending_stmts */
8268 gimple_empty_block_p, /* block_empty_p */
8269 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8270 gimple_account_profile_record,
8274 /* Split all critical edges. */
8276 unsigned int
8277 split_critical_edges (void)
8279 basic_block bb;
8280 edge e;
8281 edge_iterator ei;
8283 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8284 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8285 mappings around the calls to split_edge. */
8286 start_recording_case_labels ();
8287 FOR_ALL_BB_FN (bb, cfun)
8289 FOR_EACH_EDGE (e, ei, bb->succs)
8291 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8292 split_edge (e);
8293 /* PRE inserts statements to edges and expects that
8294 since split_critical_edges was done beforehand, committing edge
8295 insertions will not split more edges. In addition to critical
8296 edges we must split edges that have multiple successors and
8297 end by control flow statements, such as RESX.
8298 Go ahead and split them too. This matches the logic in
8299 gimple_find_edge_insert_loc. */
8300 else if ((!single_pred_p (e->dest)
8301 || !gimple_seq_empty_p (phi_nodes (e->dest))
8302 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8303 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8304 && !(e->flags & EDGE_ABNORMAL))
8306 gimple_stmt_iterator gsi;
8308 gsi = gsi_last_bb (e->src);
8309 if (!gsi_end_p (gsi)
8310 && stmt_ends_bb_p (gsi_stmt (gsi))
8311 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8312 && !gimple_call_builtin_p (gsi_stmt (gsi),
8313 BUILT_IN_RETURN)))
8314 split_edge (e);
8318 end_recording_case_labels ();
8319 return 0;
8322 namespace {
8324 const pass_data pass_data_split_crit_edges =
8326 GIMPLE_PASS, /* type */
8327 "crited", /* name */
8328 OPTGROUP_NONE, /* optinfo_flags */
8329 TV_TREE_SPLIT_EDGES, /* tv_id */
8330 PROP_cfg, /* properties_required */
8331 PROP_no_crit_edges, /* properties_provided */
8332 0, /* properties_destroyed */
8333 0, /* todo_flags_start */
8334 0, /* todo_flags_finish */
8337 class pass_split_crit_edges : public gimple_opt_pass
8339 public:
8340 pass_split_crit_edges (gcc::context *ctxt)
8341 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8344 /* opt_pass methods: */
8345 virtual unsigned int execute (function *) { return split_critical_edges (); }
8347 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8348 }; // class pass_split_crit_edges
8350 } // anon namespace
8352 gimple_opt_pass *
8353 make_pass_split_crit_edges (gcc::context *ctxt)
8355 return new pass_split_crit_edges (ctxt);
8359 /* Insert COND expression which is GIMPLE_COND after STMT
8360 in basic block BB with appropriate basic block split
8361 and creation of a new conditionally executed basic block.
8362 Return created basic block. */
8363 basic_block
8364 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8366 edge fall = split_block (bb, stmt);
8367 gimple_stmt_iterator iter = gsi_last_bb (bb);
8368 basic_block new_bb;
8370 /* Insert cond statement. */
8371 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8372 if (gsi_end_p (iter))
8373 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8374 else
8375 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8377 /* Create conditionally executed block. */
8378 new_bb = create_empty_bb (bb);
8379 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8380 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8382 /* Fix edge for split bb. */
8383 fall->flags = EDGE_FALSE_VALUE;
8385 /* Update dominance info. */
8386 if (dom_info_available_p (CDI_DOMINATORS))
8388 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8389 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8392 /* Update loop info. */
8393 if (current_loops)
8394 add_bb_to_loop (new_bb, bb->loop_father);
8396 return new_bb;
8399 /* Build a ternary operation and gimplify it. Emit code before GSI.
8400 Return the gimple_val holding the result. */
8402 tree
8403 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8404 tree type, tree a, tree b, tree c)
8406 tree ret;
8407 location_t loc = gimple_location (gsi_stmt (*gsi));
8409 ret = fold_build3_loc (loc, code, type, a, b, c);
8410 STRIP_NOPS (ret);
8412 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8413 GSI_SAME_STMT);
8416 /* Build a binary operation and gimplify it. Emit code before GSI.
8417 Return the gimple_val holding the result. */
8419 tree
8420 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8421 tree type, tree a, tree b)
8423 tree ret;
8425 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8426 STRIP_NOPS (ret);
8428 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8429 GSI_SAME_STMT);
8432 /* Build a unary operation and gimplify it. Emit code before GSI.
8433 Return the gimple_val holding the result. */
8435 tree
8436 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8437 tree a)
8439 tree ret;
8441 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8442 STRIP_NOPS (ret);
8444 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8445 GSI_SAME_STMT);
8450 /* Given a basic block B which ends with a conditional and has
8451 precisely two successors, determine which of the edges is taken if
8452 the conditional is true and which is taken if the conditional is
8453 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8455 void
8456 extract_true_false_edges_from_block (basic_block b,
8457 edge *true_edge,
8458 edge *false_edge)
8460 edge e = EDGE_SUCC (b, 0);
8462 if (e->flags & EDGE_TRUE_VALUE)
8464 *true_edge = e;
8465 *false_edge = EDGE_SUCC (b, 1);
8467 else
8469 *false_edge = e;
8470 *true_edge = EDGE_SUCC (b, 1);
8474 /* Emit return warnings. */
8476 namespace {
8478 const pass_data pass_data_warn_function_return =
8480 GIMPLE_PASS, /* type */
8481 "*warn_function_return", /* name */
8482 OPTGROUP_NONE, /* optinfo_flags */
8483 TV_NONE, /* tv_id */
8484 PROP_cfg, /* properties_required */
8485 0, /* properties_provided */
8486 0, /* properties_destroyed */
8487 0, /* todo_flags_start */
8488 0, /* todo_flags_finish */
8491 class pass_warn_function_return : public gimple_opt_pass
8493 public:
8494 pass_warn_function_return (gcc::context *ctxt)
8495 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8498 /* opt_pass methods: */
8499 virtual unsigned int execute (function *);
8501 }; // class pass_warn_function_return
8503 unsigned int
8504 pass_warn_function_return::execute (function *fun)
8506 source_location location;
8507 gimple last;
8508 edge e;
8509 edge_iterator ei;
8511 if (!targetm.warn_func_return (fun->decl))
8512 return 0;
8514 /* If we have a path to EXIT, then we do return. */
8515 if (TREE_THIS_VOLATILE (fun->decl)
8516 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8518 location = UNKNOWN_LOCATION;
8519 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8521 last = last_stmt (e->src);
8522 if ((gimple_code (last) == GIMPLE_RETURN
8523 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8524 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8525 break;
8527 if (location == UNKNOWN_LOCATION)
8528 location = cfun->function_end_locus;
8529 warning_at (location, 0, "%<noreturn%> function does return");
8532 /* If we see "return;" in some basic block, then we do reach the end
8533 without returning a value. */
8534 else if (warn_return_type
8535 && !TREE_NO_WARNING (fun->decl)
8536 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8537 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8539 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8541 gimple last = last_stmt (e->src);
8542 greturn *return_stmt = dyn_cast <greturn *> (last);
8543 if (return_stmt
8544 && gimple_return_retval (return_stmt) == NULL
8545 && !gimple_no_warning_p (last))
8547 location = gimple_location (last);
8548 if (location == UNKNOWN_LOCATION)
8549 location = fun->function_end_locus;
8550 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8551 TREE_NO_WARNING (fun->decl) = 1;
8552 break;
8556 return 0;
8559 } // anon namespace
8561 gimple_opt_pass *
8562 make_pass_warn_function_return (gcc::context *ctxt)
8564 return new pass_warn_function_return (ctxt);
8567 /* Walk a gimplified function and warn for functions whose return value is
8568 ignored and attribute((warn_unused_result)) is set. This is done before
8569 inlining, so we don't have to worry about that. */
8571 static void
8572 do_warn_unused_result (gimple_seq seq)
8574 tree fdecl, ftype;
8575 gimple_stmt_iterator i;
8577 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8579 gimple g = gsi_stmt (i);
8581 switch (gimple_code (g))
8583 case GIMPLE_BIND:
8584 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8585 break;
8586 case GIMPLE_TRY:
8587 do_warn_unused_result (gimple_try_eval (g));
8588 do_warn_unused_result (gimple_try_cleanup (g));
8589 break;
8590 case GIMPLE_CATCH:
8591 do_warn_unused_result (gimple_catch_handler (
8592 as_a <gcatch *> (g)));
8593 break;
8594 case GIMPLE_EH_FILTER:
8595 do_warn_unused_result (gimple_eh_filter_failure (g));
8596 break;
8598 case GIMPLE_CALL:
8599 if (gimple_call_lhs (g))
8600 break;
8601 if (gimple_call_internal_p (g))
8602 break;
8604 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8605 LHS. All calls whose value is ignored should be
8606 represented like this. Look for the attribute. */
8607 fdecl = gimple_call_fndecl (g);
8608 ftype = gimple_call_fntype (g);
8610 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8612 location_t loc = gimple_location (g);
8614 if (fdecl)
8615 warning_at (loc, OPT_Wunused_result,
8616 "ignoring return value of %qD, "
8617 "declared with attribute warn_unused_result",
8618 fdecl);
8619 else
8620 warning_at (loc, OPT_Wunused_result,
8621 "ignoring return value of function "
8622 "declared with attribute warn_unused_result");
8624 break;
8626 default:
8627 /* Not a container, not a call, or a call whose value is used. */
8628 break;
8633 namespace {
8635 const pass_data pass_data_warn_unused_result =
8637 GIMPLE_PASS, /* type */
8638 "*warn_unused_result", /* name */
8639 OPTGROUP_NONE, /* optinfo_flags */
8640 TV_NONE, /* tv_id */
8641 PROP_gimple_any, /* properties_required */
8642 0, /* properties_provided */
8643 0, /* properties_destroyed */
8644 0, /* todo_flags_start */
8645 0, /* todo_flags_finish */
8648 class pass_warn_unused_result : public gimple_opt_pass
8650 public:
8651 pass_warn_unused_result (gcc::context *ctxt)
8652 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8655 /* opt_pass methods: */
8656 virtual bool gate (function *) { return flag_warn_unused_result; }
8657 virtual unsigned int execute (function *)
8659 do_warn_unused_result (gimple_body (current_function_decl));
8660 return 0;
8663 }; // class pass_warn_unused_result
8665 } // anon namespace
8667 gimple_opt_pass *
8668 make_pass_warn_unused_result (gcc::context *ctxt)
8670 return new pass_warn_unused_result (ctxt);
8673 /* IPA passes, compilation of earlier functions or inlining
8674 might have changed some properties, such as marked functions nothrow,
8675 pure, const or noreturn.
8676 Remove redundant edges and basic blocks, and create new ones if necessary.
8678 This pass can't be executed as stand alone pass from pass manager, because
8679 in between inlining and this fixup the verify_flow_info would fail. */
8681 unsigned int
8682 execute_fixup_cfg (void)
8684 basic_block bb;
8685 gimple_stmt_iterator gsi;
8686 int todo = 0;
8687 gcov_type count_scale;
8688 edge e;
8689 edge_iterator ei;
8691 count_scale
8692 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8693 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8695 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8696 cgraph_node::get (current_function_decl)->count;
8697 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8698 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8699 count_scale);
8701 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8702 e->count = apply_scale (e->count, count_scale);
8704 FOR_EACH_BB_FN (bb, cfun)
8706 bb->count = apply_scale (bb->count, count_scale);
8707 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8709 gimple stmt = gsi_stmt (gsi);
8710 tree decl = is_gimple_call (stmt)
8711 ? gimple_call_fndecl (stmt)
8712 : NULL;
8713 if (decl)
8715 int flags = gimple_call_flags (stmt);
8716 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8718 if (gimple_purge_dead_abnormal_call_edges (bb))
8719 todo |= TODO_cleanup_cfg;
8721 if (gimple_in_ssa_p (cfun))
8723 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8724 update_stmt (stmt);
8728 if (flags & ECF_NORETURN
8729 && fixup_noreturn_call (stmt))
8730 todo |= TODO_cleanup_cfg;
8733 /* Remove stores to variables we marked write-only.
8734 Keep access when store has side effect, i.e. in case when source
8735 is volatile. */
8736 if (gimple_store_p (stmt)
8737 && !gimple_has_side_effects (stmt))
8739 tree lhs = get_base_address (gimple_get_lhs (stmt));
8741 if (TREE_CODE (lhs) == VAR_DECL
8742 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8743 && varpool_node::get (lhs)->writeonly)
8745 unlink_stmt_vdef (stmt);
8746 gsi_remove (&gsi, true);
8747 release_defs (stmt);
8748 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8749 continue;
8752 /* For calls we can simply remove LHS when it is known
8753 to be write-only. */
8754 if (is_gimple_call (stmt)
8755 && gimple_get_lhs (stmt))
8757 tree lhs = get_base_address (gimple_get_lhs (stmt));
8759 if (TREE_CODE (lhs) == VAR_DECL
8760 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8761 && varpool_node::get (lhs)->writeonly)
8763 gimple_call_set_lhs (stmt, NULL);
8764 update_stmt (stmt);
8765 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8769 if (maybe_clean_eh_stmt (stmt)
8770 && gimple_purge_dead_eh_edges (bb))
8771 todo |= TODO_cleanup_cfg;
8772 gsi_next (&gsi);
8775 FOR_EACH_EDGE (e, ei, bb->succs)
8776 e->count = apply_scale (e->count, count_scale);
8778 /* If we have a basic block with no successors that does not
8779 end with a control statement or a noreturn call end it with
8780 a call to __builtin_unreachable. This situation can occur
8781 when inlining a noreturn call that does in fact return. */
8782 if (EDGE_COUNT (bb->succs) == 0)
8784 gimple stmt = last_stmt (bb);
8785 if (!stmt
8786 || (!is_ctrl_stmt (stmt)
8787 && (!is_gimple_call (stmt)
8788 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8790 if (stmt && is_gimple_call (stmt))
8791 gimple_call_set_ctrl_altering (stmt, false);
8792 stmt = gimple_build_call
8793 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8794 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8795 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8799 if (count_scale != REG_BR_PROB_BASE)
8800 compute_function_frequency ();
8802 if (current_loops
8803 && (todo & TODO_cleanup_cfg))
8804 loops_state_set (LOOPS_NEED_FIXUP);
8806 return todo;
8809 namespace {
8811 const pass_data pass_data_fixup_cfg =
8813 GIMPLE_PASS, /* type */
8814 "fixup_cfg", /* name */
8815 OPTGROUP_NONE, /* optinfo_flags */
8816 TV_NONE, /* tv_id */
8817 PROP_cfg, /* properties_required */
8818 0, /* properties_provided */
8819 0, /* properties_destroyed */
8820 0, /* todo_flags_start */
8821 0, /* todo_flags_finish */
8824 class pass_fixup_cfg : public gimple_opt_pass
8826 public:
8827 pass_fixup_cfg (gcc::context *ctxt)
8828 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8831 /* opt_pass methods: */
8832 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8833 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8835 }; // class pass_fixup_cfg
8837 } // anon namespace
8839 gimple_opt_pass *
8840 make_pass_fixup_cfg (gcc::context *ctxt)
8842 return new pass_fixup_cfg (ctxt);
8845 /* Garbage collection support for edge_def. */
8847 extern void gt_ggc_mx (tree&);
8848 extern void gt_ggc_mx (gimple&);
8849 extern void gt_ggc_mx (rtx&);
8850 extern void gt_ggc_mx (basic_block&);
8852 static void
8853 gt_ggc_mx (rtx_insn *& x)
8855 if (x)
8856 gt_ggc_mx_rtx_def ((void *) x);
8859 void
8860 gt_ggc_mx (edge_def *e)
8862 tree block = LOCATION_BLOCK (e->goto_locus);
8863 gt_ggc_mx (e->src);
8864 gt_ggc_mx (e->dest);
8865 if (current_ir_type () == IR_GIMPLE)
8866 gt_ggc_mx (e->insns.g);
8867 else
8868 gt_ggc_mx (e->insns.r);
8869 gt_ggc_mx (block);
8872 /* PCH support for edge_def. */
8874 extern void gt_pch_nx (tree&);
8875 extern void gt_pch_nx (gimple&);
8876 extern void gt_pch_nx (rtx&);
8877 extern void gt_pch_nx (basic_block&);
8879 static void
8880 gt_pch_nx (rtx_insn *& x)
8882 if (x)
8883 gt_pch_nx_rtx_def ((void *) x);
8886 void
8887 gt_pch_nx (edge_def *e)
8889 tree block = LOCATION_BLOCK (e->goto_locus);
8890 gt_pch_nx (e->src);
8891 gt_pch_nx (e->dest);
8892 if (current_ir_type () == IR_GIMPLE)
8893 gt_pch_nx (e->insns.g);
8894 else
8895 gt_pch_nx (e->insns.r);
8896 gt_pch_nx (block);
8899 void
8900 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8902 tree block = LOCATION_BLOCK (e->goto_locus);
8903 op (&(e->src), cookie);
8904 op (&(e->dest), cookie);
8905 if (current_ir_type () == IR_GIMPLE)
8906 op (&(e->insns.g), cookie);
8907 else
8908 op (&(e->insns.r), cookie);
8909 op (&(block), cookie);