2015-06-11 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / tree-cfg.c
blob9430151fdf6c23c6ccb5f9e14e78bdaac1d688ba
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "input.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "trans-mem.h"
31 #include "stor-layout.h"
32 #include "print-tree.h"
33 #include "tm_p.h"
34 #include "predict.h"
35 #include "hard-reg-set.h"
36 #include "function.h"
37 #include "dominance.h"
38 #include "cfg.h"
39 #include "cfganal.h"
40 #include "basic-block.h"
41 #include "flags.h"
42 #include "gimple-pretty-print.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimple-expr.h"
48 #include "is-a.h"
49 #include "gimple.h"
50 #include "gimple-iterator.h"
51 #include "gimplify-me.h"
52 #include "gimple-walk.h"
53 #include "gimple-ssa.h"
54 #include "plugin-api.h"
55 #include "ipa-ref.h"
56 #include "cgraph.h"
57 #include "tree-cfg.h"
58 #include "tree-phinodes.h"
59 #include "ssa-iterators.h"
60 #include "stringpool.h"
61 #include "tree-ssanames.h"
62 #include "tree-ssa-loop-manip.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "rtl.h"
66 #include "insn-config.h"
67 #include "expmed.h"
68 #include "dojump.h"
69 #include "explow.h"
70 #include "calls.h"
71 #include "emit-rtl.h"
72 #include "varasm.h"
73 #include "stmt.h"
74 #include "expr.h"
75 #include "tree-dfa.h"
76 #include "tree-ssa.h"
77 #include "tree-dump.h"
78 #include "tree-pass.h"
79 #include "diagnostic-core.h"
80 #include "except.h"
81 #include "cfgloop.h"
82 #include "tree-ssa-propagate.h"
83 #include "value-prof.h"
84 #include "tree-inline.h"
85 #include "target.h"
86 #include "tree-ssa-live.h"
87 #include "omp-low.h"
88 #include "tree-cfgcleanup.h"
89 #include "wide-int-print.h"
91 /* This file contains functions for building the Control Flow Graph (CFG)
92 for a function tree. */
94 /* Local declarations. */
96 /* Initial capacity for the basic block array. */
97 static const int initial_cfg_capacity = 20;
99 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
100 which use a particular edge. The CASE_LABEL_EXPRs are chained together
101 via their CASE_CHAIN field, which we clear after we're done with the
102 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
104 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
105 update the case vector in response to edge redirections.
107 Right now this table is set up and torn down at key points in the
108 compilation process. It would be nice if we could make the table
109 more persistent. The key is getting notification of changes to
110 the CFG (particularly edge removal, creation and redirection). */
112 static hash_map<edge, tree> *edge_to_cases;
114 /* If we record edge_to_cases, this bitmap will hold indexes
115 of basic blocks that end in a GIMPLE_SWITCH which we touched
116 due to edge manipulations. */
118 static bitmap touched_switch_bbs;
120 /* CFG statistics. */
121 struct cfg_stats_d
123 long num_merged_labels;
126 static struct cfg_stats_d cfg_stats;
128 /* Hash table to store last discriminator assigned for each locus. */
129 struct locus_discrim_map
131 location_t locus;
132 int discriminator;
135 /* Hashtable helpers. */
137 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
139 typedef locus_discrim_map *value_type;
140 typedef locus_discrim_map *compare_type;
141 static inline hashval_t hash (const locus_discrim_map *);
142 static inline bool equal (const locus_discrim_map *,
143 const locus_discrim_map *);
146 /* Trivial hash function for a location_t. ITEM is a pointer to
147 a hash table entry that maps a location_t to a discriminator. */
149 inline hashval_t
150 locus_discrim_hasher::hash (const locus_discrim_map *item)
152 return LOCATION_LINE (item->locus);
155 /* Equality function for the locus-to-discriminator map. A and B
156 point to the two hash table entries to compare. */
158 inline bool
159 locus_discrim_hasher::equal (const locus_discrim_map *a,
160 const locus_discrim_map *b)
162 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
165 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
167 /* Basic blocks and flowgraphs. */
168 static void make_blocks (gimple_seq);
170 /* Edges. */
171 static void make_edges (void);
172 static void assign_discriminators (void);
173 static void make_cond_expr_edges (basic_block);
174 static void make_gimple_switch_edges (gswitch *, basic_block);
175 static bool make_goto_expr_edges (basic_block);
176 static void make_gimple_asm_edges (basic_block);
177 static edge gimple_redirect_edge_and_branch (edge, basic_block);
178 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
180 /* Various helpers. */
181 static inline bool stmt_starts_bb_p (gimple, gimple);
182 static int gimple_verify_flow_info (void);
183 static void gimple_make_forwarder_block (edge);
184 static gimple first_non_label_stmt (basic_block);
185 static bool verify_gimple_transaction (gtransaction *);
186 static bool call_can_make_abnormal_goto (gimple);
188 /* Flowgraph optimization and cleanup. */
189 static void gimple_merge_blocks (basic_block, basic_block);
190 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
191 static void remove_bb (basic_block);
192 static edge find_taken_edge_computed_goto (basic_block, tree);
193 static edge find_taken_edge_cond_expr (basic_block, tree);
194 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
195 static tree find_case_label_for_value (gswitch *, tree);
197 void
198 init_empty_tree_cfg_for_function (struct function *fn)
200 /* Initialize the basic block array. */
201 init_flow (fn);
202 profile_status_for_fn (fn) = PROFILE_ABSENT;
203 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
204 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
205 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
206 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
207 initial_cfg_capacity);
209 /* Build a mapping of labels to their associated blocks. */
210 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
211 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
212 initial_cfg_capacity);
214 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
215 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
217 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
218 = EXIT_BLOCK_PTR_FOR_FN (fn);
219 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
220 = ENTRY_BLOCK_PTR_FOR_FN (fn);
223 void
224 init_empty_tree_cfg (void)
226 init_empty_tree_cfg_for_function (cfun);
229 /*---------------------------------------------------------------------------
230 Create basic blocks
231 ---------------------------------------------------------------------------*/
233 /* Entry point to the CFG builder for trees. SEQ is the sequence of
234 statements to be added to the flowgraph. */
236 static void
237 build_gimple_cfg (gimple_seq seq)
239 /* Register specific gimple functions. */
240 gimple_register_cfg_hooks ();
242 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
244 init_empty_tree_cfg ();
246 make_blocks (seq);
248 /* Make sure there is always at least one block, even if it's empty. */
249 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
250 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
252 /* Adjust the size of the array. */
253 if (basic_block_info_for_fn (cfun)->length ()
254 < (size_t) n_basic_blocks_for_fn (cfun))
255 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
256 n_basic_blocks_for_fn (cfun));
258 /* To speed up statement iterator walks, we first purge dead labels. */
259 cleanup_dead_labels ();
261 /* Group case nodes to reduce the number of edges.
262 We do this after cleaning up dead labels because otherwise we miss
263 a lot of obvious case merging opportunities. */
264 group_case_labels ();
266 /* Create the edges of the flowgraph. */
267 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
268 make_edges ();
269 assign_discriminators ();
270 cleanup_dead_labels ();
271 delete discriminator_per_locus;
272 discriminator_per_locus = NULL;
275 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
276 them and propagate the information to LOOP. We assume that the annotations
277 come immediately before the condition in BB, if any. */
279 static void
280 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
282 gimple_stmt_iterator gsi = gsi_last_bb (bb);
283 gimple stmt = gsi_stmt (gsi);
285 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
286 return;
288 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
290 stmt = gsi_stmt (gsi);
291 if (gimple_code (stmt) != GIMPLE_CALL)
292 break;
293 if (!gimple_call_internal_p (stmt)
294 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
295 break;
297 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
299 case annot_expr_ivdep_kind:
300 loop->safelen = INT_MAX;
301 break;
302 case annot_expr_no_vector_kind:
303 loop->dont_vectorize = true;
304 break;
305 case annot_expr_vector_kind:
306 loop->force_vectorize = true;
307 cfun->has_force_vectorize_loops = true;
308 break;
309 default:
310 gcc_unreachable ();
313 stmt = gimple_build_assign (gimple_call_lhs (stmt),
314 gimple_call_arg (stmt, 0));
315 gsi_replace (&gsi, stmt, true);
319 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
320 them and propagate the information to the loop. We assume that the
321 annotations come immediately before the condition of the loop. */
323 static void
324 replace_loop_annotate (void)
326 struct loop *loop;
327 basic_block bb;
328 gimple_stmt_iterator gsi;
329 gimple stmt;
331 FOR_EACH_LOOP (loop, 0)
333 /* First look into the header. */
334 replace_loop_annotate_in_block (loop->header, loop);
336 /* Then look into the latch, if any. */
337 if (loop->latch)
338 replace_loop_annotate_in_block (loop->latch, loop);
341 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
342 FOR_EACH_BB_FN (bb, cfun)
344 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
346 stmt = gsi_stmt (gsi);
347 if (gimple_code (stmt) != GIMPLE_CALL)
348 continue;
349 if (!gimple_call_internal_p (stmt)
350 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
351 continue;
353 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
355 case annot_expr_ivdep_kind:
356 case annot_expr_no_vector_kind:
357 case annot_expr_vector_kind:
358 break;
359 default:
360 gcc_unreachable ();
363 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
364 stmt = gimple_build_assign (gimple_call_lhs (stmt),
365 gimple_call_arg (stmt, 0));
366 gsi_replace (&gsi, stmt, true);
372 static unsigned int
373 execute_build_cfg (void)
375 gimple_seq body = gimple_body (current_function_decl);
377 build_gimple_cfg (body);
378 gimple_set_body (current_function_decl, NULL);
379 if (dump_file && (dump_flags & TDF_DETAILS))
381 fprintf (dump_file, "Scope blocks:\n");
382 dump_scope_blocks (dump_file, dump_flags);
384 cleanup_tree_cfg ();
385 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
386 replace_loop_annotate ();
387 return 0;
390 namespace {
392 const pass_data pass_data_build_cfg =
394 GIMPLE_PASS, /* type */
395 "cfg", /* name */
396 OPTGROUP_NONE, /* optinfo_flags */
397 TV_TREE_CFG, /* tv_id */
398 PROP_gimple_leh, /* properties_required */
399 ( PROP_cfg | PROP_loops ), /* properties_provided */
400 0, /* properties_destroyed */
401 0, /* todo_flags_start */
402 0, /* todo_flags_finish */
405 class pass_build_cfg : public gimple_opt_pass
407 public:
408 pass_build_cfg (gcc::context *ctxt)
409 : gimple_opt_pass (pass_data_build_cfg, ctxt)
412 /* opt_pass methods: */
413 virtual unsigned int execute (function *) { return execute_build_cfg (); }
415 }; // class pass_build_cfg
417 } // anon namespace
419 gimple_opt_pass *
420 make_pass_build_cfg (gcc::context *ctxt)
422 return new pass_build_cfg (ctxt);
426 /* Return true if T is a computed goto. */
428 bool
429 computed_goto_p (gimple t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
435 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
436 the other edge points to a bb with just __builtin_unreachable ().
437 I.e. return true for C->M edge in:
438 <bb C>:
440 if (something)
441 goto <bb N>;
442 else
443 goto <bb M>;
444 <bb N>:
445 __builtin_unreachable ();
446 <bb M>: */
448 bool
449 assert_unreachable_fallthru_edge_p (edge e)
451 basic_block pred_bb = e->src;
452 gimple last = last_stmt (pred_bb);
453 if (last && gimple_code (last) == GIMPLE_COND)
455 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
456 if (other_bb == e->dest)
457 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
458 if (EDGE_COUNT (other_bb->succs) == 0)
460 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
461 gimple stmt;
463 if (gsi_end_p (gsi))
464 return false;
465 stmt = gsi_stmt (gsi);
466 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
468 gsi_next (&gsi);
469 if (gsi_end_p (gsi))
470 return false;
471 stmt = gsi_stmt (gsi);
473 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
476 return false;
480 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
481 could alter control flow except via eh. We initialize the flag at
482 CFG build time and only ever clear it later. */
484 static void
485 gimple_call_initialize_ctrl_altering (gimple stmt)
487 int flags = gimple_call_flags (stmt);
489 /* A call alters control flow if it can make an abnormal goto. */
490 if (call_can_make_abnormal_goto (stmt)
491 /* A call also alters control flow if it does not return. */
492 || flags & ECF_NORETURN
493 /* TM ending statements have backedges out of the transaction.
494 Return true so we split the basic block containing them.
495 Note that the TM_BUILTIN test is merely an optimization. */
496 || ((flags & ECF_TM_BUILTIN)
497 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
498 /* BUILT_IN_RETURN call is same as return statement. */
499 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
500 gimple_call_set_ctrl_altering (stmt, true);
501 else
502 gimple_call_set_ctrl_altering (stmt, false);
506 /* Insert SEQ after BB and build a flowgraph. */
508 static basic_block
509 make_blocks_1 (gimple_seq seq, basic_block bb)
511 gimple_stmt_iterator i = gsi_start (seq);
512 gimple stmt = NULL;
513 bool start_new_block = true;
514 bool first_stmt_of_seq = true;
516 while (!gsi_end_p (i))
518 gimple prev_stmt;
520 prev_stmt = stmt;
521 stmt = gsi_stmt (i);
523 if (stmt && is_gimple_call (stmt))
524 gimple_call_initialize_ctrl_altering (stmt);
526 /* If the statement starts a new basic block or if we have determined
527 in a previous pass that we need to create a new block for STMT, do
528 so now. */
529 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
531 if (!first_stmt_of_seq)
532 gsi_split_seq_before (&i, &seq);
533 bb = create_basic_block (seq, bb);
534 start_new_block = false;
537 /* Now add STMT to BB and create the subgraphs for special statement
538 codes. */
539 gimple_set_bb (stmt, bb);
541 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
542 next iteration. */
543 if (stmt_ends_bb_p (stmt))
545 /* If the stmt can make abnormal goto use a new temporary
546 for the assignment to the LHS. This makes sure the old value
547 of the LHS is available on the abnormal edge. Otherwise
548 we will end up with overlapping life-ranges for abnormal
549 SSA names. */
550 if (gimple_has_lhs (stmt)
551 && stmt_can_make_abnormal_goto (stmt)
552 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
554 tree lhs = gimple_get_lhs (stmt);
555 tree tmp = create_tmp_var (TREE_TYPE (lhs));
556 gimple s = gimple_build_assign (lhs, tmp);
557 gimple_set_location (s, gimple_location (stmt));
558 gimple_set_block (s, gimple_block (stmt));
559 gimple_set_lhs (stmt, tmp);
560 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
561 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
562 DECL_GIMPLE_REG_P (tmp) = 1;
563 gsi_insert_after (&i, s, GSI_SAME_STMT);
565 start_new_block = true;
568 gsi_next (&i);
569 first_stmt_of_seq = false;
571 return bb;
574 /* Build a flowgraph for the sequence of stmts SEQ. */
576 static void
577 make_blocks (gimple_seq seq)
579 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
582 /* Create and return a new empty basic block after bb AFTER. */
584 static basic_block
585 create_bb (void *h, void *e, basic_block after)
587 basic_block bb;
589 gcc_assert (!e);
591 /* Create and initialize a new basic block. Since alloc_block uses
592 GC allocation that clears memory to allocate a basic block, we do
593 not have to clear the newly allocated basic block here. */
594 bb = alloc_block ();
596 bb->index = last_basic_block_for_fn (cfun);
597 bb->flags = BB_NEW;
598 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
600 /* Add the new block to the linked list of blocks. */
601 link_block (bb, after);
603 /* Grow the basic block array if needed. */
604 if ((size_t) last_basic_block_for_fn (cfun)
605 == basic_block_info_for_fn (cfun)->length ())
607 size_t new_size =
608 (last_basic_block_for_fn (cfun)
609 + (last_basic_block_for_fn (cfun) + 3) / 4);
610 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
613 /* Add the newly created block to the array. */
614 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
616 n_basic_blocks_for_fn (cfun)++;
617 last_basic_block_for_fn (cfun)++;
619 return bb;
623 /*---------------------------------------------------------------------------
624 Edge creation
625 ---------------------------------------------------------------------------*/
627 /* Fold COND_EXPR_COND of each COND_EXPR. */
629 void
630 fold_cond_expr_cond (void)
632 basic_block bb;
634 FOR_EACH_BB_FN (bb, cfun)
636 gimple stmt = last_stmt (bb);
638 if (stmt && gimple_code (stmt) == GIMPLE_COND)
640 gcond *cond_stmt = as_a <gcond *> (stmt);
641 location_t loc = gimple_location (stmt);
642 tree cond;
643 bool zerop, onep;
645 fold_defer_overflow_warnings ();
646 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
647 boolean_type_node,
648 gimple_cond_lhs (cond_stmt),
649 gimple_cond_rhs (cond_stmt));
650 if (cond)
652 zerop = integer_zerop (cond);
653 onep = integer_onep (cond);
655 else
656 zerop = onep = false;
658 fold_undefer_overflow_warnings (zerop || onep,
659 stmt,
660 WARN_STRICT_OVERFLOW_CONDITIONAL);
661 if (zerop)
662 gimple_cond_make_false (cond_stmt);
663 else if (onep)
664 gimple_cond_make_true (cond_stmt);
669 /* If basic block BB has an abnormal edge to a basic block
670 containing IFN_ABNORMAL_DISPATCHER internal call, return
671 that the dispatcher's basic block, otherwise return NULL. */
673 basic_block
674 get_abnormal_succ_dispatcher (basic_block bb)
676 edge e;
677 edge_iterator ei;
679 FOR_EACH_EDGE (e, ei, bb->succs)
680 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
682 gimple_stmt_iterator gsi
683 = gsi_start_nondebug_after_labels_bb (e->dest);
684 gimple g = gsi_stmt (gsi);
685 if (g
686 && is_gimple_call (g)
687 && gimple_call_internal_p (g)
688 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
689 return e->dest;
691 return NULL;
694 /* Helper function for make_edges. Create a basic block with
695 with ABNORMAL_DISPATCHER internal call in it if needed, and
696 create abnormal edges from BBS to it and from it to FOR_BB
697 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
699 static void
700 handle_abnormal_edges (basic_block *dispatcher_bbs,
701 basic_block for_bb, int *bb_to_omp_idx,
702 auto_vec<basic_block> *bbs, bool computed_goto)
704 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
705 unsigned int idx = 0;
706 basic_block bb;
707 bool inner = false;
709 if (bb_to_omp_idx)
711 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
712 if (bb_to_omp_idx[for_bb->index] != 0)
713 inner = true;
716 /* If the dispatcher has been created already, then there are basic
717 blocks with abnormal edges to it, so just make a new edge to
718 for_bb. */
719 if (*dispatcher == NULL)
721 /* Check if there are any basic blocks that need to have
722 abnormal edges to this dispatcher. If there are none, return
723 early. */
724 if (bb_to_omp_idx == NULL)
726 if (bbs->is_empty ())
727 return;
729 else
731 FOR_EACH_VEC_ELT (*bbs, idx, bb)
732 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
733 break;
734 if (bb == NULL)
735 return;
738 /* Create the dispatcher bb. */
739 *dispatcher = create_basic_block (NULL, for_bb);
740 if (computed_goto)
742 /* Factor computed gotos into a common computed goto site. Also
743 record the location of that site so that we can un-factor the
744 gotos after we have converted back to normal form. */
745 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
747 /* Create the destination of the factored goto. Each original
748 computed goto will put its desired destination into this
749 variable and jump to the label we create immediately below. */
750 tree var = create_tmp_var (ptr_type_node, "gotovar");
752 /* Build a label for the new block which will contain the
753 factored computed goto. */
754 tree factored_label_decl
755 = create_artificial_label (UNKNOWN_LOCATION);
756 gimple factored_computed_goto_label
757 = gimple_build_label (factored_label_decl);
758 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
760 /* Build our new computed goto. */
761 gimple factored_computed_goto = gimple_build_goto (var);
762 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
764 FOR_EACH_VEC_ELT (*bbs, idx, bb)
766 if (bb_to_omp_idx
767 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
768 continue;
770 gsi = gsi_last_bb (bb);
771 gimple last = gsi_stmt (gsi);
773 gcc_assert (computed_goto_p (last));
775 /* Copy the original computed goto's destination into VAR. */
776 gimple assignment
777 = gimple_build_assign (var, gimple_goto_dest (last));
778 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
780 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
781 e->goto_locus = gimple_location (last);
782 gsi_remove (&gsi, true);
785 else
787 tree arg = inner ? boolean_true_node : boolean_false_node;
788 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
789 1, arg);
790 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
791 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
793 /* Create predecessor edges of the dispatcher. */
794 FOR_EACH_VEC_ELT (*bbs, idx, bb)
796 if (bb_to_omp_idx
797 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
798 continue;
799 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
804 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
807 /* Creates outgoing edges for BB. Returns 1 when it ends with an
808 computed goto, returns 2 when it ends with a statement that
809 might return to this function via an nonlocal goto, otherwise
810 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
812 static int
813 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
815 gimple last = last_stmt (bb);
816 bool fallthru = false;
817 int ret = 0;
819 if (!last)
820 return ret;
822 switch (gimple_code (last))
824 case GIMPLE_GOTO:
825 if (make_goto_expr_edges (bb))
826 ret = 1;
827 fallthru = false;
828 break;
829 case GIMPLE_RETURN:
831 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
832 e->goto_locus = gimple_location (last);
833 fallthru = false;
835 break;
836 case GIMPLE_COND:
837 make_cond_expr_edges (bb);
838 fallthru = false;
839 break;
840 case GIMPLE_SWITCH:
841 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
842 fallthru = false;
843 break;
844 case GIMPLE_RESX:
845 make_eh_edges (last);
846 fallthru = false;
847 break;
848 case GIMPLE_EH_DISPATCH:
849 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
850 break;
852 case GIMPLE_CALL:
853 /* If this function receives a nonlocal goto, then we need to
854 make edges from this call site to all the nonlocal goto
855 handlers. */
856 if (stmt_can_make_abnormal_goto (last))
857 ret = 2;
859 /* If this statement has reachable exception handlers, then
860 create abnormal edges to them. */
861 make_eh_edges (last);
863 /* BUILTIN_RETURN is really a return statement. */
864 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
866 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
867 fallthru = false;
869 /* Some calls are known not to return. */
870 else
871 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
872 break;
874 case GIMPLE_ASSIGN:
875 /* A GIMPLE_ASSIGN may throw internally and thus be considered
876 control-altering. */
877 if (is_ctrl_altering_stmt (last))
878 make_eh_edges (last);
879 fallthru = true;
880 break;
882 case GIMPLE_ASM:
883 make_gimple_asm_edges (bb);
884 fallthru = true;
885 break;
887 CASE_GIMPLE_OMP:
888 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
889 break;
891 case GIMPLE_TRANSACTION:
893 tree abort_label
894 = gimple_transaction_label (as_a <gtransaction *> (last));
895 if (abort_label)
896 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
897 fallthru = true;
899 break;
901 default:
902 gcc_assert (!stmt_ends_bb_p (last));
903 fallthru = true;
904 break;
907 if (fallthru)
908 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
910 return ret;
913 /* Join all the blocks in the flowgraph. */
915 static void
916 make_edges (void)
918 basic_block bb;
919 struct omp_region *cur_region = NULL;
920 auto_vec<basic_block> ab_edge_goto;
921 auto_vec<basic_block> ab_edge_call;
922 int *bb_to_omp_idx = NULL;
923 int cur_omp_region_idx = 0;
925 /* Create an edge from entry to the first block with executable
926 statements in it. */
927 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
928 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
929 EDGE_FALLTHRU);
931 /* Traverse the basic block array placing edges. */
932 FOR_EACH_BB_FN (bb, cfun)
934 int mer;
936 if (bb_to_omp_idx)
937 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
939 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
940 if (mer == 1)
941 ab_edge_goto.safe_push (bb);
942 else if (mer == 2)
943 ab_edge_call.safe_push (bb);
945 if (cur_region && bb_to_omp_idx == NULL)
946 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
949 /* Computed gotos are hell to deal with, especially if there are
950 lots of them with a large number of destinations. So we factor
951 them to a common computed goto location before we build the
952 edge list. After we convert back to normal form, we will un-factor
953 the computed gotos since factoring introduces an unwanted jump.
954 For non-local gotos and abnormal edges from calls to calls that return
955 twice or forced labels, factor the abnormal edges too, by having all
956 abnormal edges from the calls go to a common artificial basic block
957 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
958 basic block to all forced labels and calls returning twice.
959 We do this per-OpenMP structured block, because those regions
960 are guaranteed to be single entry single exit by the standard,
961 so it is not allowed to enter or exit such regions abnormally this way,
962 thus all computed gotos, non-local gotos and setjmp/longjmp calls
963 must not transfer control across SESE region boundaries. */
964 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
966 gimple_stmt_iterator gsi;
967 basic_block dispatcher_bb_array[2] = { NULL, NULL };
968 basic_block *dispatcher_bbs = dispatcher_bb_array;
969 int count = n_basic_blocks_for_fn (cfun);
971 if (bb_to_omp_idx)
972 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
974 FOR_EACH_BB_FN (bb, cfun)
976 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
978 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
979 tree target;
981 if (!label_stmt)
982 break;
984 target = gimple_label_label (label_stmt);
986 /* Make an edge to every label block that has been marked as a
987 potential target for a computed goto or a non-local goto. */
988 if (FORCED_LABEL (target))
989 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
990 &ab_edge_goto, true);
991 if (DECL_NONLOCAL (target))
993 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
994 &ab_edge_call, false);
995 break;
999 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1000 gsi_next_nondebug (&gsi);
1001 if (!gsi_end_p (gsi))
1003 /* Make an edge to every setjmp-like call. */
1004 gimple call_stmt = gsi_stmt (gsi);
1005 if (is_gimple_call (call_stmt)
1006 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1007 || gimple_call_builtin_p (call_stmt,
1008 BUILT_IN_SETJMP_RECEIVER)))
1009 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1010 &ab_edge_call, false);
1014 if (bb_to_omp_idx)
1015 XDELETE (dispatcher_bbs);
1018 XDELETE (bb_to_omp_idx);
1020 free_omp_regions ();
1022 /* Fold COND_EXPR_COND of each COND_EXPR. */
1023 fold_cond_expr_cond ();
1026 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1027 needed. Returns true if new bbs were created.
1028 Note: This is transitional code, and should not be used for new code. We
1029 should be able to get rid of this by rewriting all target va-arg
1030 gimplification hooks to use an interface gimple_build_cond_value as described
1031 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1033 bool
1034 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1036 gimple stmt = gsi_stmt (*gsi);
1037 basic_block bb = gimple_bb (stmt);
1038 basic_block lastbb, afterbb;
1039 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1040 edge e;
1041 lastbb = make_blocks_1 (seq, bb);
1042 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1043 return false;
1044 e = split_block (bb, stmt);
1045 /* Move e->dest to come after the new basic blocks. */
1046 afterbb = e->dest;
1047 unlink_block (afterbb);
1048 link_block (afterbb, lastbb);
1049 redirect_edge_succ (e, bb->next_bb);
1050 bb = bb->next_bb;
1051 while (bb != afterbb)
1053 struct omp_region *cur_region = NULL;
1054 int cur_omp_region_idx = 0;
1055 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1056 gcc_assert (!mer && !cur_region);
1057 add_bb_to_loop (bb, afterbb->loop_father);
1058 bb = bb->next_bb;
1060 return true;
1063 /* Find the next available discriminator value for LOCUS. The
1064 discriminator distinguishes among several basic blocks that
1065 share a common locus, allowing for more accurate sample-based
1066 profiling. */
1068 static int
1069 next_discriminator_for_locus (location_t locus)
1071 struct locus_discrim_map item;
1072 struct locus_discrim_map **slot;
1074 item.locus = locus;
1075 item.discriminator = 0;
1076 slot = discriminator_per_locus->find_slot_with_hash (
1077 &item, LOCATION_LINE (locus), INSERT);
1078 gcc_assert (slot);
1079 if (*slot == HTAB_EMPTY_ENTRY)
1081 *slot = XNEW (struct locus_discrim_map);
1082 gcc_assert (*slot);
1083 (*slot)->locus = locus;
1084 (*slot)->discriminator = 0;
1086 (*slot)->discriminator++;
1087 return (*slot)->discriminator;
1090 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1092 static bool
1093 same_line_p (location_t locus1, location_t locus2)
1095 expanded_location from, to;
1097 if (locus1 == locus2)
1098 return true;
1100 from = expand_location (locus1);
1101 to = expand_location (locus2);
1103 if (from.line != to.line)
1104 return false;
1105 if (from.file == to.file)
1106 return true;
1107 return (from.file != NULL
1108 && to.file != NULL
1109 && filename_cmp (from.file, to.file) == 0);
1112 /* Assign discriminators to each basic block. */
1114 static void
1115 assign_discriminators (void)
1117 basic_block bb;
1119 FOR_EACH_BB_FN (bb, cfun)
1121 edge e;
1122 edge_iterator ei;
1123 gimple last = last_stmt (bb);
1124 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1126 if (locus == UNKNOWN_LOCATION)
1127 continue;
1129 FOR_EACH_EDGE (e, ei, bb->succs)
1131 gimple first = first_non_label_stmt (e->dest);
1132 gimple last = last_stmt (e->dest);
1133 if ((first && same_line_p (locus, gimple_location (first)))
1134 || (last && same_line_p (locus, gimple_location (last))))
1136 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1137 bb->discriminator = next_discriminator_for_locus (locus);
1138 else
1139 e->dest->discriminator = next_discriminator_for_locus (locus);
1145 /* Create the edges for a GIMPLE_COND starting at block BB. */
1147 static void
1148 make_cond_expr_edges (basic_block bb)
1150 gcond *entry = as_a <gcond *> (last_stmt (bb));
1151 gimple then_stmt, else_stmt;
1152 basic_block then_bb, else_bb;
1153 tree then_label, else_label;
1154 edge e;
1156 gcc_assert (entry);
1157 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1159 /* Entry basic blocks for each component. */
1160 then_label = gimple_cond_true_label (entry);
1161 else_label = gimple_cond_false_label (entry);
1162 then_bb = label_to_block (then_label);
1163 else_bb = label_to_block (else_label);
1164 then_stmt = first_stmt (then_bb);
1165 else_stmt = first_stmt (else_bb);
1167 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1168 e->goto_locus = gimple_location (then_stmt);
1169 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1170 if (e)
1171 e->goto_locus = gimple_location (else_stmt);
1173 /* We do not need the labels anymore. */
1174 gimple_cond_set_true_label (entry, NULL_TREE);
1175 gimple_cond_set_false_label (entry, NULL_TREE);
1179 /* Called for each element in the hash table (P) as we delete the
1180 edge to cases hash table.
1182 Clear all the TREE_CHAINs to prevent problems with copying of
1183 SWITCH_EXPRs and structure sharing rules, then free the hash table
1184 element. */
1186 bool
1187 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1189 tree t, next;
1191 for (t = value; t; t = next)
1193 next = CASE_CHAIN (t);
1194 CASE_CHAIN (t) = NULL;
1197 return true;
1200 /* Start recording information mapping edges to case labels. */
1202 void
1203 start_recording_case_labels (void)
1205 gcc_assert (edge_to_cases == NULL);
1206 edge_to_cases = new hash_map<edge, tree>;
1207 touched_switch_bbs = BITMAP_ALLOC (NULL);
1210 /* Return nonzero if we are recording information for case labels. */
1212 static bool
1213 recording_case_labels_p (void)
1215 return (edge_to_cases != NULL);
1218 /* Stop recording information mapping edges to case labels and
1219 remove any information we have recorded. */
1220 void
1221 end_recording_case_labels (void)
1223 bitmap_iterator bi;
1224 unsigned i;
1225 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1226 delete edge_to_cases;
1227 edge_to_cases = NULL;
1228 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1230 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1231 if (bb)
1233 gimple stmt = last_stmt (bb);
1234 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1235 group_case_labels_stmt (as_a <gswitch *> (stmt));
1238 BITMAP_FREE (touched_switch_bbs);
1241 /* If we are inside a {start,end}_recording_cases block, then return
1242 a chain of CASE_LABEL_EXPRs from T which reference E.
1244 Otherwise return NULL. */
1246 static tree
1247 get_cases_for_edge (edge e, gswitch *t)
1249 tree *slot;
1250 size_t i, n;
1252 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1253 chains available. Return NULL so the caller can detect this case. */
1254 if (!recording_case_labels_p ())
1255 return NULL;
1257 slot = edge_to_cases->get (e);
1258 if (slot)
1259 return *slot;
1261 /* If we did not find E in the hash table, then this must be the first
1262 time we have been queried for information about E & T. Add all the
1263 elements from T to the hash table then perform the query again. */
1265 n = gimple_switch_num_labels (t);
1266 for (i = 0; i < n; i++)
1268 tree elt = gimple_switch_label (t, i);
1269 tree lab = CASE_LABEL (elt);
1270 basic_block label_bb = label_to_block (lab);
1271 edge this_edge = find_edge (e->src, label_bb);
1273 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1274 a new chain. */
1275 tree &s = edge_to_cases->get_or_insert (this_edge);
1276 CASE_CHAIN (elt) = s;
1277 s = elt;
1280 return *edge_to_cases->get (e);
1283 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1285 static void
1286 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1288 size_t i, n;
1290 n = gimple_switch_num_labels (entry);
1292 for (i = 0; i < n; ++i)
1294 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1295 basic_block label_bb = label_to_block (lab);
1296 make_edge (bb, label_bb, 0);
1301 /* Return the basic block holding label DEST. */
1303 basic_block
1304 label_to_block_fn (struct function *ifun, tree dest)
1306 int uid = LABEL_DECL_UID (dest);
1308 /* We would die hard when faced by an undefined label. Emit a label to
1309 the very first basic block. This will hopefully make even the dataflow
1310 and undefined variable warnings quite right. */
1311 if (seen_error () && uid < 0)
1313 gimple_stmt_iterator gsi =
1314 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1315 gimple stmt;
1317 stmt = gimple_build_label (dest);
1318 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1319 uid = LABEL_DECL_UID (dest);
1321 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1322 return NULL;
1323 return (*ifun->cfg->x_label_to_block_map)[uid];
1326 /* Create edges for a goto statement at block BB. Returns true
1327 if abnormal edges should be created. */
1329 static bool
1330 make_goto_expr_edges (basic_block bb)
1332 gimple_stmt_iterator last = gsi_last_bb (bb);
1333 gimple goto_t = gsi_stmt (last);
1335 /* A simple GOTO creates normal edges. */
1336 if (simple_goto_p (goto_t))
1338 tree dest = gimple_goto_dest (goto_t);
1339 basic_block label_bb = label_to_block (dest);
1340 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1341 e->goto_locus = gimple_location (goto_t);
1342 gsi_remove (&last, true);
1343 return false;
1346 /* A computed GOTO creates abnormal edges. */
1347 return true;
1350 /* Create edges for an asm statement with labels at block BB. */
1352 static void
1353 make_gimple_asm_edges (basic_block bb)
1355 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1356 int i, n = gimple_asm_nlabels (stmt);
1358 for (i = 0; i < n; ++i)
1360 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1361 basic_block label_bb = label_to_block (label);
1362 make_edge (bb, label_bb, 0);
1366 /*---------------------------------------------------------------------------
1367 Flowgraph analysis
1368 ---------------------------------------------------------------------------*/
1370 /* Cleanup useless labels in basic blocks. This is something we wish
1371 to do early because it allows us to group case labels before creating
1372 the edges for the CFG, and it speeds up block statement iterators in
1373 all passes later on.
1374 We rerun this pass after CFG is created, to get rid of the labels that
1375 are no longer referenced. After then we do not run it any more, since
1376 (almost) no new labels should be created. */
1378 /* A map from basic block index to the leading label of that block. */
1379 static struct label_record
1381 /* The label. */
1382 tree label;
1384 /* True if the label is referenced from somewhere. */
1385 bool used;
1386 } *label_for_bb;
1388 /* Given LABEL return the first label in the same basic block. */
1390 static tree
1391 main_block_label (tree label)
1393 basic_block bb = label_to_block (label);
1394 tree main_label = label_for_bb[bb->index].label;
1396 /* label_to_block possibly inserted undefined label into the chain. */
1397 if (!main_label)
1399 label_for_bb[bb->index].label = label;
1400 main_label = label;
1403 label_for_bb[bb->index].used = true;
1404 return main_label;
1407 /* Clean up redundant labels within the exception tree. */
1409 static void
1410 cleanup_dead_labels_eh (void)
1412 eh_landing_pad lp;
1413 eh_region r;
1414 tree lab;
1415 int i;
1417 if (cfun->eh == NULL)
1418 return;
1420 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1421 if (lp && lp->post_landing_pad)
1423 lab = main_block_label (lp->post_landing_pad);
1424 if (lab != lp->post_landing_pad)
1426 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1427 EH_LANDING_PAD_NR (lab) = lp->index;
1431 FOR_ALL_EH_REGION (r)
1432 switch (r->type)
1434 case ERT_CLEANUP:
1435 case ERT_MUST_NOT_THROW:
1436 break;
1438 case ERT_TRY:
1440 eh_catch c;
1441 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1443 lab = c->label;
1444 if (lab)
1445 c->label = main_block_label (lab);
1448 break;
1450 case ERT_ALLOWED_EXCEPTIONS:
1451 lab = r->u.allowed.label;
1452 if (lab)
1453 r->u.allowed.label = main_block_label (lab);
1454 break;
1459 /* Cleanup redundant labels. This is a three-step process:
1460 1) Find the leading label for each block.
1461 2) Redirect all references to labels to the leading labels.
1462 3) Cleanup all useless labels. */
1464 void
1465 cleanup_dead_labels (void)
1467 basic_block bb;
1468 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1470 /* Find a suitable label for each block. We use the first user-defined
1471 label if there is one, or otherwise just the first label we see. */
1472 FOR_EACH_BB_FN (bb, cfun)
1474 gimple_stmt_iterator i;
1476 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1478 tree label;
1479 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1481 if (!label_stmt)
1482 break;
1484 label = gimple_label_label (label_stmt);
1486 /* If we have not yet seen a label for the current block,
1487 remember this one and see if there are more labels. */
1488 if (!label_for_bb[bb->index].label)
1490 label_for_bb[bb->index].label = label;
1491 continue;
1494 /* If we did see a label for the current block already, but it
1495 is an artificially created label, replace it if the current
1496 label is a user defined label. */
1497 if (!DECL_ARTIFICIAL (label)
1498 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1500 label_for_bb[bb->index].label = label;
1501 break;
1506 /* Now redirect all jumps/branches to the selected label.
1507 First do so for each block ending in a control statement. */
1508 FOR_EACH_BB_FN (bb, cfun)
1510 gimple stmt = last_stmt (bb);
1511 tree label, new_label;
1513 if (!stmt)
1514 continue;
1516 switch (gimple_code (stmt))
1518 case GIMPLE_COND:
1520 gcond *cond_stmt = as_a <gcond *> (stmt);
1521 label = gimple_cond_true_label (cond_stmt);
1522 if (label)
1524 new_label = main_block_label (label);
1525 if (new_label != label)
1526 gimple_cond_set_true_label (cond_stmt, new_label);
1529 label = gimple_cond_false_label (cond_stmt);
1530 if (label)
1532 new_label = main_block_label (label);
1533 if (new_label != label)
1534 gimple_cond_set_false_label (cond_stmt, new_label);
1537 break;
1539 case GIMPLE_SWITCH:
1541 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1542 size_t i, n = gimple_switch_num_labels (switch_stmt);
1544 /* Replace all destination labels. */
1545 for (i = 0; i < n; ++i)
1547 tree case_label = gimple_switch_label (switch_stmt, i);
1548 label = CASE_LABEL (case_label);
1549 new_label = main_block_label (label);
1550 if (new_label != label)
1551 CASE_LABEL (case_label) = new_label;
1553 break;
1556 case GIMPLE_ASM:
1558 gasm *asm_stmt = as_a <gasm *> (stmt);
1559 int i, n = gimple_asm_nlabels (asm_stmt);
1561 for (i = 0; i < n; ++i)
1563 tree cons = gimple_asm_label_op (asm_stmt, i);
1564 tree label = main_block_label (TREE_VALUE (cons));
1565 TREE_VALUE (cons) = label;
1567 break;
1570 /* We have to handle gotos until they're removed, and we don't
1571 remove them until after we've created the CFG edges. */
1572 case GIMPLE_GOTO:
1573 if (!computed_goto_p (stmt))
1575 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1576 label = gimple_goto_dest (goto_stmt);
1577 new_label = main_block_label (label);
1578 if (new_label != label)
1579 gimple_goto_set_dest (goto_stmt, new_label);
1581 break;
1583 case GIMPLE_TRANSACTION:
1585 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1586 tree label = gimple_transaction_label (trans_stmt);
1587 if (label)
1589 tree new_label = main_block_label (label);
1590 if (new_label != label)
1591 gimple_transaction_set_label (trans_stmt, new_label);
1594 break;
1596 default:
1597 break;
1601 /* Do the same for the exception region tree labels. */
1602 cleanup_dead_labels_eh ();
1604 /* Finally, purge dead labels. All user-defined labels and labels that
1605 can be the target of non-local gotos and labels which have their
1606 address taken are preserved. */
1607 FOR_EACH_BB_FN (bb, cfun)
1609 gimple_stmt_iterator i;
1610 tree label_for_this_bb = label_for_bb[bb->index].label;
1612 if (!label_for_this_bb)
1613 continue;
1615 /* If the main label of the block is unused, we may still remove it. */
1616 if (!label_for_bb[bb->index].used)
1617 label_for_this_bb = NULL;
1619 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1621 tree label;
1622 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1624 if (!label_stmt)
1625 break;
1627 label = gimple_label_label (label_stmt);
1629 if (label == label_for_this_bb
1630 || !DECL_ARTIFICIAL (label)
1631 || DECL_NONLOCAL (label)
1632 || FORCED_LABEL (label))
1633 gsi_next (&i);
1634 else
1635 gsi_remove (&i, true);
1639 free (label_for_bb);
1642 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1643 the ones jumping to the same label.
1644 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1646 void
1647 group_case_labels_stmt (gswitch *stmt)
1649 int old_size = gimple_switch_num_labels (stmt);
1650 int i, j, new_size = old_size;
1651 basic_block default_bb = NULL;
1653 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1655 /* Look for possible opportunities to merge cases. */
1656 i = 1;
1657 while (i < old_size)
1659 tree base_case, base_high;
1660 basic_block base_bb;
1662 base_case = gimple_switch_label (stmt, i);
1664 gcc_assert (base_case);
1665 base_bb = label_to_block (CASE_LABEL (base_case));
1667 /* Discard cases that have the same destination as the
1668 default case. */
1669 if (base_bb == default_bb)
1671 gimple_switch_set_label (stmt, i, NULL_TREE);
1672 i++;
1673 new_size--;
1674 continue;
1677 base_high = CASE_HIGH (base_case)
1678 ? CASE_HIGH (base_case)
1679 : CASE_LOW (base_case);
1680 i++;
1682 /* Try to merge case labels. Break out when we reach the end
1683 of the label vector or when we cannot merge the next case
1684 label with the current one. */
1685 while (i < old_size)
1687 tree merge_case = gimple_switch_label (stmt, i);
1688 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1689 wide_int bhp1 = wi::add (base_high, 1);
1691 /* Merge the cases if they jump to the same place,
1692 and their ranges are consecutive. */
1693 if (merge_bb == base_bb
1694 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1696 base_high = CASE_HIGH (merge_case) ?
1697 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1698 CASE_HIGH (base_case) = base_high;
1699 gimple_switch_set_label (stmt, i, NULL_TREE);
1700 new_size--;
1701 i++;
1703 else
1704 break;
1708 /* Compress the case labels in the label vector, and adjust the
1709 length of the vector. */
1710 for (i = 0, j = 0; i < new_size; i++)
1712 while (! gimple_switch_label (stmt, j))
1713 j++;
1714 gimple_switch_set_label (stmt, i,
1715 gimple_switch_label (stmt, j++));
1718 gcc_assert (new_size <= old_size);
1719 gimple_switch_set_num_labels (stmt, new_size);
1722 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1723 and scan the sorted vector of cases. Combine the ones jumping to the
1724 same label. */
1726 void
1727 group_case_labels (void)
1729 basic_block bb;
1731 FOR_EACH_BB_FN (bb, cfun)
1733 gimple stmt = last_stmt (bb);
1734 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1735 group_case_labels_stmt (as_a <gswitch *> (stmt));
1739 /* Checks whether we can merge block B into block A. */
1741 static bool
1742 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1744 gimple stmt;
1746 if (!single_succ_p (a))
1747 return false;
1749 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1750 return false;
1752 if (single_succ (a) != b)
1753 return false;
1755 if (!single_pred_p (b))
1756 return false;
1758 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1759 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1760 return false;
1762 /* If A ends by a statement causing exceptions or something similar, we
1763 cannot merge the blocks. */
1764 stmt = last_stmt (a);
1765 if (stmt && stmt_ends_bb_p (stmt))
1766 return false;
1768 /* Do not allow a block with only a non-local label to be merged. */
1769 if (stmt)
1770 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1771 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1772 return false;
1774 /* Examine the labels at the beginning of B. */
1775 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1776 gsi_next (&gsi))
1778 tree lab;
1779 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1780 if (!label_stmt)
1781 break;
1782 lab = gimple_label_label (label_stmt);
1784 /* Do not remove user forced labels or for -O0 any user labels. */
1785 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1786 return false;
1789 /* Protect simple loop latches. We only want to avoid merging
1790 the latch with the loop header or with a block in another
1791 loop in this case. */
1792 if (current_loops
1793 && b->loop_father->latch == b
1794 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1795 && (b->loop_father->header == a
1796 || b->loop_father != a->loop_father))
1797 return false;
1799 /* It must be possible to eliminate all phi nodes in B. If ssa form
1800 is not up-to-date and a name-mapping is registered, we cannot eliminate
1801 any phis. Symbols marked for renaming are never a problem though. */
1802 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1803 gsi_next (&gsi))
1805 gphi *phi = gsi.phi ();
1806 /* Technically only new names matter. */
1807 if (name_registered_for_update_p (PHI_RESULT (phi)))
1808 return false;
1811 /* When not optimizing, don't merge if we'd lose goto_locus. */
1812 if (!optimize
1813 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1815 location_t goto_locus = single_succ_edge (a)->goto_locus;
1816 gimple_stmt_iterator prev, next;
1817 prev = gsi_last_nondebug_bb (a);
1818 next = gsi_after_labels (b);
1819 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1820 gsi_next_nondebug (&next);
1821 if ((gsi_end_p (prev)
1822 || gimple_location (gsi_stmt (prev)) != goto_locus)
1823 && (gsi_end_p (next)
1824 || gimple_location (gsi_stmt (next)) != goto_locus))
1825 return false;
1828 return true;
1831 /* Replaces all uses of NAME by VAL. */
1833 void
1834 replace_uses_by (tree name, tree val)
1836 imm_use_iterator imm_iter;
1837 use_operand_p use;
1838 gimple stmt;
1839 edge e;
1841 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1843 /* Mark the block if we change the last stmt in it. */
1844 if (cfgcleanup_altered_bbs
1845 && stmt_ends_bb_p (stmt))
1846 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1848 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1850 replace_exp (use, val);
1852 if (gimple_code (stmt) == GIMPLE_PHI)
1854 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1855 PHI_ARG_INDEX_FROM_USE (use));
1856 if (e->flags & EDGE_ABNORMAL
1857 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1859 /* This can only occur for virtual operands, since
1860 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1861 would prevent replacement. */
1862 gcc_checking_assert (virtual_operand_p (name));
1863 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1868 if (gimple_code (stmt) != GIMPLE_PHI)
1870 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1871 gimple orig_stmt = stmt;
1872 size_t i;
1874 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1875 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1876 only change sth from non-invariant to invariant, and only
1877 when propagating constants. */
1878 if (is_gimple_min_invariant (val))
1879 for (i = 0; i < gimple_num_ops (stmt); i++)
1881 tree op = gimple_op (stmt, i);
1882 /* Operands may be empty here. For example, the labels
1883 of a GIMPLE_COND are nulled out following the creation
1884 of the corresponding CFG edges. */
1885 if (op && TREE_CODE (op) == ADDR_EXPR)
1886 recompute_tree_invariant_for_addr_expr (op);
1889 if (fold_stmt (&gsi))
1890 stmt = gsi_stmt (gsi);
1892 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1893 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1895 update_stmt (stmt);
1899 gcc_checking_assert (has_zero_uses (name));
1901 /* Also update the trees stored in loop structures. */
1902 if (current_loops)
1904 struct loop *loop;
1906 FOR_EACH_LOOP (loop, 0)
1908 substitute_in_loop_info (loop, name, val);
1913 /* Merge block B into block A. */
1915 static void
1916 gimple_merge_blocks (basic_block a, basic_block b)
1918 gimple_stmt_iterator last, gsi;
1919 gphi_iterator psi;
1921 if (dump_file)
1922 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1924 /* Remove all single-valued PHI nodes from block B of the form
1925 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1926 gsi = gsi_last_bb (a);
1927 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1929 gimple phi = gsi_stmt (psi);
1930 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1931 gimple copy;
1932 bool may_replace_uses = (virtual_operand_p (def)
1933 || may_propagate_copy (def, use));
1935 /* In case we maintain loop closed ssa form, do not propagate arguments
1936 of loop exit phi nodes. */
1937 if (current_loops
1938 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1939 && !virtual_operand_p (def)
1940 && TREE_CODE (use) == SSA_NAME
1941 && a->loop_father != b->loop_father)
1942 may_replace_uses = false;
1944 if (!may_replace_uses)
1946 gcc_assert (!virtual_operand_p (def));
1948 /* Note that just emitting the copies is fine -- there is no problem
1949 with ordering of phi nodes. This is because A is the single
1950 predecessor of B, therefore results of the phi nodes cannot
1951 appear as arguments of the phi nodes. */
1952 copy = gimple_build_assign (def, use);
1953 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1954 remove_phi_node (&psi, false);
1956 else
1958 /* If we deal with a PHI for virtual operands, we can simply
1959 propagate these without fussing with folding or updating
1960 the stmt. */
1961 if (virtual_operand_p (def))
1963 imm_use_iterator iter;
1964 use_operand_p use_p;
1965 gimple stmt;
1967 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1968 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1969 SET_USE (use_p, use);
1971 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1972 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1974 else
1975 replace_uses_by (def, use);
1977 remove_phi_node (&psi, true);
1981 /* Ensure that B follows A. */
1982 move_block_after (b, a);
1984 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1985 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1987 /* Remove labels from B and set gimple_bb to A for other statements. */
1988 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1990 gimple stmt = gsi_stmt (gsi);
1991 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1993 tree label = gimple_label_label (label_stmt);
1994 int lp_nr;
1996 gsi_remove (&gsi, false);
1998 /* Now that we can thread computed gotos, we might have
1999 a situation where we have a forced label in block B
2000 However, the label at the start of block B might still be
2001 used in other ways (think about the runtime checking for
2002 Fortran assigned gotos). So we can not just delete the
2003 label. Instead we move the label to the start of block A. */
2004 if (FORCED_LABEL (label))
2006 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2007 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2009 /* Other user labels keep around in a form of a debug stmt. */
2010 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2012 gimple dbg = gimple_build_debug_bind (label,
2013 integer_zero_node,
2014 stmt);
2015 gimple_debug_bind_reset_value (dbg);
2016 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2019 lp_nr = EH_LANDING_PAD_NR (label);
2020 if (lp_nr)
2022 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2023 lp->post_landing_pad = NULL;
2026 else
2028 gimple_set_bb (stmt, a);
2029 gsi_next (&gsi);
2033 /* When merging two BBs, if their counts are different, the larger count
2034 is selected as the new bb count. This is to handle inconsistent
2035 profiles. */
2036 if (a->loop_father == b->loop_father)
2038 a->count = MAX (a->count, b->count);
2039 a->frequency = MAX (a->frequency, b->frequency);
2042 /* Merge the sequences. */
2043 last = gsi_last_bb (a);
2044 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2045 set_bb_seq (b, NULL);
2047 if (cfgcleanup_altered_bbs)
2048 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2052 /* Return the one of two successors of BB that is not reachable by a
2053 complex edge, if there is one. Else, return BB. We use
2054 this in optimizations that use post-dominators for their heuristics,
2055 to catch the cases in C++ where function calls are involved. */
2057 basic_block
2058 single_noncomplex_succ (basic_block bb)
2060 edge e0, e1;
2061 if (EDGE_COUNT (bb->succs) != 2)
2062 return bb;
2064 e0 = EDGE_SUCC (bb, 0);
2065 e1 = EDGE_SUCC (bb, 1);
2066 if (e0->flags & EDGE_COMPLEX)
2067 return e1->dest;
2068 if (e1->flags & EDGE_COMPLEX)
2069 return e0->dest;
2071 return bb;
2074 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2076 void
2077 notice_special_calls (gcall *call)
2079 int flags = gimple_call_flags (call);
2081 if (flags & ECF_MAY_BE_ALLOCA)
2082 cfun->calls_alloca = true;
2083 if (flags & ECF_RETURNS_TWICE)
2084 cfun->calls_setjmp = true;
2088 /* Clear flags set by notice_special_calls. Used by dead code removal
2089 to update the flags. */
2091 void
2092 clear_special_calls (void)
2094 cfun->calls_alloca = false;
2095 cfun->calls_setjmp = false;
2098 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2100 static void
2101 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2103 /* Since this block is no longer reachable, we can just delete all
2104 of its PHI nodes. */
2105 remove_phi_nodes (bb);
2107 /* Remove edges to BB's successors. */
2108 while (EDGE_COUNT (bb->succs) > 0)
2109 remove_edge (EDGE_SUCC (bb, 0));
2113 /* Remove statements of basic block BB. */
2115 static void
2116 remove_bb (basic_block bb)
2118 gimple_stmt_iterator i;
2120 if (dump_file)
2122 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2123 if (dump_flags & TDF_DETAILS)
2125 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2126 fprintf (dump_file, "\n");
2130 if (current_loops)
2132 struct loop *loop = bb->loop_father;
2134 /* If a loop gets removed, clean up the information associated
2135 with it. */
2136 if (loop->latch == bb
2137 || loop->header == bb)
2138 free_numbers_of_iterations_estimates_loop (loop);
2141 /* Remove all the instructions in the block. */
2142 if (bb_seq (bb) != NULL)
2144 /* Walk backwards so as to get a chance to substitute all
2145 released DEFs into debug stmts. See
2146 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2147 details. */
2148 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2150 gimple stmt = gsi_stmt (i);
2151 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2152 if (label_stmt
2153 && (FORCED_LABEL (gimple_label_label (label_stmt))
2154 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2156 basic_block new_bb;
2157 gimple_stmt_iterator new_gsi;
2159 /* A non-reachable non-local label may still be referenced.
2160 But it no longer needs to carry the extra semantics of
2161 non-locality. */
2162 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2164 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2165 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2168 new_bb = bb->prev_bb;
2169 new_gsi = gsi_start_bb (new_bb);
2170 gsi_remove (&i, false);
2171 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2173 else
2175 /* Release SSA definitions if we are in SSA. Note that we
2176 may be called when not in SSA. For example,
2177 final_cleanup calls this function via
2178 cleanup_tree_cfg. */
2179 if (gimple_in_ssa_p (cfun))
2180 release_defs (stmt);
2182 gsi_remove (&i, true);
2185 if (gsi_end_p (i))
2186 i = gsi_last_bb (bb);
2187 else
2188 gsi_prev (&i);
2192 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2193 bb->il.gimple.seq = NULL;
2194 bb->il.gimple.phi_nodes = NULL;
2198 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2199 predicate VAL, return the edge that will be taken out of the block.
2200 If VAL does not match a unique edge, NULL is returned. */
2202 edge
2203 find_taken_edge (basic_block bb, tree val)
2205 gimple stmt;
2207 stmt = last_stmt (bb);
2209 gcc_assert (stmt);
2210 gcc_assert (is_ctrl_stmt (stmt));
2212 if (val == NULL)
2213 return NULL;
2215 if (!is_gimple_min_invariant (val))
2216 return NULL;
2218 if (gimple_code (stmt) == GIMPLE_COND)
2219 return find_taken_edge_cond_expr (bb, val);
2221 if (gimple_code (stmt) == GIMPLE_SWITCH)
2222 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2224 if (computed_goto_p (stmt))
2226 /* Only optimize if the argument is a label, if the argument is
2227 not a label then we can not construct a proper CFG.
2229 It may be the case that we only need to allow the LABEL_REF to
2230 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2231 appear inside a LABEL_EXPR just to be safe. */
2232 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2233 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2234 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2235 return NULL;
2238 gcc_unreachable ();
2241 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2242 statement, determine which of the outgoing edges will be taken out of the
2243 block. Return NULL if either edge may be taken. */
2245 static edge
2246 find_taken_edge_computed_goto (basic_block bb, tree val)
2248 basic_block dest;
2249 edge e = NULL;
2251 dest = label_to_block (val);
2252 if (dest)
2254 e = find_edge (bb, dest);
2255 gcc_assert (e != NULL);
2258 return e;
2261 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2262 statement, determine which of the two edges will be taken out of the
2263 block. Return NULL if either edge may be taken. */
2265 static edge
2266 find_taken_edge_cond_expr (basic_block bb, tree val)
2268 edge true_edge, false_edge;
2270 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2272 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2273 return (integer_zerop (val) ? false_edge : true_edge);
2276 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2277 statement, determine which edge will be taken out of the block. Return
2278 NULL if any edge may be taken. */
2280 static edge
2281 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2282 tree val)
2284 basic_block dest_bb;
2285 edge e;
2286 tree taken_case;
2288 taken_case = find_case_label_for_value (switch_stmt, val);
2289 dest_bb = label_to_block (CASE_LABEL (taken_case));
2291 e = find_edge (bb, dest_bb);
2292 gcc_assert (e);
2293 return e;
2297 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2298 We can make optimal use here of the fact that the case labels are
2299 sorted: We can do a binary search for a case matching VAL. */
2301 static tree
2302 find_case_label_for_value (gswitch *switch_stmt, tree val)
2304 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2305 tree default_case = gimple_switch_default_label (switch_stmt);
2307 for (low = 0, high = n; high - low > 1; )
2309 size_t i = (high + low) / 2;
2310 tree t = gimple_switch_label (switch_stmt, i);
2311 int cmp;
2313 /* Cache the result of comparing CASE_LOW and val. */
2314 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2316 if (cmp > 0)
2317 high = i;
2318 else
2319 low = i;
2321 if (CASE_HIGH (t) == NULL)
2323 /* A singe-valued case label. */
2324 if (cmp == 0)
2325 return t;
2327 else
2329 /* A case range. We can only handle integer ranges. */
2330 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2331 return t;
2335 return default_case;
2339 /* Dump a basic block on stderr. */
2341 void
2342 gimple_debug_bb (basic_block bb)
2344 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2348 /* Dump basic block with index N on stderr. */
2350 basic_block
2351 gimple_debug_bb_n (int n)
2353 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2354 return BASIC_BLOCK_FOR_FN (cfun, n);
2358 /* Dump the CFG on stderr.
2360 FLAGS are the same used by the tree dumping functions
2361 (see TDF_* in dumpfile.h). */
2363 void
2364 gimple_debug_cfg (int flags)
2366 gimple_dump_cfg (stderr, flags);
2370 /* Dump the program showing basic block boundaries on the given FILE.
2372 FLAGS are the same used by the tree dumping functions (see TDF_* in
2373 tree.h). */
2375 void
2376 gimple_dump_cfg (FILE *file, int flags)
2378 if (flags & TDF_DETAILS)
2380 dump_function_header (file, current_function_decl, flags);
2381 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2382 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2383 last_basic_block_for_fn (cfun));
2385 brief_dump_cfg (file, flags | TDF_COMMENT);
2386 fprintf (file, "\n");
2389 if (flags & TDF_STATS)
2390 dump_cfg_stats (file);
2392 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2396 /* Dump CFG statistics on FILE. */
2398 void
2399 dump_cfg_stats (FILE *file)
2401 static long max_num_merged_labels = 0;
2402 unsigned long size, total = 0;
2403 long num_edges;
2404 basic_block bb;
2405 const char * const fmt_str = "%-30s%-13s%12s\n";
2406 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2407 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2408 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2409 const char *funcname = current_function_name ();
2411 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2413 fprintf (file, "---------------------------------------------------------\n");
2414 fprintf (file, fmt_str, "", " Number of ", "Memory");
2415 fprintf (file, fmt_str, "", " instances ", "used ");
2416 fprintf (file, "---------------------------------------------------------\n");
2418 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2419 total += size;
2420 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2421 SCALE (size), LABEL (size));
2423 num_edges = 0;
2424 FOR_EACH_BB_FN (bb, cfun)
2425 num_edges += EDGE_COUNT (bb->succs);
2426 size = num_edges * sizeof (struct edge_def);
2427 total += size;
2428 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2430 fprintf (file, "---------------------------------------------------------\n");
2431 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2432 LABEL (total));
2433 fprintf (file, "---------------------------------------------------------\n");
2434 fprintf (file, "\n");
2436 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2437 max_num_merged_labels = cfg_stats.num_merged_labels;
2439 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2440 cfg_stats.num_merged_labels, max_num_merged_labels);
2442 fprintf (file, "\n");
2446 /* Dump CFG statistics on stderr. Keep extern so that it's always
2447 linked in the final executable. */
2449 DEBUG_FUNCTION void
2450 debug_cfg_stats (void)
2452 dump_cfg_stats (stderr);
2455 /*---------------------------------------------------------------------------
2456 Miscellaneous helpers
2457 ---------------------------------------------------------------------------*/
2459 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2460 flow. Transfers of control flow associated with EH are excluded. */
2462 static bool
2463 call_can_make_abnormal_goto (gimple t)
2465 /* If the function has no non-local labels, then a call cannot make an
2466 abnormal transfer of control. */
2467 if (!cfun->has_nonlocal_label
2468 && !cfun->calls_setjmp)
2469 return false;
2471 /* Likewise if the call has no side effects. */
2472 if (!gimple_has_side_effects (t))
2473 return false;
2475 /* Likewise if the called function is leaf. */
2476 if (gimple_call_flags (t) & ECF_LEAF)
2477 return false;
2479 return true;
2483 /* Return true if T can make an abnormal transfer of control flow.
2484 Transfers of control flow associated with EH are excluded. */
2486 bool
2487 stmt_can_make_abnormal_goto (gimple t)
2489 if (computed_goto_p (t))
2490 return true;
2491 if (is_gimple_call (t))
2492 return call_can_make_abnormal_goto (t);
2493 return false;
2497 /* Return true if T represents a stmt that always transfers control. */
2499 bool
2500 is_ctrl_stmt (gimple t)
2502 switch (gimple_code (t))
2504 case GIMPLE_COND:
2505 case GIMPLE_SWITCH:
2506 case GIMPLE_GOTO:
2507 case GIMPLE_RETURN:
2508 case GIMPLE_RESX:
2509 return true;
2510 default:
2511 return false;
2516 /* Return true if T is a statement that may alter the flow of control
2517 (e.g., a call to a non-returning function). */
2519 bool
2520 is_ctrl_altering_stmt (gimple t)
2522 gcc_assert (t);
2524 switch (gimple_code (t))
2526 case GIMPLE_CALL:
2527 /* Per stmt call flag indicates whether the call could alter
2528 controlflow. */
2529 if (gimple_call_ctrl_altering_p (t))
2530 return true;
2531 break;
2533 case GIMPLE_EH_DISPATCH:
2534 /* EH_DISPATCH branches to the individual catch handlers at
2535 this level of a try or allowed-exceptions region. It can
2536 fallthru to the next statement as well. */
2537 return true;
2539 case GIMPLE_ASM:
2540 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2541 return true;
2542 break;
2544 CASE_GIMPLE_OMP:
2545 /* OpenMP directives alter control flow. */
2546 return true;
2548 case GIMPLE_TRANSACTION:
2549 /* A transaction start alters control flow. */
2550 return true;
2552 default:
2553 break;
2556 /* If a statement can throw, it alters control flow. */
2557 return stmt_can_throw_internal (t);
2561 /* Return true if T is a simple local goto. */
2563 bool
2564 simple_goto_p (gimple t)
2566 return (gimple_code (t) == GIMPLE_GOTO
2567 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2571 /* Return true if STMT should start a new basic block. PREV_STMT is
2572 the statement preceding STMT. It is used when STMT is a label or a
2573 case label. Labels should only start a new basic block if their
2574 previous statement wasn't a label. Otherwise, sequence of labels
2575 would generate unnecessary basic blocks that only contain a single
2576 label. */
2578 static inline bool
2579 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2581 if (stmt == NULL)
2582 return false;
2584 /* Labels start a new basic block only if the preceding statement
2585 wasn't a label of the same type. This prevents the creation of
2586 consecutive blocks that have nothing but a single label. */
2587 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2589 /* Nonlocal and computed GOTO targets always start a new block. */
2590 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2591 || FORCED_LABEL (gimple_label_label (label_stmt)))
2592 return true;
2594 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2596 if (DECL_NONLOCAL (gimple_label_label (
2597 as_a <glabel *> (prev_stmt))))
2598 return true;
2600 cfg_stats.num_merged_labels++;
2601 return false;
2603 else
2604 return true;
2606 else if (gimple_code (stmt) == GIMPLE_CALL
2607 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2608 /* setjmp acts similar to a nonlocal GOTO target and thus should
2609 start a new block. */
2610 return true;
2612 return false;
2616 /* Return true if T should end a basic block. */
2618 bool
2619 stmt_ends_bb_p (gimple t)
2621 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2624 /* Remove block annotations and other data structures. */
2626 void
2627 delete_tree_cfg_annotations (void)
2629 vec_free (label_to_block_map_for_fn (cfun));
2633 /* Return the first statement in basic block BB. */
2635 gimple
2636 first_stmt (basic_block bb)
2638 gimple_stmt_iterator i = gsi_start_bb (bb);
2639 gimple stmt = NULL;
2641 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2643 gsi_next (&i);
2644 stmt = NULL;
2646 return stmt;
2649 /* Return the first non-label statement in basic block BB. */
2651 static gimple
2652 first_non_label_stmt (basic_block bb)
2654 gimple_stmt_iterator i = gsi_start_bb (bb);
2655 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2656 gsi_next (&i);
2657 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2660 /* Return the last statement in basic block BB. */
2662 gimple
2663 last_stmt (basic_block bb)
2665 gimple_stmt_iterator i = gsi_last_bb (bb);
2666 gimple stmt = NULL;
2668 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2670 gsi_prev (&i);
2671 stmt = NULL;
2673 return stmt;
2676 /* Return the last statement of an otherwise empty block. Return NULL
2677 if the block is totally empty, or if it contains more than one
2678 statement. */
2680 gimple
2681 last_and_only_stmt (basic_block bb)
2683 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2684 gimple last, prev;
2686 if (gsi_end_p (i))
2687 return NULL;
2689 last = gsi_stmt (i);
2690 gsi_prev_nondebug (&i);
2691 if (gsi_end_p (i))
2692 return last;
2694 /* Empty statements should no longer appear in the instruction stream.
2695 Everything that might have appeared before should be deleted by
2696 remove_useless_stmts, and the optimizers should just gsi_remove
2697 instead of smashing with build_empty_stmt.
2699 Thus the only thing that should appear here in a block containing
2700 one executable statement is a label. */
2701 prev = gsi_stmt (i);
2702 if (gimple_code (prev) == GIMPLE_LABEL)
2703 return last;
2704 else
2705 return NULL;
2708 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2710 static void
2711 reinstall_phi_args (edge new_edge, edge old_edge)
2713 edge_var_map *vm;
2714 int i;
2715 gphi_iterator phis;
2717 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2718 if (!v)
2719 return;
2721 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2722 v->iterate (i, &vm) && !gsi_end_p (phis);
2723 i++, gsi_next (&phis))
2725 gphi *phi = phis.phi ();
2726 tree result = redirect_edge_var_map_result (vm);
2727 tree arg = redirect_edge_var_map_def (vm);
2729 gcc_assert (result == gimple_phi_result (phi));
2731 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2734 redirect_edge_var_map_clear (old_edge);
2737 /* Returns the basic block after which the new basic block created
2738 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2739 near its "logical" location. This is of most help to humans looking
2740 at debugging dumps. */
2742 basic_block
2743 split_edge_bb_loc (edge edge_in)
2745 basic_block dest = edge_in->dest;
2746 basic_block dest_prev = dest->prev_bb;
2748 if (dest_prev)
2750 edge e = find_edge (dest_prev, dest);
2751 if (e && !(e->flags & EDGE_COMPLEX))
2752 return edge_in->src;
2754 return dest_prev;
2757 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2758 Abort on abnormal edges. */
2760 static basic_block
2761 gimple_split_edge (edge edge_in)
2763 basic_block new_bb, after_bb, dest;
2764 edge new_edge, e;
2766 /* Abnormal edges cannot be split. */
2767 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2769 dest = edge_in->dest;
2771 after_bb = split_edge_bb_loc (edge_in);
2773 new_bb = create_empty_bb (after_bb);
2774 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2775 new_bb->count = edge_in->count;
2776 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2777 new_edge->probability = REG_BR_PROB_BASE;
2778 new_edge->count = edge_in->count;
2780 e = redirect_edge_and_branch (edge_in, new_bb);
2781 gcc_assert (e == edge_in);
2782 reinstall_phi_args (new_edge, e);
2784 return new_bb;
2788 /* Verify properties of the address expression T with base object BASE. */
2790 static tree
2791 verify_address (tree t, tree base)
2793 bool old_constant;
2794 bool old_side_effects;
2795 bool new_constant;
2796 bool new_side_effects;
2798 old_constant = TREE_CONSTANT (t);
2799 old_side_effects = TREE_SIDE_EFFECTS (t);
2801 recompute_tree_invariant_for_addr_expr (t);
2802 new_side_effects = TREE_SIDE_EFFECTS (t);
2803 new_constant = TREE_CONSTANT (t);
2805 if (old_constant != new_constant)
2807 error ("constant not recomputed when ADDR_EXPR changed");
2808 return t;
2810 if (old_side_effects != new_side_effects)
2812 error ("side effects not recomputed when ADDR_EXPR changed");
2813 return t;
2816 if (!(TREE_CODE (base) == VAR_DECL
2817 || TREE_CODE (base) == PARM_DECL
2818 || TREE_CODE (base) == RESULT_DECL))
2819 return NULL_TREE;
2821 if (DECL_GIMPLE_REG_P (base))
2823 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2824 return base;
2827 return NULL_TREE;
2830 /* Callback for walk_tree, check that all elements with address taken are
2831 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2832 inside a PHI node. */
2834 static tree
2835 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2837 tree t = *tp, x;
2839 if (TYPE_P (t))
2840 *walk_subtrees = 0;
2842 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2843 #define CHECK_OP(N, MSG) \
2844 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2845 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2847 switch (TREE_CODE (t))
2849 case SSA_NAME:
2850 if (SSA_NAME_IN_FREE_LIST (t))
2852 error ("SSA name in freelist but still referenced");
2853 return *tp;
2855 break;
2857 case INDIRECT_REF:
2858 error ("INDIRECT_REF in gimple IL");
2859 return t;
2861 case MEM_REF:
2862 x = TREE_OPERAND (t, 0);
2863 if (!POINTER_TYPE_P (TREE_TYPE (x))
2864 || !is_gimple_mem_ref_addr (x))
2866 error ("invalid first operand of MEM_REF");
2867 return x;
2869 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2870 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2872 error ("invalid offset operand of MEM_REF");
2873 return TREE_OPERAND (t, 1);
2875 if (TREE_CODE (x) == ADDR_EXPR
2876 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2877 return x;
2878 *walk_subtrees = 0;
2879 break;
2881 case ASSERT_EXPR:
2882 x = fold (ASSERT_EXPR_COND (t));
2883 if (x == boolean_false_node)
2885 error ("ASSERT_EXPR with an always-false condition");
2886 return *tp;
2888 break;
2890 case MODIFY_EXPR:
2891 error ("MODIFY_EXPR not expected while having tuples");
2892 return *tp;
2894 case ADDR_EXPR:
2896 tree tem;
2898 gcc_assert (is_gimple_address (t));
2900 /* Skip any references (they will be checked when we recurse down the
2901 tree) and ensure that any variable used as a prefix is marked
2902 addressable. */
2903 for (x = TREE_OPERAND (t, 0);
2904 handled_component_p (x);
2905 x = TREE_OPERAND (x, 0))
2908 if ((tem = verify_address (t, x)))
2909 return tem;
2911 if (!(TREE_CODE (x) == VAR_DECL
2912 || TREE_CODE (x) == PARM_DECL
2913 || TREE_CODE (x) == RESULT_DECL))
2914 return NULL;
2916 if (!TREE_ADDRESSABLE (x))
2918 error ("address taken, but ADDRESSABLE bit not set");
2919 return x;
2922 break;
2925 case COND_EXPR:
2926 x = COND_EXPR_COND (t);
2927 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2929 error ("non-integral used in condition");
2930 return x;
2932 if (!is_gimple_condexpr (x))
2934 error ("invalid conditional operand");
2935 return x;
2937 break;
2939 case NON_LVALUE_EXPR:
2940 case TRUTH_NOT_EXPR:
2941 gcc_unreachable ();
2943 CASE_CONVERT:
2944 case FIX_TRUNC_EXPR:
2945 case FLOAT_EXPR:
2946 case NEGATE_EXPR:
2947 case ABS_EXPR:
2948 case BIT_NOT_EXPR:
2949 CHECK_OP (0, "invalid operand to unary operator");
2950 break;
2952 case REALPART_EXPR:
2953 case IMAGPART_EXPR:
2954 case BIT_FIELD_REF:
2955 if (!is_gimple_reg_type (TREE_TYPE (t)))
2957 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2958 return t;
2961 if (TREE_CODE (t) == BIT_FIELD_REF)
2963 tree t0 = TREE_OPERAND (t, 0);
2964 tree t1 = TREE_OPERAND (t, 1);
2965 tree t2 = TREE_OPERAND (t, 2);
2966 if (!tree_fits_uhwi_p (t1)
2967 || !tree_fits_uhwi_p (t2))
2969 error ("invalid position or size operand to BIT_FIELD_REF");
2970 return t;
2972 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2973 && (TYPE_PRECISION (TREE_TYPE (t))
2974 != tree_to_uhwi (t1)))
2976 error ("integral result type precision does not match "
2977 "field size of BIT_FIELD_REF");
2978 return t;
2980 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2981 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2982 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2983 != tree_to_uhwi (t1)))
2985 error ("mode precision of non-integral result does not "
2986 "match field size of BIT_FIELD_REF");
2987 return t;
2989 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2990 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2991 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2993 error ("position plus size exceeds size of referenced object in "
2994 "BIT_FIELD_REF");
2995 return t;
2998 t = TREE_OPERAND (t, 0);
3000 /* Fall-through. */
3001 case COMPONENT_REF:
3002 case ARRAY_REF:
3003 case ARRAY_RANGE_REF:
3004 case VIEW_CONVERT_EXPR:
3005 /* We have a nest of references. Verify that each of the operands
3006 that determine where to reference is either a constant or a variable,
3007 verify that the base is valid, and then show we've already checked
3008 the subtrees. */
3009 while (handled_component_p (t))
3011 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3012 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3013 else if (TREE_CODE (t) == ARRAY_REF
3014 || TREE_CODE (t) == ARRAY_RANGE_REF)
3016 CHECK_OP (1, "invalid array index");
3017 if (TREE_OPERAND (t, 2))
3018 CHECK_OP (2, "invalid array lower bound");
3019 if (TREE_OPERAND (t, 3))
3020 CHECK_OP (3, "invalid array stride");
3022 else if (TREE_CODE (t) == BIT_FIELD_REF
3023 || TREE_CODE (t) == REALPART_EXPR
3024 || TREE_CODE (t) == IMAGPART_EXPR)
3026 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3027 "REALPART_EXPR");
3028 return t;
3031 t = TREE_OPERAND (t, 0);
3034 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3036 error ("invalid reference prefix");
3037 return t;
3039 *walk_subtrees = 0;
3040 break;
3041 case PLUS_EXPR:
3042 case MINUS_EXPR:
3043 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3044 POINTER_PLUS_EXPR. */
3045 if (POINTER_TYPE_P (TREE_TYPE (t)))
3047 error ("invalid operand to plus/minus, type is a pointer");
3048 return t;
3050 CHECK_OP (0, "invalid operand to binary operator");
3051 CHECK_OP (1, "invalid operand to binary operator");
3052 break;
3054 case POINTER_PLUS_EXPR:
3055 /* Check to make sure the first operand is a pointer or reference type. */
3056 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3058 error ("invalid operand to pointer plus, first operand is not a pointer");
3059 return t;
3061 /* Check to make sure the second operand is a ptrofftype. */
3062 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3064 error ("invalid operand to pointer plus, second operand is not an "
3065 "integer type of appropriate width");
3066 return t;
3068 /* FALLTHROUGH */
3069 case LT_EXPR:
3070 case LE_EXPR:
3071 case GT_EXPR:
3072 case GE_EXPR:
3073 case EQ_EXPR:
3074 case NE_EXPR:
3075 case UNORDERED_EXPR:
3076 case ORDERED_EXPR:
3077 case UNLT_EXPR:
3078 case UNLE_EXPR:
3079 case UNGT_EXPR:
3080 case UNGE_EXPR:
3081 case UNEQ_EXPR:
3082 case LTGT_EXPR:
3083 case MULT_EXPR:
3084 case TRUNC_DIV_EXPR:
3085 case CEIL_DIV_EXPR:
3086 case FLOOR_DIV_EXPR:
3087 case ROUND_DIV_EXPR:
3088 case TRUNC_MOD_EXPR:
3089 case CEIL_MOD_EXPR:
3090 case FLOOR_MOD_EXPR:
3091 case ROUND_MOD_EXPR:
3092 case RDIV_EXPR:
3093 case EXACT_DIV_EXPR:
3094 case MIN_EXPR:
3095 case MAX_EXPR:
3096 case LSHIFT_EXPR:
3097 case RSHIFT_EXPR:
3098 case LROTATE_EXPR:
3099 case RROTATE_EXPR:
3100 case BIT_IOR_EXPR:
3101 case BIT_XOR_EXPR:
3102 case BIT_AND_EXPR:
3103 CHECK_OP (0, "invalid operand to binary operator");
3104 CHECK_OP (1, "invalid operand to binary operator");
3105 break;
3107 case CONSTRUCTOR:
3108 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3109 *walk_subtrees = 0;
3110 break;
3112 case CASE_LABEL_EXPR:
3113 if (CASE_CHAIN (t))
3115 error ("invalid CASE_CHAIN");
3116 return t;
3118 break;
3120 default:
3121 break;
3123 return NULL;
3125 #undef CHECK_OP
3129 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3130 Returns true if there is an error, otherwise false. */
3132 static bool
3133 verify_types_in_gimple_min_lval (tree expr)
3135 tree op;
3137 if (is_gimple_id (expr))
3138 return false;
3140 if (TREE_CODE (expr) != TARGET_MEM_REF
3141 && TREE_CODE (expr) != MEM_REF)
3143 error ("invalid expression for min lvalue");
3144 return true;
3147 /* TARGET_MEM_REFs are strange beasts. */
3148 if (TREE_CODE (expr) == TARGET_MEM_REF)
3149 return false;
3151 op = TREE_OPERAND (expr, 0);
3152 if (!is_gimple_val (op))
3154 error ("invalid operand in indirect reference");
3155 debug_generic_stmt (op);
3156 return true;
3158 /* Memory references now generally can involve a value conversion. */
3160 return false;
3163 /* Verify if EXPR is a valid GIMPLE reference expression. If
3164 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3165 if there is an error, otherwise false. */
3167 static bool
3168 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3170 while (handled_component_p (expr))
3172 tree op = TREE_OPERAND (expr, 0);
3174 if (TREE_CODE (expr) == ARRAY_REF
3175 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3177 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3178 || (TREE_OPERAND (expr, 2)
3179 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3180 || (TREE_OPERAND (expr, 3)
3181 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3183 error ("invalid operands to array reference");
3184 debug_generic_stmt (expr);
3185 return true;
3189 /* Verify if the reference array element types are compatible. */
3190 if (TREE_CODE (expr) == ARRAY_REF
3191 && !useless_type_conversion_p (TREE_TYPE (expr),
3192 TREE_TYPE (TREE_TYPE (op))))
3194 error ("type mismatch in array reference");
3195 debug_generic_stmt (TREE_TYPE (expr));
3196 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3197 return true;
3199 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3200 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3201 TREE_TYPE (TREE_TYPE (op))))
3203 error ("type mismatch in array range reference");
3204 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3205 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3206 return true;
3209 if ((TREE_CODE (expr) == REALPART_EXPR
3210 || TREE_CODE (expr) == IMAGPART_EXPR)
3211 && !useless_type_conversion_p (TREE_TYPE (expr),
3212 TREE_TYPE (TREE_TYPE (op))))
3214 error ("type mismatch in real/imagpart reference");
3215 debug_generic_stmt (TREE_TYPE (expr));
3216 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3217 return true;
3220 if (TREE_CODE (expr) == COMPONENT_REF
3221 && !useless_type_conversion_p (TREE_TYPE (expr),
3222 TREE_TYPE (TREE_OPERAND (expr, 1))))
3224 error ("type mismatch in component reference");
3225 debug_generic_stmt (TREE_TYPE (expr));
3226 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3227 return true;
3230 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3232 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3233 that their operand is not an SSA name or an invariant when
3234 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3235 bug). Otherwise there is nothing to verify, gross mismatches at
3236 most invoke undefined behavior. */
3237 if (require_lvalue
3238 && (TREE_CODE (op) == SSA_NAME
3239 || is_gimple_min_invariant (op)))
3241 error ("conversion of an SSA_NAME on the left hand side");
3242 debug_generic_stmt (expr);
3243 return true;
3245 else if (TREE_CODE (op) == SSA_NAME
3246 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3248 error ("conversion of register to a different size");
3249 debug_generic_stmt (expr);
3250 return true;
3252 else if (!handled_component_p (op))
3253 return false;
3256 expr = op;
3259 if (TREE_CODE (expr) == MEM_REF)
3261 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3263 error ("invalid address operand in MEM_REF");
3264 debug_generic_stmt (expr);
3265 return true;
3267 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3268 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3270 error ("invalid offset operand in MEM_REF");
3271 debug_generic_stmt (expr);
3272 return true;
3275 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3277 if (!TMR_BASE (expr)
3278 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3280 error ("invalid address operand in TARGET_MEM_REF");
3281 return true;
3283 if (!TMR_OFFSET (expr)
3284 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3285 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3287 error ("invalid offset operand in TARGET_MEM_REF");
3288 debug_generic_stmt (expr);
3289 return true;
3293 return ((require_lvalue || !is_gimple_min_invariant (expr))
3294 && verify_types_in_gimple_min_lval (expr));
3297 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3298 list of pointer-to types that is trivially convertible to DEST. */
3300 static bool
3301 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3303 tree src;
3305 if (!TYPE_POINTER_TO (src_obj))
3306 return true;
3308 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3309 if (useless_type_conversion_p (dest, src))
3310 return true;
3312 return false;
3315 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3316 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3318 static bool
3319 valid_fixed_convert_types_p (tree type1, tree type2)
3321 return (FIXED_POINT_TYPE_P (type1)
3322 && (INTEGRAL_TYPE_P (type2)
3323 || SCALAR_FLOAT_TYPE_P (type2)
3324 || FIXED_POINT_TYPE_P (type2)));
3327 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3328 is a problem, otherwise false. */
3330 static bool
3331 verify_gimple_call (gcall *stmt)
3333 tree fn = gimple_call_fn (stmt);
3334 tree fntype, fndecl;
3335 unsigned i;
3337 if (gimple_call_internal_p (stmt))
3339 if (fn)
3341 error ("gimple call has two targets");
3342 debug_generic_stmt (fn);
3343 return true;
3346 else
3348 if (!fn)
3350 error ("gimple call has no target");
3351 return true;
3355 if (fn && !is_gimple_call_addr (fn))
3357 error ("invalid function in gimple call");
3358 debug_generic_stmt (fn);
3359 return true;
3362 if (fn
3363 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3364 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3365 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3367 error ("non-function in gimple call");
3368 return true;
3371 fndecl = gimple_call_fndecl (stmt);
3372 if (fndecl
3373 && TREE_CODE (fndecl) == FUNCTION_DECL
3374 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3375 && !DECL_PURE_P (fndecl)
3376 && !TREE_READONLY (fndecl))
3378 error ("invalid pure const state for function");
3379 return true;
3382 tree lhs = gimple_call_lhs (stmt);
3383 if (lhs
3384 && (!is_gimple_lvalue (lhs)
3385 || verify_types_in_gimple_reference (lhs, true)))
3387 error ("invalid LHS in gimple call");
3388 return true;
3391 if (lhs
3392 && gimple_call_ctrl_altering_p (stmt)
3393 && gimple_call_noreturn_p (stmt)
3394 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3396 error ("LHS in noreturn call");
3397 return true;
3400 fntype = gimple_call_fntype (stmt);
3401 if (fntype
3402 && lhs
3403 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3404 /* ??? At least C++ misses conversions at assignments from
3405 void * call results.
3406 ??? Java is completely off. Especially with functions
3407 returning java.lang.Object.
3408 For now simply allow arbitrary pointer type conversions. */
3409 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3410 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3412 error ("invalid conversion in gimple call");
3413 debug_generic_stmt (TREE_TYPE (lhs));
3414 debug_generic_stmt (TREE_TYPE (fntype));
3415 return true;
3418 if (gimple_call_chain (stmt)
3419 && !is_gimple_val (gimple_call_chain (stmt)))
3421 error ("invalid static chain in gimple call");
3422 debug_generic_stmt (gimple_call_chain (stmt));
3423 return true;
3426 /* If there is a static chain argument, the call should either be
3427 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3428 if (gimple_call_chain (stmt)
3429 && fndecl
3430 && !DECL_STATIC_CHAIN (fndecl))
3432 error ("static chain with function that doesn%'t use one");
3433 return true;
3436 /* ??? The C frontend passes unpromoted arguments in case it
3437 didn't see a function declaration before the call. So for now
3438 leave the call arguments mostly unverified. Once we gimplify
3439 unit-at-a-time we have a chance to fix this. */
3441 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3443 tree arg = gimple_call_arg (stmt, i);
3444 if ((is_gimple_reg_type (TREE_TYPE (arg))
3445 && !is_gimple_val (arg))
3446 || (!is_gimple_reg_type (TREE_TYPE (arg))
3447 && !is_gimple_lvalue (arg)))
3449 error ("invalid argument to gimple call");
3450 debug_generic_expr (arg);
3451 return true;
3455 return false;
3458 /* Verifies the gimple comparison with the result type TYPE and
3459 the operands OP0 and OP1. */
3461 static bool
3462 verify_gimple_comparison (tree type, tree op0, tree op1)
3464 tree op0_type = TREE_TYPE (op0);
3465 tree op1_type = TREE_TYPE (op1);
3467 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3469 error ("invalid operands in gimple comparison");
3470 return true;
3473 /* For comparisons we do not have the operations type as the
3474 effective type the comparison is carried out in. Instead
3475 we require that either the first operand is trivially
3476 convertible into the second, or the other way around.
3477 Because we special-case pointers to void we allow
3478 comparisons of pointers with the same mode as well. */
3479 if (!useless_type_conversion_p (op0_type, op1_type)
3480 && !useless_type_conversion_p (op1_type, op0_type)
3481 && (!POINTER_TYPE_P (op0_type)
3482 || !POINTER_TYPE_P (op1_type)
3483 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3485 error ("mismatching comparison operand types");
3486 debug_generic_expr (op0_type);
3487 debug_generic_expr (op1_type);
3488 return true;
3491 /* The resulting type of a comparison may be an effective boolean type. */
3492 if (INTEGRAL_TYPE_P (type)
3493 && (TREE_CODE (type) == BOOLEAN_TYPE
3494 || TYPE_PRECISION (type) == 1))
3496 if (TREE_CODE (op0_type) == VECTOR_TYPE
3497 || TREE_CODE (op1_type) == VECTOR_TYPE)
3499 error ("vector comparison returning a boolean");
3500 debug_generic_expr (op0_type);
3501 debug_generic_expr (op1_type);
3502 return true;
3505 /* Or an integer vector type with the same size and element count
3506 as the comparison operand types. */
3507 else if (TREE_CODE (type) == VECTOR_TYPE
3508 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3510 if (TREE_CODE (op0_type) != VECTOR_TYPE
3511 || TREE_CODE (op1_type) != VECTOR_TYPE)
3513 error ("non-vector operands in vector comparison");
3514 debug_generic_expr (op0_type);
3515 debug_generic_expr (op1_type);
3516 return true;
3519 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3520 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3521 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3522 /* The result of a vector comparison is of signed
3523 integral type. */
3524 || TYPE_UNSIGNED (TREE_TYPE (type)))
3526 error ("invalid vector comparison resulting type");
3527 debug_generic_expr (type);
3528 return true;
3531 else
3533 error ("bogus comparison result type");
3534 debug_generic_expr (type);
3535 return true;
3538 return false;
3541 /* Verify a gimple assignment statement STMT with an unary rhs.
3542 Returns true if anything is wrong. */
3544 static bool
3545 verify_gimple_assign_unary (gassign *stmt)
3547 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3548 tree lhs = gimple_assign_lhs (stmt);
3549 tree lhs_type = TREE_TYPE (lhs);
3550 tree rhs1 = gimple_assign_rhs1 (stmt);
3551 tree rhs1_type = TREE_TYPE (rhs1);
3553 if (!is_gimple_reg (lhs))
3555 error ("non-register as LHS of unary operation");
3556 return true;
3559 if (!is_gimple_val (rhs1))
3561 error ("invalid operand in unary operation");
3562 return true;
3565 /* First handle conversions. */
3566 switch (rhs_code)
3568 CASE_CONVERT:
3570 /* Allow conversions from pointer type to integral type only if
3571 there is no sign or zero extension involved.
3572 For targets were the precision of ptrofftype doesn't match that
3573 of pointers we need to allow arbitrary conversions to ptrofftype. */
3574 if ((POINTER_TYPE_P (lhs_type)
3575 && INTEGRAL_TYPE_P (rhs1_type))
3576 || (POINTER_TYPE_P (rhs1_type)
3577 && INTEGRAL_TYPE_P (lhs_type)
3578 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3579 || ptrofftype_p (sizetype))))
3580 return false;
3582 /* Allow conversion from integral to offset type and vice versa. */
3583 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3584 && INTEGRAL_TYPE_P (rhs1_type))
3585 || (INTEGRAL_TYPE_P (lhs_type)
3586 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3587 return false;
3589 /* Otherwise assert we are converting between types of the
3590 same kind. */
3591 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3593 error ("invalid types in nop conversion");
3594 debug_generic_expr (lhs_type);
3595 debug_generic_expr (rhs1_type);
3596 return true;
3599 return false;
3602 case ADDR_SPACE_CONVERT_EXPR:
3604 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3605 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3606 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3608 error ("invalid types in address space conversion");
3609 debug_generic_expr (lhs_type);
3610 debug_generic_expr (rhs1_type);
3611 return true;
3614 return false;
3617 case FIXED_CONVERT_EXPR:
3619 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3620 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3622 error ("invalid types in fixed-point conversion");
3623 debug_generic_expr (lhs_type);
3624 debug_generic_expr (rhs1_type);
3625 return true;
3628 return false;
3631 case FLOAT_EXPR:
3633 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3634 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3635 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3637 error ("invalid types in conversion to floating point");
3638 debug_generic_expr (lhs_type);
3639 debug_generic_expr (rhs1_type);
3640 return true;
3643 return false;
3646 case FIX_TRUNC_EXPR:
3648 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3649 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3650 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3652 error ("invalid types in conversion to integer");
3653 debug_generic_expr (lhs_type);
3654 debug_generic_expr (rhs1_type);
3655 return true;
3658 return false;
3660 case REDUC_MAX_EXPR:
3661 case REDUC_MIN_EXPR:
3662 case REDUC_PLUS_EXPR:
3663 if (!VECTOR_TYPE_P (rhs1_type)
3664 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3666 error ("reduction should convert from vector to element type");
3667 debug_generic_expr (lhs_type);
3668 debug_generic_expr (rhs1_type);
3669 return true;
3671 return false;
3673 case VEC_UNPACK_HI_EXPR:
3674 case VEC_UNPACK_LO_EXPR:
3675 case VEC_UNPACK_FLOAT_HI_EXPR:
3676 case VEC_UNPACK_FLOAT_LO_EXPR:
3677 /* FIXME. */
3678 return false;
3680 case NEGATE_EXPR:
3681 case ABS_EXPR:
3682 case BIT_NOT_EXPR:
3683 case PAREN_EXPR:
3684 case CONJ_EXPR:
3685 break;
3687 default:
3688 gcc_unreachable ();
3691 /* For the remaining codes assert there is no conversion involved. */
3692 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3694 error ("non-trivial conversion in unary operation");
3695 debug_generic_expr (lhs_type);
3696 debug_generic_expr (rhs1_type);
3697 return true;
3700 return false;
3703 /* Verify a gimple assignment statement STMT with a binary rhs.
3704 Returns true if anything is wrong. */
3706 static bool
3707 verify_gimple_assign_binary (gassign *stmt)
3709 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3710 tree lhs = gimple_assign_lhs (stmt);
3711 tree lhs_type = TREE_TYPE (lhs);
3712 tree rhs1 = gimple_assign_rhs1 (stmt);
3713 tree rhs1_type = TREE_TYPE (rhs1);
3714 tree rhs2 = gimple_assign_rhs2 (stmt);
3715 tree rhs2_type = TREE_TYPE (rhs2);
3717 if (!is_gimple_reg (lhs))
3719 error ("non-register as LHS of binary operation");
3720 return true;
3723 if (!is_gimple_val (rhs1)
3724 || !is_gimple_val (rhs2))
3726 error ("invalid operands in binary operation");
3727 return true;
3730 /* First handle operations that involve different types. */
3731 switch (rhs_code)
3733 case COMPLEX_EXPR:
3735 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3736 || !(INTEGRAL_TYPE_P (rhs1_type)
3737 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3738 || !(INTEGRAL_TYPE_P (rhs2_type)
3739 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3741 error ("type mismatch in complex expression");
3742 debug_generic_expr (lhs_type);
3743 debug_generic_expr (rhs1_type);
3744 debug_generic_expr (rhs2_type);
3745 return true;
3748 return false;
3751 case LSHIFT_EXPR:
3752 case RSHIFT_EXPR:
3753 case LROTATE_EXPR:
3754 case RROTATE_EXPR:
3756 /* Shifts and rotates are ok on integral types, fixed point
3757 types and integer vector types. */
3758 if ((!INTEGRAL_TYPE_P (rhs1_type)
3759 && !FIXED_POINT_TYPE_P (rhs1_type)
3760 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3762 || (!INTEGRAL_TYPE_P (rhs2_type)
3763 /* Vector shifts of vectors are also ok. */
3764 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3765 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3766 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3767 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3768 || !useless_type_conversion_p (lhs_type, rhs1_type))
3770 error ("type mismatch in shift expression");
3771 debug_generic_expr (lhs_type);
3772 debug_generic_expr (rhs1_type);
3773 debug_generic_expr (rhs2_type);
3774 return true;
3777 return false;
3780 case WIDEN_LSHIFT_EXPR:
3782 if (!INTEGRAL_TYPE_P (lhs_type)
3783 || !INTEGRAL_TYPE_P (rhs1_type)
3784 || TREE_CODE (rhs2) != INTEGER_CST
3785 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3787 error ("type mismatch in widening vector shift expression");
3788 debug_generic_expr (lhs_type);
3789 debug_generic_expr (rhs1_type);
3790 debug_generic_expr (rhs2_type);
3791 return true;
3794 return false;
3797 case VEC_WIDEN_LSHIFT_HI_EXPR:
3798 case VEC_WIDEN_LSHIFT_LO_EXPR:
3800 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3801 || TREE_CODE (lhs_type) != VECTOR_TYPE
3802 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3803 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3804 || TREE_CODE (rhs2) != INTEGER_CST
3805 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3806 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3808 error ("type mismatch in widening vector shift expression");
3809 debug_generic_expr (lhs_type);
3810 debug_generic_expr (rhs1_type);
3811 debug_generic_expr (rhs2_type);
3812 return true;
3815 return false;
3818 case PLUS_EXPR:
3819 case MINUS_EXPR:
3821 tree lhs_etype = lhs_type;
3822 tree rhs1_etype = rhs1_type;
3823 tree rhs2_etype = rhs2_type;
3824 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3826 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3827 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3829 error ("invalid non-vector operands to vector valued plus");
3830 return true;
3832 lhs_etype = TREE_TYPE (lhs_type);
3833 rhs1_etype = TREE_TYPE (rhs1_type);
3834 rhs2_etype = TREE_TYPE (rhs2_type);
3836 if (POINTER_TYPE_P (lhs_etype)
3837 || POINTER_TYPE_P (rhs1_etype)
3838 || POINTER_TYPE_P (rhs2_etype))
3840 error ("invalid (pointer) operands to plus/minus");
3841 return true;
3844 /* Continue with generic binary expression handling. */
3845 break;
3848 case POINTER_PLUS_EXPR:
3850 if (!POINTER_TYPE_P (rhs1_type)
3851 || !useless_type_conversion_p (lhs_type, rhs1_type)
3852 || !ptrofftype_p (rhs2_type))
3854 error ("type mismatch in pointer plus expression");
3855 debug_generic_stmt (lhs_type);
3856 debug_generic_stmt (rhs1_type);
3857 debug_generic_stmt (rhs2_type);
3858 return true;
3861 return false;
3864 case TRUTH_ANDIF_EXPR:
3865 case TRUTH_ORIF_EXPR:
3866 case TRUTH_AND_EXPR:
3867 case TRUTH_OR_EXPR:
3868 case TRUTH_XOR_EXPR:
3870 gcc_unreachable ();
3872 case LT_EXPR:
3873 case LE_EXPR:
3874 case GT_EXPR:
3875 case GE_EXPR:
3876 case EQ_EXPR:
3877 case NE_EXPR:
3878 case UNORDERED_EXPR:
3879 case ORDERED_EXPR:
3880 case UNLT_EXPR:
3881 case UNLE_EXPR:
3882 case UNGT_EXPR:
3883 case UNGE_EXPR:
3884 case UNEQ_EXPR:
3885 case LTGT_EXPR:
3886 /* Comparisons are also binary, but the result type is not
3887 connected to the operand types. */
3888 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3890 case WIDEN_MULT_EXPR:
3891 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3892 return true;
3893 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3894 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3896 case WIDEN_SUM_EXPR:
3897 case VEC_WIDEN_MULT_HI_EXPR:
3898 case VEC_WIDEN_MULT_LO_EXPR:
3899 case VEC_WIDEN_MULT_EVEN_EXPR:
3900 case VEC_WIDEN_MULT_ODD_EXPR:
3901 case VEC_PACK_TRUNC_EXPR:
3902 case VEC_PACK_SAT_EXPR:
3903 case VEC_PACK_FIX_TRUNC_EXPR:
3904 /* FIXME. */
3905 return false;
3907 case MULT_EXPR:
3908 case MULT_HIGHPART_EXPR:
3909 case TRUNC_DIV_EXPR:
3910 case CEIL_DIV_EXPR:
3911 case FLOOR_DIV_EXPR:
3912 case ROUND_DIV_EXPR:
3913 case TRUNC_MOD_EXPR:
3914 case CEIL_MOD_EXPR:
3915 case FLOOR_MOD_EXPR:
3916 case ROUND_MOD_EXPR:
3917 case RDIV_EXPR:
3918 case EXACT_DIV_EXPR:
3919 case MIN_EXPR:
3920 case MAX_EXPR:
3921 case BIT_IOR_EXPR:
3922 case BIT_XOR_EXPR:
3923 case BIT_AND_EXPR:
3924 /* Continue with generic binary expression handling. */
3925 break;
3927 default:
3928 gcc_unreachable ();
3931 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3932 || !useless_type_conversion_p (lhs_type, rhs2_type))
3934 error ("type mismatch in binary expression");
3935 debug_generic_stmt (lhs_type);
3936 debug_generic_stmt (rhs1_type);
3937 debug_generic_stmt (rhs2_type);
3938 return true;
3941 return false;
3944 /* Verify a gimple assignment statement STMT with a ternary rhs.
3945 Returns true if anything is wrong. */
3947 static bool
3948 verify_gimple_assign_ternary (gassign *stmt)
3950 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3951 tree lhs = gimple_assign_lhs (stmt);
3952 tree lhs_type = TREE_TYPE (lhs);
3953 tree rhs1 = gimple_assign_rhs1 (stmt);
3954 tree rhs1_type = TREE_TYPE (rhs1);
3955 tree rhs2 = gimple_assign_rhs2 (stmt);
3956 tree rhs2_type = TREE_TYPE (rhs2);
3957 tree rhs3 = gimple_assign_rhs3 (stmt);
3958 tree rhs3_type = TREE_TYPE (rhs3);
3960 if (!is_gimple_reg (lhs))
3962 error ("non-register as LHS of ternary operation");
3963 return true;
3966 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3967 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3968 || !is_gimple_val (rhs2)
3969 || !is_gimple_val (rhs3))
3971 error ("invalid operands in ternary operation");
3972 return true;
3975 /* First handle operations that involve different types. */
3976 switch (rhs_code)
3978 case WIDEN_MULT_PLUS_EXPR:
3979 case WIDEN_MULT_MINUS_EXPR:
3980 if ((!INTEGRAL_TYPE_P (rhs1_type)
3981 && !FIXED_POINT_TYPE_P (rhs1_type))
3982 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3983 || !useless_type_conversion_p (lhs_type, rhs3_type)
3984 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3985 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3987 error ("type mismatch in widening multiply-accumulate expression");
3988 debug_generic_expr (lhs_type);
3989 debug_generic_expr (rhs1_type);
3990 debug_generic_expr (rhs2_type);
3991 debug_generic_expr (rhs3_type);
3992 return true;
3994 break;
3996 case FMA_EXPR:
3997 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3998 || !useless_type_conversion_p (lhs_type, rhs2_type)
3999 || !useless_type_conversion_p (lhs_type, rhs3_type))
4001 error ("type mismatch in fused multiply-add expression");
4002 debug_generic_expr (lhs_type);
4003 debug_generic_expr (rhs1_type);
4004 debug_generic_expr (rhs2_type);
4005 debug_generic_expr (rhs3_type);
4006 return true;
4008 break;
4010 case COND_EXPR:
4011 case VEC_COND_EXPR:
4012 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4013 || !useless_type_conversion_p (lhs_type, rhs3_type))
4015 error ("type mismatch in conditional expression");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs2_type);
4018 debug_generic_expr (rhs3_type);
4019 return true;
4021 break;
4023 case VEC_PERM_EXPR:
4024 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4025 || !useless_type_conversion_p (lhs_type, rhs2_type))
4027 error ("type mismatch in vector permute expression");
4028 debug_generic_expr (lhs_type);
4029 debug_generic_expr (rhs1_type);
4030 debug_generic_expr (rhs2_type);
4031 debug_generic_expr (rhs3_type);
4032 return true;
4035 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4036 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4037 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4039 error ("vector types expected in vector permute expression");
4040 debug_generic_expr (lhs_type);
4041 debug_generic_expr (rhs1_type);
4042 debug_generic_expr (rhs2_type);
4043 debug_generic_expr (rhs3_type);
4044 return true;
4047 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4048 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4049 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4050 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4051 != TYPE_VECTOR_SUBPARTS (lhs_type))
4053 error ("vectors with different element number found "
4054 "in vector permute expression");
4055 debug_generic_expr (lhs_type);
4056 debug_generic_expr (rhs1_type);
4057 debug_generic_expr (rhs2_type);
4058 debug_generic_expr (rhs3_type);
4059 return true;
4062 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4063 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4064 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4066 error ("invalid mask type in vector permute expression");
4067 debug_generic_expr (lhs_type);
4068 debug_generic_expr (rhs1_type);
4069 debug_generic_expr (rhs2_type);
4070 debug_generic_expr (rhs3_type);
4071 return true;
4074 return false;
4076 case SAD_EXPR:
4077 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4078 || !useless_type_conversion_p (lhs_type, rhs3_type)
4079 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4080 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4081 > GET_MODE_BITSIZE (GET_MODE_INNER
4082 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4084 error ("type mismatch in sad expression");
4085 debug_generic_expr (lhs_type);
4086 debug_generic_expr (rhs1_type);
4087 debug_generic_expr (rhs2_type);
4088 debug_generic_expr (rhs3_type);
4089 return true;
4092 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4093 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4094 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4096 error ("vector types expected in sad expression");
4097 debug_generic_expr (lhs_type);
4098 debug_generic_expr (rhs1_type);
4099 debug_generic_expr (rhs2_type);
4100 debug_generic_expr (rhs3_type);
4101 return true;
4104 return false;
4106 case DOT_PROD_EXPR:
4107 case REALIGN_LOAD_EXPR:
4108 /* FIXME. */
4109 return false;
4111 default:
4112 gcc_unreachable ();
4114 return false;
4117 /* Verify a gimple assignment statement STMT with a single rhs.
4118 Returns true if anything is wrong. */
4120 static bool
4121 verify_gimple_assign_single (gassign *stmt)
4123 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4124 tree lhs = gimple_assign_lhs (stmt);
4125 tree lhs_type = TREE_TYPE (lhs);
4126 tree rhs1 = gimple_assign_rhs1 (stmt);
4127 tree rhs1_type = TREE_TYPE (rhs1);
4128 bool res = false;
4130 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4132 error ("non-trivial conversion at assignment");
4133 debug_generic_expr (lhs_type);
4134 debug_generic_expr (rhs1_type);
4135 return true;
4138 if (gimple_clobber_p (stmt)
4139 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4141 error ("non-decl/MEM_REF LHS in clobber statement");
4142 debug_generic_expr (lhs);
4143 return true;
4146 if (handled_component_p (lhs)
4147 || TREE_CODE (lhs) == MEM_REF
4148 || TREE_CODE (lhs) == TARGET_MEM_REF)
4149 res |= verify_types_in_gimple_reference (lhs, true);
4151 /* Special codes we cannot handle via their class. */
4152 switch (rhs_code)
4154 case ADDR_EXPR:
4156 tree op = TREE_OPERAND (rhs1, 0);
4157 if (!is_gimple_addressable (op))
4159 error ("invalid operand in unary expression");
4160 return true;
4163 /* Technically there is no longer a need for matching types, but
4164 gimple hygiene asks for this check. In LTO we can end up
4165 combining incompatible units and thus end up with addresses
4166 of globals that change their type to a common one. */
4167 if (!in_lto_p
4168 && !types_compatible_p (TREE_TYPE (op),
4169 TREE_TYPE (TREE_TYPE (rhs1)))
4170 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4171 TREE_TYPE (op)))
4173 error ("type mismatch in address expression");
4174 debug_generic_stmt (TREE_TYPE (rhs1));
4175 debug_generic_stmt (TREE_TYPE (op));
4176 return true;
4179 return verify_types_in_gimple_reference (op, true);
4182 /* tcc_reference */
4183 case INDIRECT_REF:
4184 error ("INDIRECT_REF in gimple IL");
4185 return true;
4187 case COMPONENT_REF:
4188 case BIT_FIELD_REF:
4189 case ARRAY_REF:
4190 case ARRAY_RANGE_REF:
4191 case VIEW_CONVERT_EXPR:
4192 case REALPART_EXPR:
4193 case IMAGPART_EXPR:
4194 case TARGET_MEM_REF:
4195 case MEM_REF:
4196 if (!is_gimple_reg (lhs)
4197 && is_gimple_reg_type (TREE_TYPE (lhs)))
4199 error ("invalid rhs for gimple memory store");
4200 debug_generic_stmt (lhs);
4201 debug_generic_stmt (rhs1);
4202 return true;
4204 return res || verify_types_in_gimple_reference (rhs1, false);
4206 /* tcc_constant */
4207 case SSA_NAME:
4208 case INTEGER_CST:
4209 case REAL_CST:
4210 case FIXED_CST:
4211 case COMPLEX_CST:
4212 case VECTOR_CST:
4213 case STRING_CST:
4214 return res;
4216 /* tcc_declaration */
4217 case CONST_DECL:
4218 return res;
4219 case VAR_DECL:
4220 case PARM_DECL:
4221 if (!is_gimple_reg (lhs)
4222 && !is_gimple_reg (rhs1)
4223 && is_gimple_reg_type (TREE_TYPE (lhs)))
4225 error ("invalid rhs for gimple memory store");
4226 debug_generic_stmt (lhs);
4227 debug_generic_stmt (rhs1);
4228 return true;
4230 return res;
4232 case CONSTRUCTOR:
4233 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4235 unsigned int i;
4236 tree elt_i, elt_v, elt_t = NULL_TREE;
4238 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4239 return res;
4240 /* For vector CONSTRUCTORs we require that either it is empty
4241 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4242 (then the element count must be correct to cover the whole
4243 outer vector and index must be NULL on all elements, or it is
4244 a CONSTRUCTOR of scalar elements, where we as an exception allow
4245 smaller number of elements (assuming zero filling) and
4246 consecutive indexes as compared to NULL indexes (such
4247 CONSTRUCTORs can appear in the IL from FEs). */
4248 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4250 if (elt_t == NULL_TREE)
4252 elt_t = TREE_TYPE (elt_v);
4253 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4255 tree elt_t = TREE_TYPE (elt_v);
4256 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4257 TREE_TYPE (elt_t)))
4259 error ("incorrect type of vector CONSTRUCTOR"
4260 " elements");
4261 debug_generic_stmt (rhs1);
4262 return true;
4264 else if (CONSTRUCTOR_NELTS (rhs1)
4265 * TYPE_VECTOR_SUBPARTS (elt_t)
4266 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4268 error ("incorrect number of vector CONSTRUCTOR"
4269 " elements");
4270 debug_generic_stmt (rhs1);
4271 return true;
4274 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4275 elt_t))
4277 error ("incorrect type of vector CONSTRUCTOR elements");
4278 debug_generic_stmt (rhs1);
4279 return true;
4281 else if (CONSTRUCTOR_NELTS (rhs1)
4282 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4284 error ("incorrect number of vector CONSTRUCTOR elements");
4285 debug_generic_stmt (rhs1);
4286 return true;
4289 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4291 error ("incorrect type of vector CONSTRUCTOR elements");
4292 debug_generic_stmt (rhs1);
4293 return true;
4295 if (elt_i != NULL_TREE
4296 && (TREE_CODE (elt_t) == VECTOR_TYPE
4297 || TREE_CODE (elt_i) != INTEGER_CST
4298 || compare_tree_int (elt_i, i) != 0))
4300 error ("vector CONSTRUCTOR with non-NULL element index");
4301 debug_generic_stmt (rhs1);
4302 return true;
4304 if (!is_gimple_val (elt_v))
4306 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4307 debug_generic_stmt (rhs1);
4308 return true;
4312 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4314 error ("non-vector CONSTRUCTOR with elements");
4315 debug_generic_stmt (rhs1);
4316 return true;
4318 return res;
4319 case OBJ_TYPE_REF:
4320 case ASSERT_EXPR:
4321 case WITH_SIZE_EXPR:
4322 /* FIXME. */
4323 return res;
4325 default:;
4328 return res;
4331 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4332 is a problem, otherwise false. */
4334 static bool
4335 verify_gimple_assign (gassign *stmt)
4337 switch (gimple_assign_rhs_class (stmt))
4339 case GIMPLE_SINGLE_RHS:
4340 return verify_gimple_assign_single (stmt);
4342 case GIMPLE_UNARY_RHS:
4343 return verify_gimple_assign_unary (stmt);
4345 case GIMPLE_BINARY_RHS:
4346 return verify_gimple_assign_binary (stmt);
4348 case GIMPLE_TERNARY_RHS:
4349 return verify_gimple_assign_ternary (stmt);
4351 default:
4352 gcc_unreachable ();
4356 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4357 is a problem, otherwise false. */
4359 static bool
4360 verify_gimple_return (greturn *stmt)
4362 tree op = gimple_return_retval (stmt);
4363 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4365 /* We cannot test for present return values as we do not fix up missing
4366 return values from the original source. */
4367 if (op == NULL)
4368 return false;
4370 if (!is_gimple_val (op)
4371 && TREE_CODE (op) != RESULT_DECL)
4373 error ("invalid operand in return statement");
4374 debug_generic_stmt (op);
4375 return true;
4378 if ((TREE_CODE (op) == RESULT_DECL
4379 && DECL_BY_REFERENCE (op))
4380 || (TREE_CODE (op) == SSA_NAME
4381 && SSA_NAME_VAR (op)
4382 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4383 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4384 op = TREE_TYPE (op);
4386 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4388 error ("invalid conversion in return statement");
4389 debug_generic_stmt (restype);
4390 debug_generic_stmt (TREE_TYPE (op));
4391 return true;
4394 return false;
4398 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4399 is a problem, otherwise false. */
4401 static bool
4402 verify_gimple_goto (ggoto *stmt)
4404 tree dest = gimple_goto_dest (stmt);
4406 /* ??? We have two canonical forms of direct goto destinations, a
4407 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4408 if (TREE_CODE (dest) != LABEL_DECL
4409 && (!is_gimple_val (dest)
4410 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4412 error ("goto destination is neither a label nor a pointer");
4413 return true;
4416 return false;
4419 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4420 is a problem, otherwise false. */
4422 static bool
4423 verify_gimple_switch (gswitch *stmt)
4425 unsigned int i, n;
4426 tree elt, prev_upper_bound = NULL_TREE;
4427 tree index_type, elt_type = NULL_TREE;
4429 if (!is_gimple_val (gimple_switch_index (stmt)))
4431 error ("invalid operand to switch statement");
4432 debug_generic_stmt (gimple_switch_index (stmt));
4433 return true;
4436 index_type = TREE_TYPE (gimple_switch_index (stmt));
4437 if (! INTEGRAL_TYPE_P (index_type))
4439 error ("non-integral type switch statement");
4440 debug_generic_expr (index_type);
4441 return true;
4444 elt = gimple_switch_label (stmt, 0);
4445 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4447 error ("invalid default case label in switch statement");
4448 debug_generic_expr (elt);
4449 return true;
4452 n = gimple_switch_num_labels (stmt);
4453 for (i = 1; i < n; i++)
4455 elt = gimple_switch_label (stmt, i);
4457 if (! CASE_LOW (elt))
4459 error ("invalid case label in switch statement");
4460 debug_generic_expr (elt);
4461 return true;
4463 if (CASE_HIGH (elt)
4464 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4466 error ("invalid case range in switch statement");
4467 debug_generic_expr (elt);
4468 return true;
4471 if (elt_type)
4473 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4474 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4476 error ("type mismatch for case label in switch statement");
4477 debug_generic_expr (elt);
4478 return true;
4481 else
4483 elt_type = TREE_TYPE (CASE_LOW (elt));
4484 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4486 error ("type precision mismatch in switch statement");
4487 return true;
4491 if (prev_upper_bound)
4493 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4495 error ("case labels not sorted in switch statement");
4496 return true;
4500 prev_upper_bound = CASE_HIGH (elt);
4501 if (! prev_upper_bound)
4502 prev_upper_bound = CASE_LOW (elt);
4505 return false;
4508 /* Verify a gimple debug statement STMT.
4509 Returns true if anything is wrong. */
4511 static bool
4512 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4514 /* There isn't much that could be wrong in a gimple debug stmt. A
4515 gimple debug bind stmt, for example, maps a tree, that's usually
4516 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4517 component or member of an aggregate type, to another tree, that
4518 can be an arbitrary expression. These stmts expand into debug
4519 insns, and are converted to debug notes by var-tracking.c. */
4520 return false;
4523 /* Verify a gimple label statement STMT.
4524 Returns true if anything is wrong. */
4526 static bool
4527 verify_gimple_label (glabel *stmt)
4529 tree decl = gimple_label_label (stmt);
4530 int uid;
4531 bool err = false;
4533 if (TREE_CODE (decl) != LABEL_DECL)
4534 return true;
4535 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4536 && DECL_CONTEXT (decl) != current_function_decl)
4538 error ("label's context is not the current function decl");
4539 err |= true;
4542 uid = LABEL_DECL_UID (decl);
4543 if (cfun->cfg
4544 && (uid == -1
4545 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4547 error ("incorrect entry in label_to_block_map");
4548 err |= true;
4551 uid = EH_LANDING_PAD_NR (decl);
4552 if (uid)
4554 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4555 if (decl != lp->post_landing_pad)
4557 error ("incorrect setting of landing pad number");
4558 err |= true;
4562 return err;
4565 /* Verify a gimple cond statement STMT.
4566 Returns true if anything is wrong. */
4568 static bool
4569 verify_gimple_cond (gcond *stmt)
4571 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4573 error ("invalid comparison code in gimple cond");
4574 return true;
4576 if (!(!gimple_cond_true_label (stmt)
4577 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4578 || !(!gimple_cond_false_label (stmt)
4579 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4581 error ("invalid labels in gimple cond");
4582 return true;
4585 return verify_gimple_comparison (boolean_type_node,
4586 gimple_cond_lhs (stmt),
4587 gimple_cond_rhs (stmt));
4590 /* Verify the GIMPLE statement STMT. Returns true if there is an
4591 error, otherwise false. */
4593 static bool
4594 verify_gimple_stmt (gimple stmt)
4596 switch (gimple_code (stmt))
4598 case GIMPLE_ASSIGN:
4599 return verify_gimple_assign (as_a <gassign *> (stmt));
4601 case GIMPLE_LABEL:
4602 return verify_gimple_label (as_a <glabel *> (stmt));
4604 case GIMPLE_CALL:
4605 return verify_gimple_call (as_a <gcall *> (stmt));
4607 case GIMPLE_COND:
4608 return verify_gimple_cond (as_a <gcond *> (stmt));
4610 case GIMPLE_GOTO:
4611 return verify_gimple_goto (as_a <ggoto *> (stmt));
4613 case GIMPLE_SWITCH:
4614 return verify_gimple_switch (as_a <gswitch *> (stmt));
4616 case GIMPLE_RETURN:
4617 return verify_gimple_return (as_a <greturn *> (stmt));
4619 case GIMPLE_ASM:
4620 return false;
4622 case GIMPLE_TRANSACTION:
4623 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4625 /* Tuples that do not have tree operands. */
4626 case GIMPLE_NOP:
4627 case GIMPLE_PREDICT:
4628 case GIMPLE_RESX:
4629 case GIMPLE_EH_DISPATCH:
4630 case GIMPLE_EH_MUST_NOT_THROW:
4631 return false;
4633 CASE_GIMPLE_OMP:
4634 /* OpenMP directives are validated by the FE and never operated
4635 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4636 non-gimple expressions when the main index variable has had
4637 its address taken. This does not affect the loop itself
4638 because the header of an GIMPLE_OMP_FOR is merely used to determine
4639 how to setup the parallel iteration. */
4640 return false;
4642 case GIMPLE_DEBUG:
4643 return verify_gimple_debug (stmt);
4645 default:
4646 gcc_unreachable ();
4650 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4651 and false otherwise. */
4653 static bool
4654 verify_gimple_phi (gimple phi)
4656 bool err = false;
4657 unsigned i;
4658 tree phi_result = gimple_phi_result (phi);
4659 bool virtual_p;
4661 if (!phi_result)
4663 error ("invalid PHI result");
4664 return true;
4667 virtual_p = virtual_operand_p (phi_result);
4668 if (TREE_CODE (phi_result) != SSA_NAME
4669 || (virtual_p
4670 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4672 error ("invalid PHI result");
4673 err = true;
4676 for (i = 0; i < gimple_phi_num_args (phi); i++)
4678 tree t = gimple_phi_arg_def (phi, i);
4680 if (!t)
4682 error ("missing PHI def");
4683 err |= true;
4684 continue;
4686 /* Addressable variables do have SSA_NAMEs but they
4687 are not considered gimple values. */
4688 else if ((TREE_CODE (t) == SSA_NAME
4689 && virtual_p != virtual_operand_p (t))
4690 || (virtual_p
4691 && (TREE_CODE (t) != SSA_NAME
4692 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4693 || (!virtual_p
4694 && !is_gimple_val (t)))
4696 error ("invalid PHI argument");
4697 debug_generic_expr (t);
4698 err |= true;
4700 #ifdef ENABLE_TYPES_CHECKING
4701 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4703 error ("incompatible types in PHI argument %u", i);
4704 debug_generic_stmt (TREE_TYPE (phi_result));
4705 debug_generic_stmt (TREE_TYPE (t));
4706 err |= true;
4708 #endif
4711 return err;
4714 /* Verify the GIMPLE statements inside the sequence STMTS. */
4716 static bool
4717 verify_gimple_in_seq_2 (gimple_seq stmts)
4719 gimple_stmt_iterator ittr;
4720 bool err = false;
4722 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4724 gimple stmt = gsi_stmt (ittr);
4726 switch (gimple_code (stmt))
4728 case GIMPLE_BIND:
4729 err |= verify_gimple_in_seq_2 (
4730 gimple_bind_body (as_a <gbind *> (stmt)));
4731 break;
4733 case GIMPLE_TRY:
4734 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4735 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4736 break;
4738 case GIMPLE_EH_FILTER:
4739 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4740 break;
4742 case GIMPLE_EH_ELSE:
4744 geh_else *eh_else = as_a <geh_else *> (stmt);
4745 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4746 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4748 break;
4750 case GIMPLE_CATCH:
4751 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4752 as_a <gcatch *> (stmt)));
4753 break;
4755 case GIMPLE_TRANSACTION:
4756 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4757 break;
4759 default:
4761 bool err2 = verify_gimple_stmt (stmt);
4762 if (err2)
4763 debug_gimple_stmt (stmt);
4764 err |= err2;
4769 return err;
4772 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4773 is a problem, otherwise false. */
4775 static bool
4776 verify_gimple_transaction (gtransaction *stmt)
4778 tree lab = gimple_transaction_label (stmt);
4779 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4780 return true;
4781 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4785 /* Verify the GIMPLE statements inside the statement list STMTS. */
4787 DEBUG_FUNCTION void
4788 verify_gimple_in_seq (gimple_seq stmts)
4790 timevar_push (TV_TREE_STMT_VERIFY);
4791 if (verify_gimple_in_seq_2 (stmts))
4792 internal_error ("verify_gimple failed");
4793 timevar_pop (TV_TREE_STMT_VERIFY);
4796 /* Return true when the T can be shared. */
4798 static bool
4799 tree_node_can_be_shared (tree t)
4801 if (IS_TYPE_OR_DECL_P (t)
4802 || is_gimple_min_invariant (t)
4803 || TREE_CODE (t) == SSA_NAME
4804 || t == error_mark_node
4805 || TREE_CODE (t) == IDENTIFIER_NODE)
4806 return true;
4808 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4809 return true;
4811 if (DECL_P (t))
4812 return true;
4814 return false;
4817 /* Called via walk_tree. Verify tree sharing. */
4819 static tree
4820 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4822 hash_set<void *> *visited = (hash_set<void *> *) data;
4824 if (tree_node_can_be_shared (*tp))
4826 *walk_subtrees = false;
4827 return NULL;
4830 if (visited->add (*tp))
4831 return *tp;
4833 return NULL;
4836 /* Called via walk_gimple_stmt. Verify tree sharing. */
4838 static tree
4839 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4841 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4842 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4845 static bool eh_error_found;
4846 bool
4847 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4848 hash_set<gimple> *visited)
4850 if (!visited->contains (stmt))
4852 error ("dead STMT in EH table");
4853 debug_gimple_stmt (stmt);
4854 eh_error_found = true;
4856 return true;
4859 /* Verify if the location LOCs block is in BLOCKS. */
4861 static bool
4862 verify_location (hash_set<tree> *blocks, location_t loc)
4864 tree block = LOCATION_BLOCK (loc);
4865 if (block != NULL_TREE
4866 && !blocks->contains (block))
4868 error ("location references block not in block tree");
4869 return true;
4871 if (block != NULL_TREE)
4872 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4873 return false;
4876 /* Called via walk_tree. Verify that expressions have no blocks. */
4878 static tree
4879 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4881 if (!EXPR_P (*tp))
4883 *walk_subtrees = false;
4884 return NULL;
4887 location_t loc = EXPR_LOCATION (*tp);
4888 if (LOCATION_BLOCK (loc) != NULL)
4889 return *tp;
4891 return NULL;
4894 /* Called via walk_tree. Verify locations of expressions. */
4896 static tree
4897 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4899 hash_set<tree> *blocks = (hash_set<tree> *) data;
4901 if (TREE_CODE (*tp) == VAR_DECL
4902 && DECL_HAS_DEBUG_EXPR_P (*tp))
4904 tree t = DECL_DEBUG_EXPR (*tp);
4905 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4906 if (addr)
4907 return addr;
4909 if ((TREE_CODE (*tp) == VAR_DECL
4910 || TREE_CODE (*tp) == PARM_DECL
4911 || TREE_CODE (*tp) == RESULT_DECL)
4912 && DECL_HAS_VALUE_EXPR_P (*tp))
4914 tree t = DECL_VALUE_EXPR (*tp);
4915 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4916 if (addr)
4917 return addr;
4920 if (!EXPR_P (*tp))
4922 *walk_subtrees = false;
4923 return NULL;
4926 location_t loc = EXPR_LOCATION (*tp);
4927 if (verify_location (blocks, loc))
4928 return *tp;
4930 return NULL;
4933 /* Called via walk_gimple_op. Verify locations of expressions. */
4935 static tree
4936 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4938 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4939 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4942 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4944 static void
4945 collect_subblocks (hash_set<tree> *blocks, tree block)
4947 tree t;
4948 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4950 blocks->add (t);
4951 collect_subblocks (blocks, t);
4955 /* Verify the GIMPLE statements in the CFG of FN. */
4957 DEBUG_FUNCTION void
4958 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4960 basic_block bb;
4961 bool err = false;
4963 timevar_push (TV_TREE_STMT_VERIFY);
4964 hash_set<void *> visited;
4965 hash_set<gimple> visited_stmts;
4967 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4968 hash_set<tree> blocks;
4969 if (DECL_INITIAL (fn->decl))
4971 blocks.add (DECL_INITIAL (fn->decl));
4972 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4975 FOR_EACH_BB_FN (bb, fn)
4977 gimple_stmt_iterator gsi;
4979 for (gphi_iterator gpi = gsi_start_phis (bb);
4980 !gsi_end_p (gpi);
4981 gsi_next (&gpi))
4983 gphi *phi = gpi.phi ();
4984 bool err2 = false;
4985 unsigned i;
4987 visited_stmts.add (phi);
4989 if (gimple_bb (phi) != bb)
4991 error ("gimple_bb (phi) is set to a wrong basic block");
4992 err2 = true;
4995 err2 |= verify_gimple_phi (phi);
4997 /* Only PHI arguments have locations. */
4998 if (gimple_location (phi) != UNKNOWN_LOCATION)
5000 error ("PHI node with location");
5001 err2 = true;
5004 for (i = 0; i < gimple_phi_num_args (phi); i++)
5006 tree arg = gimple_phi_arg_def (phi, i);
5007 tree addr = walk_tree (&arg, verify_node_sharing_1,
5008 &visited, NULL);
5009 if (addr)
5011 error ("incorrect sharing of tree nodes");
5012 debug_generic_expr (addr);
5013 err2 |= true;
5015 location_t loc = gimple_phi_arg_location (phi, i);
5016 if (virtual_operand_p (gimple_phi_result (phi))
5017 && loc != UNKNOWN_LOCATION)
5019 error ("virtual PHI with argument locations");
5020 err2 = true;
5022 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5023 if (addr)
5025 debug_generic_expr (addr);
5026 err2 = true;
5028 err2 |= verify_location (&blocks, loc);
5031 if (err2)
5032 debug_gimple_stmt (phi);
5033 err |= err2;
5036 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5038 gimple stmt = gsi_stmt (gsi);
5039 bool err2 = false;
5040 struct walk_stmt_info wi;
5041 tree addr;
5042 int lp_nr;
5044 visited_stmts.add (stmt);
5046 if (gimple_bb (stmt) != bb)
5048 error ("gimple_bb (stmt) is set to a wrong basic block");
5049 err2 = true;
5052 err2 |= verify_gimple_stmt (stmt);
5053 err2 |= verify_location (&blocks, gimple_location (stmt));
5055 memset (&wi, 0, sizeof (wi));
5056 wi.info = (void *) &visited;
5057 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5058 if (addr)
5060 error ("incorrect sharing of tree nodes");
5061 debug_generic_expr (addr);
5062 err2 |= true;
5065 memset (&wi, 0, sizeof (wi));
5066 wi.info = (void *) &blocks;
5067 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5068 if (addr)
5070 debug_generic_expr (addr);
5071 err2 |= true;
5074 /* ??? Instead of not checking these stmts at all the walker
5075 should know its context via wi. */
5076 if (!is_gimple_debug (stmt)
5077 && !is_gimple_omp (stmt))
5079 memset (&wi, 0, sizeof (wi));
5080 addr = walk_gimple_op (stmt, verify_expr, &wi);
5081 if (addr)
5083 debug_generic_expr (addr);
5084 inform (gimple_location (stmt), "in statement");
5085 err2 |= true;
5089 /* If the statement is marked as part of an EH region, then it is
5090 expected that the statement could throw. Verify that when we
5091 have optimizations that simplify statements such that we prove
5092 that they cannot throw, that we update other data structures
5093 to match. */
5094 lp_nr = lookup_stmt_eh_lp (stmt);
5095 if (lp_nr > 0)
5097 if (!stmt_could_throw_p (stmt))
5099 if (verify_nothrow)
5101 error ("statement marked for throw, but doesn%'t");
5102 err2 |= true;
5105 else if (!gsi_one_before_end_p (gsi))
5107 error ("statement marked for throw in middle of block");
5108 err2 |= true;
5112 if (err2)
5113 debug_gimple_stmt (stmt);
5114 err |= err2;
5118 eh_error_found = false;
5119 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5120 if (eh_table)
5121 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5122 (&visited_stmts);
5124 if (err || eh_error_found)
5125 internal_error ("verify_gimple failed");
5127 verify_histograms ();
5128 timevar_pop (TV_TREE_STMT_VERIFY);
5132 /* Verifies that the flow information is OK. */
5134 static int
5135 gimple_verify_flow_info (void)
5137 int err = 0;
5138 basic_block bb;
5139 gimple_stmt_iterator gsi;
5140 gimple stmt;
5141 edge e;
5142 edge_iterator ei;
5144 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5145 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5147 error ("ENTRY_BLOCK has IL associated with it");
5148 err = 1;
5151 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5152 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5154 error ("EXIT_BLOCK has IL associated with it");
5155 err = 1;
5158 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5159 if (e->flags & EDGE_FALLTHRU)
5161 error ("fallthru to exit from bb %d", e->src->index);
5162 err = 1;
5165 FOR_EACH_BB_FN (bb, cfun)
5167 bool found_ctrl_stmt = false;
5169 stmt = NULL;
5171 /* Skip labels on the start of basic block. */
5172 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5174 tree label;
5175 gimple prev_stmt = stmt;
5177 stmt = gsi_stmt (gsi);
5179 if (gimple_code (stmt) != GIMPLE_LABEL)
5180 break;
5182 label = gimple_label_label (as_a <glabel *> (stmt));
5183 if (prev_stmt && DECL_NONLOCAL (label))
5185 error ("nonlocal label ");
5186 print_generic_expr (stderr, label, 0);
5187 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5188 bb->index);
5189 err = 1;
5192 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5194 error ("EH landing pad label ");
5195 print_generic_expr (stderr, label, 0);
5196 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5197 bb->index);
5198 err = 1;
5201 if (label_to_block (label) != bb)
5203 error ("label ");
5204 print_generic_expr (stderr, label, 0);
5205 fprintf (stderr, " to block does not match in bb %d",
5206 bb->index);
5207 err = 1;
5210 if (decl_function_context (label) != current_function_decl)
5212 error ("label ");
5213 print_generic_expr (stderr, label, 0);
5214 fprintf (stderr, " has incorrect context in bb %d",
5215 bb->index);
5216 err = 1;
5220 /* Verify that body of basic block BB is free of control flow. */
5221 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5223 gimple stmt = gsi_stmt (gsi);
5225 if (found_ctrl_stmt)
5227 error ("control flow in the middle of basic block %d",
5228 bb->index);
5229 err = 1;
5232 if (stmt_ends_bb_p (stmt))
5233 found_ctrl_stmt = true;
5235 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5237 error ("label ");
5238 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5239 fprintf (stderr, " in the middle of basic block %d", bb->index);
5240 err = 1;
5244 gsi = gsi_last_bb (bb);
5245 if (gsi_end_p (gsi))
5246 continue;
5248 stmt = gsi_stmt (gsi);
5250 if (gimple_code (stmt) == GIMPLE_LABEL)
5251 continue;
5253 err |= verify_eh_edges (stmt);
5255 if (is_ctrl_stmt (stmt))
5257 FOR_EACH_EDGE (e, ei, bb->succs)
5258 if (e->flags & EDGE_FALLTHRU)
5260 error ("fallthru edge after a control statement in bb %d",
5261 bb->index);
5262 err = 1;
5266 if (gimple_code (stmt) != GIMPLE_COND)
5268 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5269 after anything else but if statement. */
5270 FOR_EACH_EDGE (e, ei, bb->succs)
5271 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5273 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5274 bb->index);
5275 err = 1;
5279 switch (gimple_code (stmt))
5281 case GIMPLE_COND:
5283 edge true_edge;
5284 edge false_edge;
5286 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5288 if (!true_edge
5289 || !false_edge
5290 || !(true_edge->flags & EDGE_TRUE_VALUE)
5291 || !(false_edge->flags & EDGE_FALSE_VALUE)
5292 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5293 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5294 || EDGE_COUNT (bb->succs) >= 3)
5296 error ("wrong outgoing edge flags at end of bb %d",
5297 bb->index);
5298 err = 1;
5301 break;
5303 case GIMPLE_GOTO:
5304 if (simple_goto_p (stmt))
5306 error ("explicit goto at end of bb %d", bb->index);
5307 err = 1;
5309 else
5311 /* FIXME. We should double check that the labels in the
5312 destination blocks have their address taken. */
5313 FOR_EACH_EDGE (e, ei, bb->succs)
5314 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5315 | EDGE_FALSE_VALUE))
5316 || !(e->flags & EDGE_ABNORMAL))
5318 error ("wrong outgoing edge flags at end of bb %d",
5319 bb->index);
5320 err = 1;
5323 break;
5325 case GIMPLE_CALL:
5326 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5327 break;
5328 /* ... fallthru ... */
5329 case GIMPLE_RETURN:
5330 if (!single_succ_p (bb)
5331 || (single_succ_edge (bb)->flags
5332 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5333 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5335 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5336 err = 1;
5338 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5340 error ("return edge does not point to exit in bb %d",
5341 bb->index);
5342 err = 1;
5344 break;
5346 case GIMPLE_SWITCH:
5348 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5349 tree prev;
5350 edge e;
5351 size_t i, n;
5353 n = gimple_switch_num_labels (switch_stmt);
5355 /* Mark all the destination basic blocks. */
5356 for (i = 0; i < n; ++i)
5358 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5359 basic_block label_bb = label_to_block (lab);
5360 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5361 label_bb->aux = (void *)1;
5364 /* Verify that the case labels are sorted. */
5365 prev = gimple_switch_label (switch_stmt, 0);
5366 for (i = 1; i < n; ++i)
5368 tree c = gimple_switch_label (switch_stmt, i);
5369 if (!CASE_LOW (c))
5371 error ("found default case not at the start of "
5372 "case vector");
5373 err = 1;
5374 continue;
5376 if (CASE_LOW (prev)
5377 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5379 error ("case labels not sorted: ");
5380 print_generic_expr (stderr, prev, 0);
5381 fprintf (stderr," is greater than ");
5382 print_generic_expr (stderr, c, 0);
5383 fprintf (stderr," but comes before it.\n");
5384 err = 1;
5386 prev = c;
5388 /* VRP will remove the default case if it can prove it will
5389 never be executed. So do not verify there always exists
5390 a default case here. */
5392 FOR_EACH_EDGE (e, ei, bb->succs)
5394 if (!e->dest->aux)
5396 error ("extra outgoing edge %d->%d",
5397 bb->index, e->dest->index);
5398 err = 1;
5401 e->dest->aux = (void *)2;
5402 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5403 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5405 error ("wrong outgoing edge flags at end of bb %d",
5406 bb->index);
5407 err = 1;
5411 /* Check that we have all of them. */
5412 for (i = 0; i < n; ++i)
5414 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5415 basic_block label_bb = label_to_block (lab);
5417 if (label_bb->aux != (void *)2)
5419 error ("missing edge %i->%i", bb->index, label_bb->index);
5420 err = 1;
5424 FOR_EACH_EDGE (e, ei, bb->succs)
5425 e->dest->aux = (void *)0;
5427 break;
5429 case GIMPLE_EH_DISPATCH:
5430 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5431 break;
5433 default:
5434 break;
5438 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5439 verify_dominators (CDI_DOMINATORS);
5441 return err;
5445 /* Updates phi nodes after creating a forwarder block joined
5446 by edge FALLTHRU. */
5448 static void
5449 gimple_make_forwarder_block (edge fallthru)
5451 edge e;
5452 edge_iterator ei;
5453 basic_block dummy, bb;
5454 tree var;
5455 gphi_iterator gsi;
5457 dummy = fallthru->src;
5458 bb = fallthru->dest;
5460 if (single_pred_p (bb))
5461 return;
5463 /* If we redirected a branch we must create new PHI nodes at the
5464 start of BB. */
5465 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5467 gphi *phi, *new_phi;
5469 phi = gsi.phi ();
5470 var = gimple_phi_result (phi);
5471 new_phi = create_phi_node (var, bb);
5472 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5473 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5474 UNKNOWN_LOCATION);
5477 /* Add the arguments we have stored on edges. */
5478 FOR_EACH_EDGE (e, ei, bb->preds)
5480 if (e == fallthru)
5481 continue;
5483 flush_pending_stmts (e);
5488 /* Return a non-special label in the head of basic block BLOCK.
5489 Create one if it doesn't exist. */
5491 tree
5492 gimple_block_label (basic_block bb)
5494 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5495 bool first = true;
5496 tree label;
5497 glabel *stmt;
5499 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5501 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5502 if (!stmt)
5503 break;
5504 label = gimple_label_label (stmt);
5505 if (!DECL_NONLOCAL (label))
5507 if (!first)
5508 gsi_move_before (&i, &s);
5509 return label;
5513 label = create_artificial_label (UNKNOWN_LOCATION);
5514 stmt = gimple_build_label (label);
5515 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5516 return label;
5520 /* Attempt to perform edge redirection by replacing a possibly complex
5521 jump instruction by a goto or by removing the jump completely.
5522 This can apply only if all edges now point to the same block. The
5523 parameters and return values are equivalent to
5524 redirect_edge_and_branch. */
5526 static edge
5527 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5529 basic_block src = e->src;
5530 gimple_stmt_iterator i;
5531 gimple stmt;
5533 /* We can replace or remove a complex jump only when we have exactly
5534 two edges. */
5535 if (EDGE_COUNT (src->succs) != 2
5536 /* Verify that all targets will be TARGET. Specifically, the
5537 edge that is not E must also go to TARGET. */
5538 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5539 return NULL;
5541 i = gsi_last_bb (src);
5542 if (gsi_end_p (i))
5543 return NULL;
5545 stmt = gsi_stmt (i);
5547 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5549 gsi_remove (&i, true);
5550 e = ssa_redirect_edge (e, target);
5551 e->flags = EDGE_FALLTHRU;
5552 return e;
5555 return NULL;
5559 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5560 edge representing the redirected branch. */
5562 static edge
5563 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5565 basic_block bb = e->src;
5566 gimple_stmt_iterator gsi;
5567 edge ret;
5568 gimple stmt;
5570 if (e->flags & EDGE_ABNORMAL)
5571 return NULL;
5573 if (e->dest == dest)
5574 return NULL;
5576 if (e->flags & EDGE_EH)
5577 return redirect_eh_edge (e, dest);
5579 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5581 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5582 if (ret)
5583 return ret;
5586 gsi = gsi_last_bb (bb);
5587 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5589 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5591 case GIMPLE_COND:
5592 /* For COND_EXPR, we only need to redirect the edge. */
5593 break;
5595 case GIMPLE_GOTO:
5596 /* No non-abnormal edges should lead from a non-simple goto, and
5597 simple ones should be represented implicitly. */
5598 gcc_unreachable ();
5600 case GIMPLE_SWITCH:
5602 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5603 tree label = gimple_block_label (dest);
5604 tree cases = get_cases_for_edge (e, switch_stmt);
5606 /* If we have a list of cases associated with E, then use it
5607 as it's a lot faster than walking the entire case vector. */
5608 if (cases)
5610 edge e2 = find_edge (e->src, dest);
5611 tree last, first;
5613 first = cases;
5614 while (cases)
5616 last = cases;
5617 CASE_LABEL (cases) = label;
5618 cases = CASE_CHAIN (cases);
5621 /* If there was already an edge in the CFG, then we need
5622 to move all the cases associated with E to E2. */
5623 if (e2)
5625 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5627 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5628 CASE_CHAIN (cases2) = first;
5630 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5632 else
5634 size_t i, n = gimple_switch_num_labels (switch_stmt);
5636 for (i = 0; i < n; i++)
5638 tree elt = gimple_switch_label (switch_stmt, i);
5639 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5640 CASE_LABEL (elt) = label;
5644 break;
5646 case GIMPLE_ASM:
5648 gasm *asm_stmt = as_a <gasm *> (stmt);
5649 int i, n = gimple_asm_nlabels (asm_stmt);
5650 tree label = NULL;
5652 for (i = 0; i < n; ++i)
5654 tree cons = gimple_asm_label_op (asm_stmt, i);
5655 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5657 if (!label)
5658 label = gimple_block_label (dest);
5659 TREE_VALUE (cons) = label;
5663 /* If we didn't find any label matching the former edge in the
5664 asm labels, we must be redirecting the fallthrough
5665 edge. */
5666 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5668 break;
5670 case GIMPLE_RETURN:
5671 gsi_remove (&gsi, true);
5672 e->flags |= EDGE_FALLTHRU;
5673 break;
5675 case GIMPLE_OMP_RETURN:
5676 case GIMPLE_OMP_CONTINUE:
5677 case GIMPLE_OMP_SECTIONS_SWITCH:
5678 case GIMPLE_OMP_FOR:
5679 /* The edges from OMP constructs can be simply redirected. */
5680 break;
5682 case GIMPLE_EH_DISPATCH:
5683 if (!(e->flags & EDGE_FALLTHRU))
5684 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5685 break;
5687 case GIMPLE_TRANSACTION:
5688 /* The ABORT edge has a stored label associated with it, otherwise
5689 the edges are simply redirectable. */
5690 if (e->flags == 0)
5691 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5692 gimple_block_label (dest));
5693 break;
5695 default:
5696 /* Otherwise it must be a fallthru edge, and we don't need to
5697 do anything besides redirecting it. */
5698 gcc_assert (e->flags & EDGE_FALLTHRU);
5699 break;
5702 /* Update/insert PHI nodes as necessary. */
5704 /* Now update the edges in the CFG. */
5705 e = ssa_redirect_edge (e, dest);
5707 return e;
5710 /* Returns true if it is possible to remove edge E by redirecting
5711 it to the destination of the other edge from E->src. */
5713 static bool
5714 gimple_can_remove_branch_p (const_edge e)
5716 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5717 return false;
5719 return true;
5722 /* Simple wrapper, as we can always redirect fallthru edges. */
5724 static basic_block
5725 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5727 e = gimple_redirect_edge_and_branch (e, dest);
5728 gcc_assert (e);
5730 return NULL;
5734 /* Splits basic block BB after statement STMT (but at least after the
5735 labels). If STMT is NULL, BB is split just after the labels. */
5737 static basic_block
5738 gimple_split_block (basic_block bb, void *stmt)
5740 gimple_stmt_iterator gsi;
5741 gimple_stmt_iterator gsi_tgt;
5742 gimple_seq list;
5743 basic_block new_bb;
5744 edge e;
5745 edge_iterator ei;
5747 new_bb = create_empty_bb (bb);
5749 /* Redirect the outgoing edges. */
5750 new_bb->succs = bb->succs;
5751 bb->succs = NULL;
5752 FOR_EACH_EDGE (e, ei, new_bb->succs)
5753 e->src = new_bb;
5755 /* Get a stmt iterator pointing to the first stmt to move. */
5756 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5757 gsi = gsi_after_labels (bb);
5758 else
5760 gsi = gsi_for_stmt ((gimple) stmt);
5761 gsi_next (&gsi);
5764 /* Move everything from GSI to the new basic block. */
5765 if (gsi_end_p (gsi))
5766 return new_bb;
5768 /* Split the statement list - avoid re-creating new containers as this
5769 brings ugly quadratic memory consumption in the inliner.
5770 (We are still quadratic since we need to update stmt BB pointers,
5771 sadly.) */
5772 gsi_split_seq_before (&gsi, &list);
5773 set_bb_seq (new_bb, list);
5774 for (gsi_tgt = gsi_start (list);
5775 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5776 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5778 return new_bb;
5782 /* Moves basic block BB after block AFTER. */
5784 static bool
5785 gimple_move_block_after (basic_block bb, basic_block after)
5787 if (bb->prev_bb == after)
5788 return true;
5790 unlink_block (bb);
5791 link_block (bb, after);
5793 return true;
5797 /* Return TRUE if block BB has no executable statements, otherwise return
5798 FALSE. */
5800 static bool
5801 gimple_empty_block_p (basic_block bb)
5803 /* BB must have no executable statements. */
5804 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5805 if (phi_nodes (bb))
5806 return false;
5807 if (gsi_end_p (gsi))
5808 return true;
5809 if (is_gimple_debug (gsi_stmt (gsi)))
5810 gsi_next_nondebug (&gsi);
5811 return gsi_end_p (gsi);
5815 /* Split a basic block if it ends with a conditional branch and if the
5816 other part of the block is not empty. */
5818 static basic_block
5819 gimple_split_block_before_cond_jump (basic_block bb)
5821 gimple last, split_point;
5822 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5823 if (gsi_end_p (gsi))
5824 return NULL;
5825 last = gsi_stmt (gsi);
5826 if (gimple_code (last) != GIMPLE_COND
5827 && gimple_code (last) != GIMPLE_SWITCH)
5828 return NULL;
5829 gsi_prev_nondebug (&gsi);
5830 split_point = gsi_stmt (gsi);
5831 return split_block (bb, split_point)->dest;
5835 /* Return true if basic_block can be duplicated. */
5837 static bool
5838 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5840 return true;
5843 /* Create a duplicate of the basic block BB. NOTE: This does not
5844 preserve SSA form. */
5846 static basic_block
5847 gimple_duplicate_bb (basic_block bb)
5849 basic_block new_bb;
5850 gimple_stmt_iterator gsi_tgt;
5852 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5854 /* Copy the PHI nodes. We ignore PHI node arguments here because
5855 the incoming edges have not been setup yet. */
5856 for (gphi_iterator gpi = gsi_start_phis (bb);
5857 !gsi_end_p (gpi);
5858 gsi_next (&gpi))
5860 gphi *phi, *copy;
5861 phi = gpi.phi ();
5862 copy = create_phi_node (NULL_TREE, new_bb);
5863 create_new_def_for (gimple_phi_result (phi), copy,
5864 gimple_phi_result_ptr (copy));
5865 gimple_set_uid (copy, gimple_uid (phi));
5868 gsi_tgt = gsi_start_bb (new_bb);
5869 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5870 !gsi_end_p (gsi);
5871 gsi_next (&gsi))
5873 def_operand_p def_p;
5874 ssa_op_iter op_iter;
5875 tree lhs;
5876 gimple stmt, copy;
5878 stmt = gsi_stmt (gsi);
5879 if (gimple_code (stmt) == GIMPLE_LABEL)
5880 continue;
5882 /* Don't duplicate label debug stmts. */
5883 if (gimple_debug_bind_p (stmt)
5884 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5885 == LABEL_DECL)
5886 continue;
5888 /* Create a new copy of STMT and duplicate STMT's virtual
5889 operands. */
5890 copy = gimple_copy (stmt);
5891 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5893 maybe_duplicate_eh_stmt (copy, stmt);
5894 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5896 /* When copying around a stmt writing into a local non-user
5897 aggregate, make sure it won't share stack slot with other
5898 vars. */
5899 lhs = gimple_get_lhs (stmt);
5900 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5902 tree base = get_base_address (lhs);
5903 if (base
5904 && (TREE_CODE (base) == VAR_DECL
5905 || TREE_CODE (base) == RESULT_DECL)
5906 && DECL_IGNORED_P (base)
5907 && !TREE_STATIC (base)
5908 && !DECL_EXTERNAL (base)
5909 && (TREE_CODE (base) != VAR_DECL
5910 || !DECL_HAS_VALUE_EXPR_P (base)))
5911 DECL_NONSHAREABLE (base) = 1;
5914 /* Create new names for all the definitions created by COPY and
5915 add replacement mappings for each new name. */
5916 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5917 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5920 return new_bb;
5923 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5925 static void
5926 add_phi_args_after_copy_edge (edge e_copy)
5928 basic_block bb, bb_copy = e_copy->src, dest;
5929 edge e;
5930 edge_iterator ei;
5931 gphi *phi, *phi_copy;
5932 tree def;
5933 gphi_iterator psi, psi_copy;
5935 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5936 return;
5938 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5940 if (e_copy->dest->flags & BB_DUPLICATED)
5941 dest = get_bb_original (e_copy->dest);
5942 else
5943 dest = e_copy->dest;
5945 e = find_edge (bb, dest);
5946 if (!e)
5948 /* During loop unrolling the target of the latch edge is copied.
5949 In this case we are not looking for edge to dest, but to
5950 duplicated block whose original was dest. */
5951 FOR_EACH_EDGE (e, ei, bb->succs)
5953 if ((e->dest->flags & BB_DUPLICATED)
5954 && get_bb_original (e->dest) == dest)
5955 break;
5958 gcc_assert (e != NULL);
5961 for (psi = gsi_start_phis (e->dest),
5962 psi_copy = gsi_start_phis (e_copy->dest);
5963 !gsi_end_p (psi);
5964 gsi_next (&psi), gsi_next (&psi_copy))
5966 phi = psi.phi ();
5967 phi_copy = psi_copy.phi ();
5968 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5969 add_phi_arg (phi_copy, def, e_copy,
5970 gimple_phi_arg_location_from_edge (phi, e));
5975 /* Basic block BB_COPY was created by code duplication. Add phi node
5976 arguments for edges going out of BB_COPY. The blocks that were
5977 duplicated have BB_DUPLICATED set. */
5979 void
5980 add_phi_args_after_copy_bb (basic_block bb_copy)
5982 edge e_copy;
5983 edge_iterator ei;
5985 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5987 add_phi_args_after_copy_edge (e_copy);
5991 /* Blocks in REGION_COPY array of length N_REGION were created by
5992 duplication of basic blocks. Add phi node arguments for edges
5993 going from these blocks. If E_COPY is not NULL, also add
5994 phi node arguments for its destination.*/
5996 void
5997 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5998 edge e_copy)
6000 unsigned i;
6002 for (i = 0; i < n_region; i++)
6003 region_copy[i]->flags |= BB_DUPLICATED;
6005 for (i = 0; i < n_region; i++)
6006 add_phi_args_after_copy_bb (region_copy[i]);
6007 if (e_copy)
6008 add_phi_args_after_copy_edge (e_copy);
6010 for (i = 0; i < n_region; i++)
6011 region_copy[i]->flags &= ~BB_DUPLICATED;
6014 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6015 important exit edge EXIT. By important we mean that no SSA name defined
6016 inside region is live over the other exit edges of the region. All entry
6017 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6018 to the duplicate of the region. Dominance and loop information is
6019 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6020 UPDATE_DOMINANCE is false then we assume that the caller will update the
6021 dominance information after calling this function. The new basic
6022 blocks are stored to REGION_COPY in the same order as they had in REGION,
6023 provided that REGION_COPY is not NULL.
6024 The function returns false if it is unable to copy the region,
6025 true otherwise. */
6027 bool
6028 gimple_duplicate_sese_region (edge entry, edge exit,
6029 basic_block *region, unsigned n_region,
6030 basic_block *region_copy,
6031 bool update_dominance)
6033 unsigned i;
6034 bool free_region_copy = false, copying_header = false;
6035 struct loop *loop = entry->dest->loop_father;
6036 edge exit_copy;
6037 vec<basic_block> doms;
6038 edge redirected;
6039 int total_freq = 0, entry_freq = 0;
6040 gcov_type total_count = 0, entry_count = 0;
6042 if (!can_copy_bbs_p (region, n_region))
6043 return false;
6045 /* Some sanity checking. Note that we do not check for all possible
6046 missuses of the functions. I.e. if you ask to copy something weird,
6047 it will work, but the state of structures probably will not be
6048 correct. */
6049 for (i = 0; i < n_region; i++)
6051 /* We do not handle subloops, i.e. all the blocks must belong to the
6052 same loop. */
6053 if (region[i]->loop_father != loop)
6054 return false;
6056 if (region[i] != entry->dest
6057 && region[i] == loop->header)
6058 return false;
6061 /* In case the function is used for loop header copying (which is the primary
6062 use), ensure that EXIT and its copy will be new latch and entry edges. */
6063 if (loop->header == entry->dest)
6065 copying_header = true;
6067 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6068 return false;
6070 for (i = 0; i < n_region; i++)
6071 if (region[i] != exit->src
6072 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6073 return false;
6076 initialize_original_copy_tables ();
6078 if (copying_header)
6079 set_loop_copy (loop, loop_outer (loop));
6080 else
6081 set_loop_copy (loop, loop);
6083 if (!region_copy)
6085 region_copy = XNEWVEC (basic_block, n_region);
6086 free_region_copy = true;
6089 /* Record blocks outside the region that are dominated by something
6090 inside. */
6091 if (update_dominance)
6093 doms.create (0);
6094 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6097 if (entry->dest->count)
6099 total_count = entry->dest->count;
6100 entry_count = entry->count;
6101 /* Fix up corner cases, to avoid division by zero or creation of negative
6102 frequencies. */
6103 if (entry_count > total_count)
6104 entry_count = total_count;
6106 else
6108 total_freq = entry->dest->frequency;
6109 entry_freq = EDGE_FREQUENCY (entry);
6110 /* Fix up corner cases, to avoid division by zero or creation of negative
6111 frequencies. */
6112 if (total_freq == 0)
6113 total_freq = 1;
6114 else if (entry_freq > total_freq)
6115 entry_freq = total_freq;
6118 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6119 split_edge_bb_loc (entry), update_dominance);
6120 if (total_count)
6122 scale_bbs_frequencies_gcov_type (region, n_region,
6123 total_count - entry_count,
6124 total_count);
6125 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6126 total_count);
6128 else
6130 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6131 total_freq);
6132 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6135 if (copying_header)
6137 loop->header = exit->dest;
6138 loop->latch = exit->src;
6141 /* Redirect the entry and add the phi node arguments. */
6142 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6143 gcc_assert (redirected != NULL);
6144 flush_pending_stmts (entry);
6146 /* Concerning updating of dominators: We must recount dominators
6147 for entry block and its copy. Anything that is outside of the
6148 region, but was dominated by something inside needs recounting as
6149 well. */
6150 if (update_dominance)
6152 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6153 doms.safe_push (get_bb_original (entry->dest));
6154 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6155 doms.release ();
6158 /* Add the other PHI node arguments. */
6159 add_phi_args_after_copy (region_copy, n_region, NULL);
6161 if (free_region_copy)
6162 free (region_copy);
6164 free_original_copy_tables ();
6165 return true;
6168 /* Checks if BB is part of the region defined by N_REGION BBS. */
6169 static bool
6170 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6172 unsigned int n;
6174 for (n = 0; n < n_region; n++)
6176 if (bb == bbs[n])
6177 return true;
6179 return false;
6182 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6183 are stored to REGION_COPY in the same order in that they appear
6184 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6185 the region, EXIT an exit from it. The condition guarding EXIT
6186 is moved to ENTRY. Returns true if duplication succeeds, false
6187 otherwise.
6189 For example,
6191 some_code;
6192 if (cond)
6194 else
6197 is transformed to
6199 if (cond)
6201 some_code;
6204 else
6206 some_code;
6211 bool
6212 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6213 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6214 basic_block *region_copy ATTRIBUTE_UNUSED)
6216 unsigned i;
6217 bool free_region_copy = false;
6218 struct loop *loop = exit->dest->loop_father;
6219 struct loop *orig_loop = entry->dest->loop_father;
6220 basic_block switch_bb, entry_bb, nentry_bb;
6221 vec<basic_block> doms;
6222 int total_freq = 0, exit_freq = 0;
6223 gcov_type total_count = 0, exit_count = 0;
6224 edge exits[2], nexits[2], e;
6225 gimple_stmt_iterator gsi;
6226 gimple cond_stmt;
6227 edge sorig, snew;
6228 basic_block exit_bb;
6229 gphi_iterator psi;
6230 gphi *phi;
6231 tree def;
6232 struct loop *target, *aloop, *cloop;
6234 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6235 exits[0] = exit;
6236 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6238 if (!can_copy_bbs_p (region, n_region))
6239 return false;
6241 initialize_original_copy_tables ();
6242 set_loop_copy (orig_loop, loop);
6244 target= loop;
6245 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6247 if (bb_part_of_region_p (aloop->header, region, n_region))
6249 cloop = duplicate_loop (aloop, target);
6250 duplicate_subloops (aloop, cloop);
6254 if (!region_copy)
6256 region_copy = XNEWVEC (basic_block, n_region);
6257 free_region_copy = true;
6260 gcc_assert (!need_ssa_update_p (cfun));
6262 /* Record blocks outside the region that are dominated by something
6263 inside. */
6264 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6266 if (exit->src->count)
6268 total_count = exit->src->count;
6269 exit_count = exit->count;
6270 /* Fix up corner cases, to avoid division by zero or creation of negative
6271 frequencies. */
6272 if (exit_count > total_count)
6273 exit_count = total_count;
6275 else
6277 total_freq = exit->src->frequency;
6278 exit_freq = EDGE_FREQUENCY (exit);
6279 /* Fix up corner cases, to avoid division by zero or creation of negative
6280 frequencies. */
6281 if (total_freq == 0)
6282 total_freq = 1;
6283 if (exit_freq > total_freq)
6284 exit_freq = total_freq;
6287 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6288 split_edge_bb_loc (exit), true);
6289 if (total_count)
6291 scale_bbs_frequencies_gcov_type (region, n_region,
6292 total_count - exit_count,
6293 total_count);
6294 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6295 total_count);
6297 else
6299 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6300 total_freq);
6301 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6304 /* Create the switch block, and put the exit condition to it. */
6305 entry_bb = entry->dest;
6306 nentry_bb = get_bb_copy (entry_bb);
6307 if (!last_stmt (entry->src)
6308 || !stmt_ends_bb_p (last_stmt (entry->src)))
6309 switch_bb = entry->src;
6310 else
6311 switch_bb = split_edge (entry);
6312 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6314 gsi = gsi_last_bb (switch_bb);
6315 cond_stmt = last_stmt (exit->src);
6316 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6317 cond_stmt = gimple_copy (cond_stmt);
6319 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6321 sorig = single_succ_edge (switch_bb);
6322 sorig->flags = exits[1]->flags;
6323 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6325 /* Register the new edge from SWITCH_BB in loop exit lists. */
6326 rescan_loop_exit (snew, true, false);
6328 /* Add the PHI node arguments. */
6329 add_phi_args_after_copy (region_copy, n_region, snew);
6331 /* Get rid of now superfluous conditions and associated edges (and phi node
6332 arguments). */
6333 exit_bb = exit->dest;
6335 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6336 PENDING_STMT (e) = NULL;
6338 /* The latch of ORIG_LOOP was copied, and so was the backedge
6339 to the original header. We redirect this backedge to EXIT_BB. */
6340 for (i = 0; i < n_region; i++)
6341 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6343 gcc_assert (single_succ_edge (region_copy[i]));
6344 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6345 PENDING_STMT (e) = NULL;
6346 for (psi = gsi_start_phis (exit_bb);
6347 !gsi_end_p (psi);
6348 gsi_next (&psi))
6350 phi = psi.phi ();
6351 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6352 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6355 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6356 PENDING_STMT (e) = NULL;
6358 /* Anything that is outside of the region, but was dominated by something
6359 inside needs to update dominance info. */
6360 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6361 doms.release ();
6362 /* Update the SSA web. */
6363 update_ssa (TODO_update_ssa);
6365 if (free_region_copy)
6366 free (region_copy);
6368 free_original_copy_tables ();
6369 return true;
6372 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6373 adding blocks when the dominator traversal reaches EXIT. This
6374 function silently assumes that ENTRY strictly dominates EXIT. */
6376 void
6377 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6378 vec<basic_block> *bbs_p)
6380 basic_block son;
6382 for (son = first_dom_son (CDI_DOMINATORS, entry);
6383 son;
6384 son = next_dom_son (CDI_DOMINATORS, son))
6386 bbs_p->safe_push (son);
6387 if (son != exit)
6388 gather_blocks_in_sese_region (son, exit, bbs_p);
6392 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6393 The duplicates are recorded in VARS_MAP. */
6395 static void
6396 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6397 tree to_context)
6399 tree t = *tp, new_t;
6400 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6402 if (DECL_CONTEXT (t) == to_context)
6403 return;
6405 bool existed;
6406 tree &loc = vars_map->get_or_insert (t, &existed);
6408 if (!existed)
6410 if (SSA_VAR_P (t))
6412 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6413 add_local_decl (f, new_t);
6415 else
6417 gcc_assert (TREE_CODE (t) == CONST_DECL);
6418 new_t = copy_node (t);
6420 DECL_CONTEXT (new_t) = to_context;
6422 loc = new_t;
6424 else
6425 new_t = loc;
6427 *tp = new_t;
6431 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6432 VARS_MAP maps old ssa names and var_decls to the new ones. */
6434 static tree
6435 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6436 tree to_context)
6438 tree new_name;
6440 gcc_assert (!virtual_operand_p (name));
6442 tree *loc = vars_map->get (name);
6444 if (!loc)
6446 tree decl = SSA_NAME_VAR (name);
6447 if (decl)
6449 replace_by_duplicate_decl (&decl, vars_map, to_context);
6450 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6451 decl, SSA_NAME_DEF_STMT (name));
6452 if (SSA_NAME_IS_DEFAULT_DEF (name))
6453 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6454 decl, new_name);
6456 else
6457 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6458 name, SSA_NAME_DEF_STMT (name));
6460 vars_map->put (name, new_name);
6462 else
6463 new_name = *loc;
6465 return new_name;
6468 struct move_stmt_d
6470 tree orig_block;
6471 tree new_block;
6472 tree from_context;
6473 tree to_context;
6474 hash_map<tree, tree> *vars_map;
6475 htab_t new_label_map;
6476 hash_map<void *, void *> *eh_map;
6477 bool remap_decls_p;
6480 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6481 contained in *TP if it has been ORIG_BLOCK previously and change the
6482 DECL_CONTEXT of every local variable referenced in *TP. */
6484 static tree
6485 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6487 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6488 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6489 tree t = *tp;
6491 if (EXPR_P (t))
6493 tree block = TREE_BLOCK (t);
6494 if (block == p->orig_block
6495 || (p->orig_block == NULL_TREE
6496 && block != NULL_TREE))
6497 TREE_SET_BLOCK (t, p->new_block);
6498 #ifdef ENABLE_CHECKING
6499 else if (block != NULL_TREE)
6501 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6502 block = BLOCK_SUPERCONTEXT (block);
6503 gcc_assert (block == p->orig_block);
6505 #endif
6507 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6509 if (TREE_CODE (t) == SSA_NAME)
6510 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6511 else if (TREE_CODE (t) == LABEL_DECL)
6513 if (p->new_label_map)
6515 struct tree_map in, *out;
6516 in.base.from = t;
6517 out = (struct tree_map *)
6518 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6519 if (out)
6520 *tp = t = out->to;
6523 DECL_CONTEXT (t) = p->to_context;
6525 else if (p->remap_decls_p)
6527 /* Replace T with its duplicate. T should no longer appear in the
6528 parent function, so this looks wasteful; however, it may appear
6529 in referenced_vars, and more importantly, as virtual operands of
6530 statements, and in alias lists of other variables. It would be
6531 quite difficult to expunge it from all those places. ??? It might
6532 suffice to do this for addressable variables. */
6533 if ((TREE_CODE (t) == VAR_DECL
6534 && !is_global_var (t))
6535 || TREE_CODE (t) == CONST_DECL)
6536 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6538 *walk_subtrees = 0;
6540 else if (TYPE_P (t))
6541 *walk_subtrees = 0;
6543 return NULL_TREE;
6546 /* Helper for move_stmt_r. Given an EH region number for the source
6547 function, map that to the duplicate EH regio number in the dest. */
6549 static int
6550 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6552 eh_region old_r, new_r;
6554 old_r = get_eh_region_from_number (old_nr);
6555 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6557 return new_r->index;
6560 /* Similar, but operate on INTEGER_CSTs. */
6562 static tree
6563 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6565 int old_nr, new_nr;
6567 old_nr = tree_to_shwi (old_t_nr);
6568 new_nr = move_stmt_eh_region_nr (old_nr, p);
6570 return build_int_cst (integer_type_node, new_nr);
6573 /* Like move_stmt_op, but for gimple statements.
6575 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6576 contained in the current statement in *GSI_P and change the
6577 DECL_CONTEXT of every local variable referenced in the current
6578 statement. */
6580 static tree
6581 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6582 struct walk_stmt_info *wi)
6584 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6585 gimple stmt = gsi_stmt (*gsi_p);
6586 tree block = gimple_block (stmt);
6588 if (block == p->orig_block
6589 || (p->orig_block == NULL_TREE
6590 && block != NULL_TREE))
6591 gimple_set_block (stmt, p->new_block);
6593 switch (gimple_code (stmt))
6595 case GIMPLE_CALL:
6596 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6598 tree r, fndecl = gimple_call_fndecl (stmt);
6599 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6600 switch (DECL_FUNCTION_CODE (fndecl))
6602 case BUILT_IN_EH_COPY_VALUES:
6603 r = gimple_call_arg (stmt, 1);
6604 r = move_stmt_eh_region_tree_nr (r, p);
6605 gimple_call_set_arg (stmt, 1, r);
6606 /* FALLTHRU */
6608 case BUILT_IN_EH_POINTER:
6609 case BUILT_IN_EH_FILTER:
6610 r = gimple_call_arg (stmt, 0);
6611 r = move_stmt_eh_region_tree_nr (r, p);
6612 gimple_call_set_arg (stmt, 0, r);
6613 break;
6615 default:
6616 break;
6619 break;
6621 case GIMPLE_RESX:
6623 gresx *resx_stmt = as_a <gresx *> (stmt);
6624 int r = gimple_resx_region (resx_stmt);
6625 r = move_stmt_eh_region_nr (r, p);
6626 gimple_resx_set_region (resx_stmt, r);
6628 break;
6630 case GIMPLE_EH_DISPATCH:
6632 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6633 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6634 r = move_stmt_eh_region_nr (r, p);
6635 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6637 break;
6639 case GIMPLE_OMP_RETURN:
6640 case GIMPLE_OMP_CONTINUE:
6641 break;
6642 default:
6643 if (is_gimple_omp (stmt))
6645 /* Do not remap variables inside OMP directives. Variables
6646 referenced in clauses and directive header belong to the
6647 parent function and should not be moved into the child
6648 function. */
6649 bool save_remap_decls_p = p->remap_decls_p;
6650 p->remap_decls_p = false;
6651 *handled_ops_p = true;
6653 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6654 move_stmt_op, wi);
6656 p->remap_decls_p = save_remap_decls_p;
6658 break;
6661 return NULL_TREE;
6664 /* Move basic block BB from function CFUN to function DEST_FN. The
6665 block is moved out of the original linked list and placed after
6666 block AFTER in the new list. Also, the block is removed from the
6667 original array of blocks and placed in DEST_FN's array of blocks.
6668 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6669 updated to reflect the moved edges.
6671 The local variables are remapped to new instances, VARS_MAP is used
6672 to record the mapping. */
6674 static void
6675 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6676 basic_block after, bool update_edge_count_p,
6677 struct move_stmt_d *d)
6679 struct control_flow_graph *cfg;
6680 edge_iterator ei;
6681 edge e;
6682 gimple_stmt_iterator si;
6683 unsigned old_len, new_len;
6685 /* Remove BB from dominance structures. */
6686 delete_from_dominance_info (CDI_DOMINATORS, bb);
6688 /* Move BB from its current loop to the copy in the new function. */
6689 if (current_loops)
6691 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6692 if (new_loop)
6693 bb->loop_father = new_loop;
6696 /* Link BB to the new linked list. */
6697 move_block_after (bb, after);
6699 /* Update the edge count in the corresponding flowgraphs. */
6700 if (update_edge_count_p)
6701 FOR_EACH_EDGE (e, ei, bb->succs)
6703 cfun->cfg->x_n_edges--;
6704 dest_cfun->cfg->x_n_edges++;
6707 /* Remove BB from the original basic block array. */
6708 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6709 cfun->cfg->x_n_basic_blocks--;
6711 /* Grow DEST_CFUN's basic block array if needed. */
6712 cfg = dest_cfun->cfg;
6713 cfg->x_n_basic_blocks++;
6714 if (bb->index >= cfg->x_last_basic_block)
6715 cfg->x_last_basic_block = bb->index + 1;
6717 old_len = vec_safe_length (cfg->x_basic_block_info);
6718 if ((unsigned) cfg->x_last_basic_block >= old_len)
6720 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6721 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6724 (*cfg->x_basic_block_info)[bb->index] = bb;
6726 /* Remap the variables in phi nodes. */
6727 for (gphi_iterator psi = gsi_start_phis (bb);
6728 !gsi_end_p (psi); )
6730 gphi *phi = psi.phi ();
6731 use_operand_p use;
6732 tree op = PHI_RESULT (phi);
6733 ssa_op_iter oi;
6734 unsigned i;
6736 if (virtual_operand_p (op))
6738 /* Remove the phi nodes for virtual operands (alias analysis will be
6739 run for the new function, anyway). */
6740 remove_phi_node (&psi, true);
6741 continue;
6744 SET_PHI_RESULT (phi,
6745 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6746 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6748 op = USE_FROM_PTR (use);
6749 if (TREE_CODE (op) == SSA_NAME)
6750 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6753 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6755 location_t locus = gimple_phi_arg_location (phi, i);
6756 tree block = LOCATION_BLOCK (locus);
6758 if (locus == UNKNOWN_LOCATION)
6759 continue;
6760 if (d->orig_block == NULL_TREE || block == d->orig_block)
6762 if (d->new_block == NULL_TREE)
6763 locus = LOCATION_LOCUS (locus);
6764 else
6765 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6766 gimple_phi_arg_set_location (phi, i, locus);
6770 gsi_next (&psi);
6773 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6775 gimple stmt = gsi_stmt (si);
6776 struct walk_stmt_info wi;
6778 memset (&wi, 0, sizeof (wi));
6779 wi.info = d;
6780 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6782 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6784 tree label = gimple_label_label (label_stmt);
6785 int uid = LABEL_DECL_UID (label);
6787 gcc_assert (uid > -1);
6789 old_len = vec_safe_length (cfg->x_label_to_block_map);
6790 if (old_len <= (unsigned) uid)
6792 new_len = 3 * uid / 2 + 1;
6793 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6796 (*cfg->x_label_to_block_map)[uid] = bb;
6797 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6799 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6801 if (uid >= dest_cfun->cfg->last_label_uid)
6802 dest_cfun->cfg->last_label_uid = uid + 1;
6805 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6806 remove_stmt_from_eh_lp_fn (cfun, stmt);
6808 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6809 gimple_remove_stmt_histograms (cfun, stmt);
6811 /* We cannot leave any operands allocated from the operand caches of
6812 the current function. */
6813 free_stmt_operands (cfun, stmt);
6814 push_cfun (dest_cfun);
6815 update_stmt (stmt);
6816 pop_cfun ();
6819 FOR_EACH_EDGE (e, ei, bb->succs)
6820 if (e->goto_locus != UNKNOWN_LOCATION)
6822 tree block = LOCATION_BLOCK (e->goto_locus);
6823 if (d->orig_block == NULL_TREE
6824 || block == d->orig_block)
6825 e->goto_locus = d->new_block ?
6826 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6827 LOCATION_LOCUS (e->goto_locus);
6831 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6832 the outermost EH region. Use REGION as the incoming base EH region. */
6834 static eh_region
6835 find_outermost_region_in_block (struct function *src_cfun,
6836 basic_block bb, eh_region region)
6838 gimple_stmt_iterator si;
6840 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6842 gimple stmt = gsi_stmt (si);
6843 eh_region stmt_region;
6844 int lp_nr;
6846 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6847 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6848 if (stmt_region)
6850 if (region == NULL)
6851 region = stmt_region;
6852 else if (stmt_region != region)
6854 region = eh_region_outermost (src_cfun, stmt_region, region);
6855 gcc_assert (region != NULL);
6860 return region;
6863 static tree
6864 new_label_mapper (tree decl, void *data)
6866 htab_t hash = (htab_t) data;
6867 struct tree_map *m;
6868 void **slot;
6870 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6872 m = XNEW (struct tree_map);
6873 m->hash = DECL_UID (decl);
6874 m->base.from = decl;
6875 m->to = create_artificial_label (UNKNOWN_LOCATION);
6876 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6877 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6878 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6880 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6881 gcc_assert (*slot == NULL);
6883 *slot = m;
6885 return m->to;
6888 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6889 subblocks. */
6891 static void
6892 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6893 tree to_context)
6895 tree *tp, t;
6897 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6899 t = *tp;
6900 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6901 continue;
6902 replace_by_duplicate_decl (&t, vars_map, to_context);
6903 if (t != *tp)
6905 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6907 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6908 DECL_HAS_VALUE_EXPR_P (t) = 1;
6910 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6911 *tp = t;
6915 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6916 replace_block_vars_by_duplicates (block, vars_map, to_context);
6919 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6920 from FN1 to FN2. */
6922 static void
6923 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6924 struct loop *loop)
6926 /* Discard it from the old loop array. */
6927 (*get_loops (fn1))[loop->num] = NULL;
6929 /* Place it in the new loop array, assigning it a new number. */
6930 loop->num = number_of_loops (fn2);
6931 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6933 /* Recurse to children. */
6934 for (loop = loop->inner; loop; loop = loop->next)
6935 fixup_loop_arrays_after_move (fn1, fn2, loop);
6938 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6939 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6941 DEBUG_FUNCTION void
6942 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6944 basic_block bb;
6945 edge_iterator ei;
6946 edge e;
6947 bitmap bbs = BITMAP_ALLOC (NULL);
6948 int i;
6950 gcc_assert (entry != NULL);
6951 gcc_assert (entry != exit);
6952 gcc_assert (bbs_p != NULL);
6954 gcc_assert (bbs_p->length () > 0);
6956 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6957 bitmap_set_bit (bbs, bb->index);
6959 gcc_assert (bitmap_bit_p (bbs, entry->index));
6960 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6962 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6964 if (bb == entry)
6966 gcc_assert (single_pred_p (entry));
6967 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6969 else
6970 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6972 e = ei_edge (ei);
6973 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6976 if (bb == exit)
6978 gcc_assert (single_succ_p (exit));
6979 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6981 else
6982 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6984 e = ei_edge (ei);
6985 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6989 BITMAP_FREE (bbs);
6993 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6994 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6995 single basic block in the original CFG and the new basic block is
6996 returned. DEST_CFUN must not have a CFG yet.
6998 Note that the region need not be a pure SESE region. Blocks inside
6999 the region may contain calls to abort/exit. The only restriction
7000 is that ENTRY_BB should be the only entry point and it must
7001 dominate EXIT_BB.
7003 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7004 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7005 to the new function.
7007 All local variables referenced in the region are assumed to be in
7008 the corresponding BLOCK_VARS and unexpanded variable lists
7009 associated with DEST_CFUN. */
7011 basic_block
7012 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7013 basic_block exit_bb, tree orig_block)
7015 vec<basic_block> bbs, dom_bbs;
7016 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7017 basic_block after, bb, *entry_pred, *exit_succ, abb;
7018 struct function *saved_cfun = cfun;
7019 int *entry_flag, *exit_flag;
7020 unsigned *entry_prob, *exit_prob;
7021 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7022 edge e;
7023 edge_iterator ei;
7024 htab_t new_label_map;
7025 hash_map<void *, void *> *eh_map;
7026 struct loop *loop = entry_bb->loop_father;
7027 struct loop *loop0 = get_loop (saved_cfun, 0);
7028 struct move_stmt_d d;
7030 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7031 region. */
7032 gcc_assert (entry_bb != exit_bb
7033 && (!exit_bb
7034 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7036 /* Collect all the blocks in the region. Manually add ENTRY_BB
7037 because it won't be added by dfs_enumerate_from. */
7038 bbs.create (0);
7039 bbs.safe_push (entry_bb);
7040 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7041 #ifdef ENABLE_CHECKING
7042 verify_sese (entry_bb, exit_bb, &bbs);
7043 #endif
7045 /* The blocks that used to be dominated by something in BBS will now be
7046 dominated by the new block. */
7047 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7048 bbs.address (),
7049 bbs.length ());
7051 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7052 the predecessor edges to ENTRY_BB and the successor edges to
7053 EXIT_BB so that we can re-attach them to the new basic block that
7054 will replace the region. */
7055 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7056 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7057 entry_flag = XNEWVEC (int, num_entry_edges);
7058 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7059 i = 0;
7060 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7062 entry_prob[i] = e->probability;
7063 entry_flag[i] = e->flags;
7064 entry_pred[i++] = e->src;
7065 remove_edge (e);
7068 if (exit_bb)
7070 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7071 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7072 exit_flag = XNEWVEC (int, num_exit_edges);
7073 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7074 i = 0;
7075 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7077 exit_prob[i] = e->probability;
7078 exit_flag[i] = e->flags;
7079 exit_succ[i++] = e->dest;
7080 remove_edge (e);
7083 else
7085 num_exit_edges = 0;
7086 exit_succ = NULL;
7087 exit_flag = NULL;
7088 exit_prob = NULL;
7091 /* Switch context to the child function to initialize DEST_FN's CFG. */
7092 gcc_assert (dest_cfun->cfg == NULL);
7093 push_cfun (dest_cfun);
7095 init_empty_tree_cfg ();
7097 /* Initialize EH information for the new function. */
7098 eh_map = NULL;
7099 new_label_map = NULL;
7100 if (saved_cfun->eh)
7102 eh_region region = NULL;
7104 FOR_EACH_VEC_ELT (bbs, i, bb)
7105 region = find_outermost_region_in_block (saved_cfun, bb, region);
7107 init_eh_for_function ();
7108 if (region != NULL)
7110 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7111 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7112 new_label_mapper, new_label_map);
7116 /* Initialize an empty loop tree. */
7117 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7118 init_loops_structure (dest_cfun, loops, 1);
7119 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7120 set_loops_for_fn (dest_cfun, loops);
7122 /* Move the outlined loop tree part. */
7123 num_nodes = bbs.length ();
7124 FOR_EACH_VEC_ELT (bbs, i, bb)
7126 if (bb->loop_father->header == bb)
7128 struct loop *this_loop = bb->loop_father;
7129 struct loop *outer = loop_outer (this_loop);
7130 if (outer == loop
7131 /* If the SESE region contains some bbs ending with
7132 a noreturn call, those are considered to belong
7133 to the outermost loop in saved_cfun, rather than
7134 the entry_bb's loop_father. */
7135 || outer == loop0)
7137 if (outer != loop)
7138 num_nodes -= this_loop->num_nodes;
7139 flow_loop_tree_node_remove (bb->loop_father);
7140 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7141 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7144 else if (bb->loop_father == loop0 && loop0 != loop)
7145 num_nodes--;
7147 /* Remove loop exits from the outlined region. */
7148 if (loops_for_fn (saved_cfun)->exits)
7149 FOR_EACH_EDGE (e, ei, bb->succs)
7151 struct loops *l = loops_for_fn (saved_cfun);
7152 loop_exit **slot
7153 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7154 NO_INSERT);
7155 if (slot)
7156 l->exits->clear_slot (slot);
7161 /* Adjust the number of blocks in the tree root of the outlined part. */
7162 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7164 /* Setup a mapping to be used by move_block_to_fn. */
7165 loop->aux = current_loops->tree_root;
7166 loop0->aux = current_loops->tree_root;
7168 pop_cfun ();
7170 /* Move blocks from BBS into DEST_CFUN. */
7171 gcc_assert (bbs.length () >= 2);
7172 after = dest_cfun->cfg->x_entry_block_ptr;
7173 hash_map<tree, tree> vars_map;
7175 memset (&d, 0, sizeof (d));
7176 d.orig_block = orig_block;
7177 d.new_block = DECL_INITIAL (dest_cfun->decl);
7178 d.from_context = cfun->decl;
7179 d.to_context = dest_cfun->decl;
7180 d.vars_map = &vars_map;
7181 d.new_label_map = new_label_map;
7182 d.eh_map = eh_map;
7183 d.remap_decls_p = true;
7185 FOR_EACH_VEC_ELT (bbs, i, bb)
7187 /* No need to update edge counts on the last block. It has
7188 already been updated earlier when we detached the region from
7189 the original CFG. */
7190 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7191 after = bb;
7194 loop->aux = NULL;
7195 loop0->aux = NULL;
7196 /* Loop sizes are no longer correct, fix them up. */
7197 loop->num_nodes -= num_nodes;
7198 for (struct loop *outer = loop_outer (loop);
7199 outer; outer = loop_outer (outer))
7200 outer->num_nodes -= num_nodes;
7201 loop0->num_nodes -= bbs.length () - num_nodes;
7203 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7205 struct loop *aloop;
7206 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7207 if (aloop != NULL)
7209 if (aloop->simduid)
7211 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7212 d.to_context);
7213 dest_cfun->has_simduid_loops = true;
7215 if (aloop->force_vectorize)
7216 dest_cfun->has_force_vectorize_loops = true;
7220 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7221 if (orig_block)
7223 tree block;
7224 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7225 == NULL_TREE);
7226 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7227 = BLOCK_SUBBLOCKS (orig_block);
7228 for (block = BLOCK_SUBBLOCKS (orig_block);
7229 block; block = BLOCK_CHAIN (block))
7230 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7231 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7234 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7235 &vars_map, dest_cfun->decl);
7237 if (new_label_map)
7238 htab_delete (new_label_map);
7239 if (eh_map)
7240 delete eh_map;
7242 /* Rewire the entry and exit blocks. The successor to the entry
7243 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7244 the child function. Similarly, the predecessor of DEST_FN's
7245 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7246 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7247 various CFG manipulation function get to the right CFG.
7249 FIXME, this is silly. The CFG ought to become a parameter to
7250 these helpers. */
7251 push_cfun (dest_cfun);
7252 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7253 if (exit_bb)
7254 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7255 pop_cfun ();
7257 /* Back in the original function, the SESE region has disappeared,
7258 create a new basic block in its place. */
7259 bb = create_empty_bb (entry_pred[0]);
7260 if (current_loops)
7261 add_bb_to_loop (bb, loop);
7262 for (i = 0; i < num_entry_edges; i++)
7264 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7265 e->probability = entry_prob[i];
7268 for (i = 0; i < num_exit_edges; i++)
7270 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7271 e->probability = exit_prob[i];
7274 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7275 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7276 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7277 dom_bbs.release ();
7279 if (exit_bb)
7281 free (exit_prob);
7282 free (exit_flag);
7283 free (exit_succ);
7285 free (entry_prob);
7286 free (entry_flag);
7287 free (entry_pred);
7288 bbs.release ();
7290 return bb;
7294 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7297 void
7298 dump_function_to_file (tree fndecl, FILE *file, int flags)
7300 tree arg, var, old_current_fndecl = current_function_decl;
7301 struct function *dsf;
7302 bool ignore_topmost_bind = false, any_var = false;
7303 basic_block bb;
7304 tree chain;
7305 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7306 && decl_is_tm_clone (fndecl));
7307 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7309 current_function_decl = fndecl;
7310 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7312 arg = DECL_ARGUMENTS (fndecl);
7313 while (arg)
7315 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7316 fprintf (file, " ");
7317 print_generic_expr (file, arg, dump_flags);
7318 if (flags & TDF_VERBOSE)
7319 print_node (file, "", arg, 4);
7320 if (DECL_CHAIN (arg))
7321 fprintf (file, ", ");
7322 arg = DECL_CHAIN (arg);
7324 fprintf (file, ")\n");
7326 if (flags & TDF_VERBOSE)
7327 print_node (file, "", fndecl, 2);
7329 dsf = DECL_STRUCT_FUNCTION (fndecl);
7330 if (dsf && (flags & TDF_EH))
7331 dump_eh_tree (file, dsf);
7333 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7335 dump_node (fndecl, TDF_SLIM | flags, file);
7336 current_function_decl = old_current_fndecl;
7337 return;
7340 /* When GIMPLE is lowered, the variables are no longer available in
7341 BIND_EXPRs, so display them separately. */
7342 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7344 unsigned ix;
7345 ignore_topmost_bind = true;
7347 fprintf (file, "{\n");
7348 if (!vec_safe_is_empty (fun->local_decls))
7349 FOR_EACH_LOCAL_DECL (fun, ix, var)
7351 print_generic_decl (file, var, flags);
7352 if (flags & TDF_VERBOSE)
7353 print_node (file, "", var, 4);
7354 fprintf (file, "\n");
7356 any_var = true;
7358 if (gimple_in_ssa_p (cfun))
7359 for (ix = 1; ix < num_ssa_names; ++ix)
7361 tree name = ssa_name (ix);
7362 if (name && !SSA_NAME_VAR (name))
7364 fprintf (file, " ");
7365 print_generic_expr (file, TREE_TYPE (name), flags);
7366 fprintf (file, " ");
7367 print_generic_expr (file, name, flags);
7368 fprintf (file, ";\n");
7370 any_var = true;
7375 if (fun && fun->decl == fndecl
7376 && fun->cfg
7377 && basic_block_info_for_fn (fun))
7379 /* If the CFG has been built, emit a CFG-based dump. */
7380 if (!ignore_topmost_bind)
7381 fprintf (file, "{\n");
7383 if (any_var && n_basic_blocks_for_fn (fun))
7384 fprintf (file, "\n");
7386 FOR_EACH_BB_FN (bb, fun)
7387 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7389 fprintf (file, "}\n");
7391 else if (DECL_SAVED_TREE (fndecl) == NULL)
7393 /* The function is now in GIMPLE form but the CFG has not been
7394 built yet. Emit the single sequence of GIMPLE statements
7395 that make up its body. */
7396 gimple_seq body = gimple_body (fndecl);
7398 if (gimple_seq_first_stmt (body)
7399 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7400 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7401 print_gimple_seq (file, body, 0, flags);
7402 else
7404 if (!ignore_topmost_bind)
7405 fprintf (file, "{\n");
7407 if (any_var)
7408 fprintf (file, "\n");
7410 print_gimple_seq (file, body, 2, flags);
7411 fprintf (file, "}\n");
7414 else
7416 int indent;
7418 /* Make a tree based dump. */
7419 chain = DECL_SAVED_TREE (fndecl);
7420 if (chain && TREE_CODE (chain) == BIND_EXPR)
7422 if (ignore_topmost_bind)
7424 chain = BIND_EXPR_BODY (chain);
7425 indent = 2;
7427 else
7428 indent = 0;
7430 else
7432 if (!ignore_topmost_bind)
7434 fprintf (file, "{\n");
7435 /* No topmost bind, pretend it's ignored for later. */
7436 ignore_topmost_bind = true;
7438 indent = 2;
7441 if (any_var)
7442 fprintf (file, "\n");
7444 print_generic_stmt_indented (file, chain, flags, indent);
7445 if (ignore_topmost_bind)
7446 fprintf (file, "}\n");
7449 if (flags & TDF_ENUMERATE_LOCALS)
7450 dump_enumerated_decls (file, flags);
7451 fprintf (file, "\n\n");
7453 current_function_decl = old_current_fndecl;
7456 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7458 DEBUG_FUNCTION void
7459 debug_function (tree fn, int flags)
7461 dump_function_to_file (fn, stderr, flags);
7465 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7467 static void
7468 print_pred_bbs (FILE *file, basic_block bb)
7470 edge e;
7471 edge_iterator ei;
7473 FOR_EACH_EDGE (e, ei, bb->preds)
7474 fprintf (file, "bb_%d ", e->src->index);
7478 /* Print on FILE the indexes for the successors of basic_block BB. */
7480 static void
7481 print_succ_bbs (FILE *file, basic_block bb)
7483 edge e;
7484 edge_iterator ei;
7486 FOR_EACH_EDGE (e, ei, bb->succs)
7487 fprintf (file, "bb_%d ", e->dest->index);
7490 /* Print to FILE the basic block BB following the VERBOSITY level. */
7492 void
7493 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7495 char *s_indent = (char *) alloca ((size_t) indent + 1);
7496 memset ((void *) s_indent, ' ', (size_t) indent);
7497 s_indent[indent] = '\0';
7499 /* Print basic_block's header. */
7500 if (verbosity >= 2)
7502 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7503 print_pred_bbs (file, bb);
7504 fprintf (file, "}, succs = {");
7505 print_succ_bbs (file, bb);
7506 fprintf (file, "})\n");
7509 /* Print basic_block's body. */
7510 if (verbosity >= 3)
7512 fprintf (file, "%s {\n", s_indent);
7513 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7514 fprintf (file, "%s }\n", s_indent);
7518 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7520 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7521 VERBOSITY level this outputs the contents of the loop, or just its
7522 structure. */
7524 static void
7525 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7527 char *s_indent;
7528 basic_block bb;
7530 if (loop == NULL)
7531 return;
7533 s_indent = (char *) alloca ((size_t) indent + 1);
7534 memset ((void *) s_indent, ' ', (size_t) indent);
7535 s_indent[indent] = '\0';
7537 /* Print loop's header. */
7538 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7539 if (loop->header)
7540 fprintf (file, "header = %d", loop->header->index);
7541 else
7543 fprintf (file, "deleted)\n");
7544 return;
7546 if (loop->latch)
7547 fprintf (file, ", latch = %d", loop->latch->index);
7548 else
7549 fprintf (file, ", multiple latches");
7550 fprintf (file, ", niter = ");
7551 print_generic_expr (file, loop->nb_iterations, 0);
7553 if (loop->any_upper_bound)
7555 fprintf (file, ", upper_bound = ");
7556 print_decu (loop->nb_iterations_upper_bound, file);
7559 if (loop->any_estimate)
7561 fprintf (file, ", estimate = ");
7562 print_decu (loop->nb_iterations_estimate, file);
7564 fprintf (file, ")\n");
7566 /* Print loop's body. */
7567 if (verbosity >= 1)
7569 fprintf (file, "%s{\n", s_indent);
7570 FOR_EACH_BB_FN (bb, cfun)
7571 if (bb->loop_father == loop)
7572 print_loops_bb (file, bb, indent, verbosity);
7574 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7575 fprintf (file, "%s}\n", s_indent);
7579 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7580 spaces. Following VERBOSITY level this outputs the contents of the
7581 loop, or just its structure. */
7583 static void
7584 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7585 int verbosity)
7587 if (loop == NULL)
7588 return;
7590 print_loop (file, loop, indent, verbosity);
7591 print_loop_and_siblings (file, loop->next, indent, verbosity);
7594 /* Follow a CFG edge from the entry point of the program, and on entry
7595 of a loop, pretty print the loop structure on FILE. */
7597 void
7598 print_loops (FILE *file, int verbosity)
7600 basic_block bb;
7602 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7603 if (bb && bb->loop_father)
7604 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7607 /* Dump a loop. */
7609 DEBUG_FUNCTION void
7610 debug (struct loop &ref)
7612 print_loop (stderr, &ref, 0, /*verbosity*/0);
7615 DEBUG_FUNCTION void
7616 debug (struct loop *ptr)
7618 if (ptr)
7619 debug (*ptr);
7620 else
7621 fprintf (stderr, "<nil>\n");
7624 /* Dump a loop verbosely. */
7626 DEBUG_FUNCTION void
7627 debug_verbose (struct loop &ref)
7629 print_loop (stderr, &ref, 0, /*verbosity*/3);
7632 DEBUG_FUNCTION void
7633 debug_verbose (struct loop *ptr)
7635 if (ptr)
7636 debug (*ptr);
7637 else
7638 fprintf (stderr, "<nil>\n");
7642 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7644 DEBUG_FUNCTION void
7645 debug_loops (int verbosity)
7647 print_loops (stderr, verbosity);
7650 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7652 DEBUG_FUNCTION void
7653 debug_loop (struct loop *loop, int verbosity)
7655 print_loop (stderr, loop, 0, verbosity);
7658 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7659 level. */
7661 DEBUG_FUNCTION void
7662 debug_loop_num (unsigned num, int verbosity)
7664 debug_loop (get_loop (cfun, num), verbosity);
7667 /* Return true if BB ends with a call, possibly followed by some
7668 instructions that must stay with the call. Return false,
7669 otherwise. */
7671 static bool
7672 gimple_block_ends_with_call_p (basic_block bb)
7674 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7675 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7679 /* Return true if BB ends with a conditional branch. Return false,
7680 otherwise. */
7682 static bool
7683 gimple_block_ends_with_condjump_p (const_basic_block bb)
7685 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7686 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7690 /* Return true if we need to add fake edge to exit at statement T.
7691 Helper function for gimple_flow_call_edges_add. */
7693 static bool
7694 need_fake_edge_p (gimple t)
7696 tree fndecl = NULL_TREE;
7697 int call_flags = 0;
7699 /* NORETURN and LONGJMP calls already have an edge to exit.
7700 CONST and PURE calls do not need one.
7701 We don't currently check for CONST and PURE here, although
7702 it would be a good idea, because those attributes are
7703 figured out from the RTL in mark_constant_function, and
7704 the counter incrementation code from -fprofile-arcs
7705 leads to different results from -fbranch-probabilities. */
7706 if (is_gimple_call (t))
7708 fndecl = gimple_call_fndecl (t);
7709 call_flags = gimple_call_flags (t);
7712 if (is_gimple_call (t)
7713 && fndecl
7714 && DECL_BUILT_IN (fndecl)
7715 && (call_flags & ECF_NOTHROW)
7716 && !(call_flags & ECF_RETURNS_TWICE)
7717 /* fork() doesn't really return twice, but the effect of
7718 wrapping it in __gcov_fork() which calls __gcov_flush()
7719 and clears the counters before forking has the same
7720 effect as returning twice. Force a fake edge. */
7721 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7722 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7723 return false;
7725 if (is_gimple_call (t))
7727 edge_iterator ei;
7728 edge e;
7729 basic_block bb;
7731 if (!(call_flags & ECF_NORETURN))
7732 return true;
7734 bb = gimple_bb (t);
7735 FOR_EACH_EDGE (e, ei, bb->succs)
7736 if ((e->flags & EDGE_FAKE) == 0)
7737 return true;
7740 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7741 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7742 return true;
7744 return false;
7748 /* Add fake edges to the function exit for any non constant and non
7749 noreturn calls (or noreturn calls with EH/abnormal edges),
7750 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7751 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7752 that were split.
7754 The goal is to expose cases in which entering a basic block does
7755 not imply that all subsequent instructions must be executed. */
7757 static int
7758 gimple_flow_call_edges_add (sbitmap blocks)
7760 int i;
7761 int blocks_split = 0;
7762 int last_bb = last_basic_block_for_fn (cfun);
7763 bool check_last_block = false;
7765 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7766 return 0;
7768 if (! blocks)
7769 check_last_block = true;
7770 else
7771 check_last_block = bitmap_bit_p (blocks,
7772 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7774 /* In the last basic block, before epilogue generation, there will be
7775 a fallthru edge to EXIT. Special care is required if the last insn
7776 of the last basic block is a call because make_edge folds duplicate
7777 edges, which would result in the fallthru edge also being marked
7778 fake, which would result in the fallthru edge being removed by
7779 remove_fake_edges, which would result in an invalid CFG.
7781 Moreover, we can't elide the outgoing fake edge, since the block
7782 profiler needs to take this into account in order to solve the minimal
7783 spanning tree in the case that the call doesn't return.
7785 Handle this by adding a dummy instruction in a new last basic block. */
7786 if (check_last_block)
7788 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7789 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7790 gimple t = NULL;
7792 if (!gsi_end_p (gsi))
7793 t = gsi_stmt (gsi);
7795 if (t && need_fake_edge_p (t))
7797 edge e;
7799 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7800 if (e)
7802 gsi_insert_on_edge (e, gimple_build_nop ());
7803 gsi_commit_edge_inserts ();
7808 /* Now add fake edges to the function exit for any non constant
7809 calls since there is no way that we can determine if they will
7810 return or not... */
7811 for (i = 0; i < last_bb; i++)
7813 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7814 gimple_stmt_iterator gsi;
7815 gimple stmt, last_stmt;
7817 if (!bb)
7818 continue;
7820 if (blocks && !bitmap_bit_p (blocks, i))
7821 continue;
7823 gsi = gsi_last_nondebug_bb (bb);
7824 if (!gsi_end_p (gsi))
7826 last_stmt = gsi_stmt (gsi);
7829 stmt = gsi_stmt (gsi);
7830 if (need_fake_edge_p (stmt))
7832 edge e;
7834 /* The handling above of the final block before the
7835 epilogue should be enough to verify that there is
7836 no edge to the exit block in CFG already.
7837 Calling make_edge in such case would cause us to
7838 mark that edge as fake and remove it later. */
7839 #ifdef ENABLE_CHECKING
7840 if (stmt == last_stmt)
7842 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7843 gcc_assert (e == NULL);
7845 #endif
7847 /* Note that the following may create a new basic block
7848 and renumber the existing basic blocks. */
7849 if (stmt != last_stmt)
7851 e = split_block (bb, stmt);
7852 if (e)
7853 blocks_split++;
7855 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7857 gsi_prev (&gsi);
7859 while (!gsi_end_p (gsi));
7863 if (blocks_split)
7864 verify_flow_info ();
7866 return blocks_split;
7869 /* Removes edge E and all the blocks dominated by it, and updates dominance
7870 information. The IL in E->src needs to be updated separately.
7871 If dominance info is not available, only the edge E is removed.*/
7873 void
7874 remove_edge_and_dominated_blocks (edge e)
7876 vec<basic_block> bbs_to_remove = vNULL;
7877 vec<basic_block> bbs_to_fix_dom = vNULL;
7878 bitmap df, df_idom;
7879 edge f;
7880 edge_iterator ei;
7881 bool none_removed = false;
7882 unsigned i;
7883 basic_block bb, dbb;
7884 bitmap_iterator bi;
7886 /* If we are removing a path inside a non-root loop that may change
7887 loop ownership of blocks or remove loops. Mark loops for fixup. */
7888 if (current_loops
7889 && loop_outer (e->src->loop_father) != NULL
7890 && e->src->loop_father == e->dest->loop_father)
7891 loops_state_set (LOOPS_NEED_FIXUP);
7893 if (!dom_info_available_p (CDI_DOMINATORS))
7895 remove_edge (e);
7896 return;
7899 /* No updating is needed for edges to exit. */
7900 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7902 if (cfgcleanup_altered_bbs)
7903 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7904 remove_edge (e);
7905 return;
7908 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7909 that is not dominated by E->dest, then this set is empty. Otherwise,
7910 all the basic blocks dominated by E->dest are removed.
7912 Also, to DF_IDOM we store the immediate dominators of the blocks in
7913 the dominance frontier of E (i.e., of the successors of the
7914 removed blocks, if there are any, and of E->dest otherwise). */
7915 FOR_EACH_EDGE (f, ei, e->dest->preds)
7917 if (f == e)
7918 continue;
7920 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7922 none_removed = true;
7923 break;
7927 df = BITMAP_ALLOC (NULL);
7928 df_idom = BITMAP_ALLOC (NULL);
7930 if (none_removed)
7931 bitmap_set_bit (df_idom,
7932 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7933 else
7935 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7936 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7938 FOR_EACH_EDGE (f, ei, bb->succs)
7940 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7941 bitmap_set_bit (df, f->dest->index);
7944 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7945 bitmap_clear_bit (df, bb->index);
7947 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7949 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7950 bitmap_set_bit (df_idom,
7951 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7955 if (cfgcleanup_altered_bbs)
7957 /* Record the set of the altered basic blocks. */
7958 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7959 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7962 /* Remove E and the cancelled blocks. */
7963 if (none_removed)
7964 remove_edge (e);
7965 else
7967 /* Walk backwards so as to get a chance to substitute all
7968 released DEFs into debug stmts. See
7969 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7970 details. */
7971 for (i = bbs_to_remove.length (); i-- > 0; )
7972 delete_basic_block (bbs_to_remove[i]);
7975 /* Update the dominance information. The immediate dominator may change only
7976 for blocks whose immediate dominator belongs to DF_IDOM:
7978 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7979 removal. Let Z the arbitrary block such that idom(Z) = Y and
7980 Z dominates X after the removal. Before removal, there exists a path P
7981 from Y to X that avoids Z. Let F be the last edge on P that is
7982 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7983 dominates W, and because of P, Z does not dominate W), and W belongs to
7984 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7985 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7987 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7988 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7989 dbb;
7990 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7991 bbs_to_fix_dom.safe_push (dbb);
7994 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7996 BITMAP_FREE (df);
7997 BITMAP_FREE (df_idom);
7998 bbs_to_remove.release ();
7999 bbs_to_fix_dom.release ();
8002 /* Purge dead EH edges from basic block BB. */
8004 bool
8005 gimple_purge_dead_eh_edges (basic_block bb)
8007 bool changed = false;
8008 edge e;
8009 edge_iterator ei;
8010 gimple stmt = last_stmt (bb);
8012 if (stmt && stmt_can_throw_internal (stmt))
8013 return false;
8015 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8017 if (e->flags & EDGE_EH)
8019 remove_edge_and_dominated_blocks (e);
8020 changed = true;
8022 else
8023 ei_next (&ei);
8026 return changed;
8029 /* Purge dead EH edges from basic block listed in BLOCKS. */
8031 bool
8032 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8034 bool changed = false;
8035 unsigned i;
8036 bitmap_iterator bi;
8038 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8040 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8042 /* Earlier gimple_purge_dead_eh_edges could have removed
8043 this basic block already. */
8044 gcc_assert (bb || changed);
8045 if (bb != NULL)
8046 changed |= gimple_purge_dead_eh_edges (bb);
8049 return changed;
8052 /* Purge dead abnormal call edges from basic block BB. */
8054 bool
8055 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8057 bool changed = false;
8058 edge e;
8059 edge_iterator ei;
8060 gimple stmt = last_stmt (bb);
8062 if (!cfun->has_nonlocal_label
8063 && !cfun->calls_setjmp)
8064 return false;
8066 if (stmt && stmt_can_make_abnormal_goto (stmt))
8067 return false;
8069 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8071 if (e->flags & EDGE_ABNORMAL)
8073 if (e->flags & EDGE_FALLTHRU)
8074 e->flags &= ~EDGE_ABNORMAL;
8075 else
8076 remove_edge_and_dominated_blocks (e);
8077 changed = true;
8079 else
8080 ei_next (&ei);
8083 return changed;
8086 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8088 bool
8089 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8091 bool changed = false;
8092 unsigned i;
8093 bitmap_iterator bi;
8095 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8097 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8099 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8100 this basic block already. */
8101 gcc_assert (bb || changed);
8102 if (bb != NULL)
8103 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8106 return changed;
8109 /* This function is called whenever a new edge is created or
8110 redirected. */
8112 static void
8113 gimple_execute_on_growing_pred (edge e)
8115 basic_block bb = e->dest;
8117 if (!gimple_seq_empty_p (phi_nodes (bb)))
8118 reserve_phi_args_for_new_edge (bb);
8121 /* This function is called immediately before edge E is removed from
8122 the edge vector E->dest->preds. */
8124 static void
8125 gimple_execute_on_shrinking_pred (edge e)
8127 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8128 remove_phi_args (e);
8131 /*---------------------------------------------------------------------------
8132 Helper functions for Loop versioning
8133 ---------------------------------------------------------------------------*/
8135 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8136 of 'first'. Both of them are dominated by 'new_head' basic block. When
8137 'new_head' was created by 'second's incoming edge it received phi arguments
8138 on the edge by split_edge(). Later, additional edge 'e' was created to
8139 connect 'new_head' and 'first'. Now this routine adds phi args on this
8140 additional edge 'e' that new_head to second edge received as part of edge
8141 splitting. */
8143 static void
8144 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8145 basic_block new_head, edge e)
8147 gphi *phi1, *phi2;
8148 gphi_iterator psi1, psi2;
8149 tree def;
8150 edge e2 = find_edge (new_head, second);
8152 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8153 edge, we should always have an edge from NEW_HEAD to SECOND. */
8154 gcc_assert (e2 != NULL);
8156 /* Browse all 'second' basic block phi nodes and add phi args to
8157 edge 'e' for 'first' head. PHI args are always in correct order. */
8159 for (psi2 = gsi_start_phis (second),
8160 psi1 = gsi_start_phis (first);
8161 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8162 gsi_next (&psi2), gsi_next (&psi1))
8164 phi1 = psi1.phi ();
8165 phi2 = psi2.phi ();
8166 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8167 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8172 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8173 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8174 the destination of the ELSE part. */
8176 static void
8177 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8178 basic_block second_head ATTRIBUTE_UNUSED,
8179 basic_block cond_bb, void *cond_e)
8181 gimple_stmt_iterator gsi;
8182 gimple new_cond_expr;
8183 tree cond_expr = (tree) cond_e;
8184 edge e0;
8186 /* Build new conditional expr */
8187 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8188 NULL_TREE, NULL_TREE);
8190 /* Add new cond in cond_bb. */
8191 gsi = gsi_last_bb (cond_bb);
8192 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8194 /* Adjust edges appropriately to connect new head with first head
8195 as well as second head. */
8196 e0 = single_succ_edge (cond_bb);
8197 e0->flags &= ~EDGE_FALLTHRU;
8198 e0->flags |= EDGE_FALSE_VALUE;
8202 /* Do book-keeping of basic block BB for the profile consistency checker.
8203 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8204 then do post-pass accounting. Store the counting in RECORD. */
8205 static void
8206 gimple_account_profile_record (basic_block bb, int after_pass,
8207 struct profile_record *record)
8209 gimple_stmt_iterator i;
8210 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8212 record->size[after_pass]
8213 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8214 if (profile_status_for_fn (cfun) == PROFILE_READ)
8215 record->time[after_pass]
8216 += estimate_num_insns (gsi_stmt (i),
8217 &eni_time_weights) * bb->count;
8218 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8219 record->time[after_pass]
8220 += estimate_num_insns (gsi_stmt (i),
8221 &eni_time_weights) * bb->frequency;
8225 struct cfg_hooks gimple_cfg_hooks = {
8226 "gimple",
8227 gimple_verify_flow_info,
8228 gimple_dump_bb, /* dump_bb */
8229 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8230 create_bb, /* create_basic_block */
8231 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8232 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8233 gimple_can_remove_branch_p, /* can_remove_branch_p */
8234 remove_bb, /* delete_basic_block */
8235 gimple_split_block, /* split_block */
8236 gimple_move_block_after, /* move_block_after */
8237 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8238 gimple_merge_blocks, /* merge_blocks */
8239 gimple_predict_edge, /* predict_edge */
8240 gimple_predicted_by_p, /* predicted_by_p */
8241 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8242 gimple_duplicate_bb, /* duplicate_block */
8243 gimple_split_edge, /* split_edge */
8244 gimple_make_forwarder_block, /* make_forward_block */
8245 NULL, /* tidy_fallthru_edge */
8246 NULL, /* force_nonfallthru */
8247 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8248 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8249 gimple_flow_call_edges_add, /* flow_call_edges_add */
8250 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8251 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8252 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8253 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8254 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8255 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8256 flush_pending_stmts, /* flush_pending_stmts */
8257 gimple_empty_block_p, /* block_empty_p */
8258 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8259 gimple_account_profile_record,
8263 /* Split all critical edges. */
8265 unsigned int
8266 split_critical_edges (void)
8268 basic_block bb;
8269 edge e;
8270 edge_iterator ei;
8272 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8273 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8274 mappings around the calls to split_edge. */
8275 start_recording_case_labels ();
8276 FOR_ALL_BB_FN (bb, cfun)
8278 FOR_EACH_EDGE (e, ei, bb->succs)
8280 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8281 split_edge (e);
8282 /* PRE inserts statements to edges and expects that
8283 since split_critical_edges was done beforehand, committing edge
8284 insertions will not split more edges. In addition to critical
8285 edges we must split edges that have multiple successors and
8286 end by control flow statements, such as RESX.
8287 Go ahead and split them too. This matches the logic in
8288 gimple_find_edge_insert_loc. */
8289 else if ((!single_pred_p (e->dest)
8290 || !gimple_seq_empty_p (phi_nodes (e->dest))
8291 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8292 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8293 && !(e->flags & EDGE_ABNORMAL))
8295 gimple_stmt_iterator gsi;
8297 gsi = gsi_last_bb (e->src);
8298 if (!gsi_end_p (gsi)
8299 && stmt_ends_bb_p (gsi_stmt (gsi))
8300 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8301 && !gimple_call_builtin_p (gsi_stmt (gsi),
8302 BUILT_IN_RETURN)))
8303 split_edge (e);
8307 end_recording_case_labels ();
8308 return 0;
8311 namespace {
8313 const pass_data pass_data_split_crit_edges =
8315 GIMPLE_PASS, /* type */
8316 "crited", /* name */
8317 OPTGROUP_NONE, /* optinfo_flags */
8318 TV_TREE_SPLIT_EDGES, /* tv_id */
8319 PROP_cfg, /* properties_required */
8320 PROP_no_crit_edges, /* properties_provided */
8321 0, /* properties_destroyed */
8322 0, /* todo_flags_start */
8323 0, /* todo_flags_finish */
8326 class pass_split_crit_edges : public gimple_opt_pass
8328 public:
8329 pass_split_crit_edges (gcc::context *ctxt)
8330 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8333 /* opt_pass methods: */
8334 virtual unsigned int execute (function *) { return split_critical_edges (); }
8336 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8337 }; // class pass_split_crit_edges
8339 } // anon namespace
8341 gimple_opt_pass *
8342 make_pass_split_crit_edges (gcc::context *ctxt)
8344 return new pass_split_crit_edges (ctxt);
8348 /* Insert COND expression which is GIMPLE_COND after STMT
8349 in basic block BB with appropriate basic block split
8350 and creation of a new conditionally executed basic block.
8351 Return created basic block. */
8352 basic_block
8353 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8355 edge fall = split_block (bb, stmt);
8356 gimple_stmt_iterator iter = gsi_last_bb (bb);
8357 basic_block new_bb;
8359 /* Insert cond statement. */
8360 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8361 if (gsi_end_p (iter))
8362 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8363 else
8364 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8366 /* Create conditionally executed block. */
8367 new_bb = create_empty_bb (bb);
8368 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8369 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8371 /* Fix edge for split bb. */
8372 fall->flags = EDGE_FALSE_VALUE;
8374 /* Update dominance info. */
8375 if (dom_info_available_p (CDI_DOMINATORS))
8377 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8378 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8381 /* Update loop info. */
8382 if (current_loops)
8383 add_bb_to_loop (new_bb, bb->loop_father);
8385 return new_bb;
8388 /* Build a ternary operation and gimplify it. Emit code before GSI.
8389 Return the gimple_val holding the result. */
8391 tree
8392 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8393 tree type, tree a, tree b, tree c)
8395 tree ret;
8396 location_t loc = gimple_location (gsi_stmt (*gsi));
8398 ret = fold_build3_loc (loc, code, type, a, b, c);
8399 STRIP_NOPS (ret);
8401 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8402 GSI_SAME_STMT);
8405 /* Build a binary operation and gimplify it. Emit code before GSI.
8406 Return the gimple_val holding the result. */
8408 tree
8409 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8410 tree type, tree a, tree b)
8412 tree ret;
8414 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8415 STRIP_NOPS (ret);
8417 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8418 GSI_SAME_STMT);
8421 /* Build a unary operation and gimplify it. Emit code before GSI.
8422 Return the gimple_val holding the result. */
8424 tree
8425 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8426 tree a)
8428 tree ret;
8430 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8431 STRIP_NOPS (ret);
8433 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8434 GSI_SAME_STMT);
8439 /* Given a basic block B which ends with a conditional and has
8440 precisely two successors, determine which of the edges is taken if
8441 the conditional is true and which is taken if the conditional is
8442 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8444 void
8445 extract_true_false_edges_from_block (basic_block b,
8446 edge *true_edge,
8447 edge *false_edge)
8449 edge e = EDGE_SUCC (b, 0);
8451 if (e->flags & EDGE_TRUE_VALUE)
8453 *true_edge = e;
8454 *false_edge = EDGE_SUCC (b, 1);
8456 else
8458 *false_edge = e;
8459 *true_edge = EDGE_SUCC (b, 1);
8463 /* Emit return warnings. */
8465 namespace {
8467 const pass_data pass_data_warn_function_return =
8469 GIMPLE_PASS, /* type */
8470 "*warn_function_return", /* name */
8471 OPTGROUP_NONE, /* optinfo_flags */
8472 TV_NONE, /* tv_id */
8473 PROP_cfg, /* properties_required */
8474 0, /* properties_provided */
8475 0, /* properties_destroyed */
8476 0, /* todo_flags_start */
8477 0, /* todo_flags_finish */
8480 class pass_warn_function_return : public gimple_opt_pass
8482 public:
8483 pass_warn_function_return (gcc::context *ctxt)
8484 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8487 /* opt_pass methods: */
8488 virtual unsigned int execute (function *);
8490 }; // class pass_warn_function_return
8492 unsigned int
8493 pass_warn_function_return::execute (function *fun)
8495 source_location location;
8496 gimple last;
8497 edge e;
8498 edge_iterator ei;
8500 if (!targetm.warn_func_return (fun->decl))
8501 return 0;
8503 /* If we have a path to EXIT, then we do return. */
8504 if (TREE_THIS_VOLATILE (fun->decl)
8505 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8507 location = UNKNOWN_LOCATION;
8508 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8510 last = last_stmt (e->src);
8511 if ((gimple_code (last) == GIMPLE_RETURN
8512 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8513 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8514 break;
8516 if (location == UNKNOWN_LOCATION)
8517 location = cfun->function_end_locus;
8518 warning_at (location, 0, "%<noreturn%> function does return");
8521 /* If we see "return;" in some basic block, then we do reach the end
8522 without returning a value. */
8523 else if (warn_return_type
8524 && !TREE_NO_WARNING (fun->decl)
8525 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8526 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8528 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8530 gimple last = last_stmt (e->src);
8531 greturn *return_stmt = dyn_cast <greturn *> (last);
8532 if (return_stmt
8533 && gimple_return_retval (return_stmt) == NULL
8534 && !gimple_no_warning_p (last))
8536 location = gimple_location (last);
8537 if (location == UNKNOWN_LOCATION)
8538 location = fun->function_end_locus;
8539 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8540 TREE_NO_WARNING (fun->decl) = 1;
8541 break;
8545 return 0;
8548 } // anon namespace
8550 gimple_opt_pass *
8551 make_pass_warn_function_return (gcc::context *ctxt)
8553 return new pass_warn_function_return (ctxt);
8556 /* Walk a gimplified function and warn for functions whose return value is
8557 ignored and attribute((warn_unused_result)) is set. This is done before
8558 inlining, so we don't have to worry about that. */
8560 static void
8561 do_warn_unused_result (gimple_seq seq)
8563 tree fdecl, ftype;
8564 gimple_stmt_iterator i;
8566 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8568 gimple g = gsi_stmt (i);
8570 switch (gimple_code (g))
8572 case GIMPLE_BIND:
8573 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8574 break;
8575 case GIMPLE_TRY:
8576 do_warn_unused_result (gimple_try_eval (g));
8577 do_warn_unused_result (gimple_try_cleanup (g));
8578 break;
8579 case GIMPLE_CATCH:
8580 do_warn_unused_result (gimple_catch_handler (
8581 as_a <gcatch *> (g)));
8582 break;
8583 case GIMPLE_EH_FILTER:
8584 do_warn_unused_result (gimple_eh_filter_failure (g));
8585 break;
8587 case GIMPLE_CALL:
8588 if (gimple_call_lhs (g))
8589 break;
8590 if (gimple_call_internal_p (g))
8591 break;
8593 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8594 LHS. All calls whose value is ignored should be
8595 represented like this. Look for the attribute. */
8596 fdecl = gimple_call_fndecl (g);
8597 ftype = gimple_call_fntype (g);
8599 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8601 location_t loc = gimple_location (g);
8603 if (fdecl)
8604 warning_at (loc, OPT_Wunused_result,
8605 "ignoring return value of %qD, "
8606 "declared with attribute warn_unused_result",
8607 fdecl);
8608 else
8609 warning_at (loc, OPT_Wunused_result,
8610 "ignoring return value of function "
8611 "declared with attribute warn_unused_result");
8613 break;
8615 default:
8616 /* Not a container, not a call, or a call whose value is used. */
8617 break;
8622 namespace {
8624 const pass_data pass_data_warn_unused_result =
8626 GIMPLE_PASS, /* type */
8627 "*warn_unused_result", /* name */
8628 OPTGROUP_NONE, /* optinfo_flags */
8629 TV_NONE, /* tv_id */
8630 PROP_gimple_any, /* properties_required */
8631 0, /* properties_provided */
8632 0, /* properties_destroyed */
8633 0, /* todo_flags_start */
8634 0, /* todo_flags_finish */
8637 class pass_warn_unused_result : public gimple_opt_pass
8639 public:
8640 pass_warn_unused_result (gcc::context *ctxt)
8641 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8644 /* opt_pass methods: */
8645 virtual bool gate (function *) { return flag_warn_unused_result; }
8646 virtual unsigned int execute (function *)
8648 do_warn_unused_result (gimple_body (current_function_decl));
8649 return 0;
8652 }; // class pass_warn_unused_result
8654 } // anon namespace
8656 gimple_opt_pass *
8657 make_pass_warn_unused_result (gcc::context *ctxt)
8659 return new pass_warn_unused_result (ctxt);
8662 /* IPA passes, compilation of earlier functions or inlining
8663 might have changed some properties, such as marked functions nothrow,
8664 pure, const or noreturn.
8665 Remove redundant edges and basic blocks, and create new ones if necessary.
8667 This pass can't be executed as stand alone pass from pass manager, because
8668 in between inlining and this fixup the verify_flow_info would fail. */
8670 unsigned int
8671 execute_fixup_cfg (void)
8673 basic_block bb;
8674 gimple_stmt_iterator gsi;
8675 int todo = 0;
8676 gcov_type count_scale;
8677 edge e;
8678 edge_iterator ei;
8680 count_scale
8681 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8682 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8684 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8685 cgraph_node::get (current_function_decl)->count;
8686 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8687 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8688 count_scale);
8690 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8691 e->count = apply_scale (e->count, count_scale);
8693 FOR_EACH_BB_FN (bb, cfun)
8695 bb->count = apply_scale (bb->count, count_scale);
8696 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8698 gimple stmt = gsi_stmt (gsi);
8699 tree decl = is_gimple_call (stmt)
8700 ? gimple_call_fndecl (stmt)
8701 : NULL;
8702 if (decl)
8704 int flags = gimple_call_flags (stmt);
8705 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8707 if (gimple_purge_dead_abnormal_call_edges (bb))
8708 todo |= TODO_cleanup_cfg;
8710 if (gimple_in_ssa_p (cfun))
8712 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8713 update_stmt (stmt);
8717 if (flags & ECF_NORETURN
8718 && fixup_noreturn_call (stmt))
8719 todo |= TODO_cleanup_cfg;
8722 /* Remove stores to variables we marked write-only.
8723 Keep access when store has side effect, i.e. in case when source
8724 is volatile. */
8725 if (gimple_store_p (stmt)
8726 && !gimple_has_side_effects (stmt))
8728 tree lhs = get_base_address (gimple_get_lhs (stmt));
8730 if (TREE_CODE (lhs) == VAR_DECL
8731 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8732 && varpool_node::get (lhs)->writeonly)
8734 unlink_stmt_vdef (stmt);
8735 gsi_remove (&gsi, true);
8736 release_defs (stmt);
8737 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8738 continue;
8741 /* For calls we can simply remove LHS when it is known
8742 to be write-only. */
8743 if (is_gimple_call (stmt)
8744 && gimple_get_lhs (stmt))
8746 tree lhs = get_base_address (gimple_get_lhs (stmt));
8748 if (TREE_CODE (lhs) == VAR_DECL
8749 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8750 && varpool_node::get (lhs)->writeonly)
8752 gimple_call_set_lhs (stmt, NULL);
8753 update_stmt (stmt);
8754 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8758 if (maybe_clean_eh_stmt (stmt)
8759 && gimple_purge_dead_eh_edges (bb))
8760 todo |= TODO_cleanup_cfg;
8761 gsi_next (&gsi);
8764 FOR_EACH_EDGE (e, ei, bb->succs)
8765 e->count = apply_scale (e->count, count_scale);
8767 /* If we have a basic block with no successors that does not
8768 end with a control statement or a noreturn call end it with
8769 a call to __builtin_unreachable. This situation can occur
8770 when inlining a noreturn call that does in fact return. */
8771 if (EDGE_COUNT (bb->succs) == 0)
8773 gimple stmt = last_stmt (bb);
8774 if (!stmt
8775 || (!is_ctrl_stmt (stmt)
8776 && (!is_gimple_call (stmt)
8777 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8779 if (stmt && is_gimple_call (stmt))
8780 gimple_call_set_ctrl_altering (stmt, false);
8781 stmt = gimple_build_call
8782 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8783 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8784 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8788 if (count_scale != REG_BR_PROB_BASE)
8789 compute_function_frequency ();
8791 if (current_loops
8792 && (todo & TODO_cleanup_cfg))
8793 loops_state_set (LOOPS_NEED_FIXUP);
8795 return todo;
8798 namespace {
8800 const pass_data pass_data_fixup_cfg =
8802 GIMPLE_PASS, /* type */
8803 "fixup_cfg", /* name */
8804 OPTGROUP_NONE, /* optinfo_flags */
8805 TV_NONE, /* tv_id */
8806 PROP_cfg, /* properties_required */
8807 0, /* properties_provided */
8808 0, /* properties_destroyed */
8809 0, /* todo_flags_start */
8810 0, /* todo_flags_finish */
8813 class pass_fixup_cfg : public gimple_opt_pass
8815 public:
8816 pass_fixup_cfg (gcc::context *ctxt)
8817 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8820 /* opt_pass methods: */
8821 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8822 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8824 }; // class pass_fixup_cfg
8826 } // anon namespace
8828 gimple_opt_pass *
8829 make_pass_fixup_cfg (gcc::context *ctxt)
8831 return new pass_fixup_cfg (ctxt);
8834 /* Garbage collection support for edge_def. */
8836 extern void gt_ggc_mx (tree&);
8837 extern void gt_ggc_mx (gimple&);
8838 extern void gt_ggc_mx (rtx&);
8839 extern void gt_ggc_mx (basic_block&);
8841 static void
8842 gt_ggc_mx (rtx_insn *& x)
8844 if (x)
8845 gt_ggc_mx_rtx_def ((void *) x);
8848 void
8849 gt_ggc_mx (edge_def *e)
8851 tree block = LOCATION_BLOCK (e->goto_locus);
8852 gt_ggc_mx (e->src);
8853 gt_ggc_mx (e->dest);
8854 if (current_ir_type () == IR_GIMPLE)
8855 gt_ggc_mx (e->insns.g);
8856 else
8857 gt_ggc_mx (e->insns.r);
8858 gt_ggc_mx (block);
8861 /* PCH support for edge_def. */
8863 extern void gt_pch_nx (tree&);
8864 extern void gt_pch_nx (gimple&);
8865 extern void gt_pch_nx (rtx&);
8866 extern void gt_pch_nx (basic_block&);
8868 static void
8869 gt_pch_nx (rtx_insn *& x)
8871 if (x)
8872 gt_pch_nx_rtx_def ((void *) x);
8875 void
8876 gt_pch_nx (edge_def *e)
8878 tree block = LOCATION_BLOCK (e->goto_locus);
8879 gt_pch_nx (e->src);
8880 gt_pch_nx (e->dest);
8881 if (current_ir_type () == IR_GIMPLE)
8882 gt_pch_nx (e->insns.g);
8883 else
8884 gt_pch_nx (e->insns.r);
8885 gt_pch_nx (block);
8888 void
8889 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8891 tree block = LOCATION_BLOCK (e->goto_locus);
8892 op (&(e->src), cookie);
8893 op (&(e->dest), cookie);
8894 if (current_ir_type () == IR_GIMPLE)
8895 op (&(e->insns.g), cookie);
8896 else
8897 op (&(e->insns.r), cookie);
8898 op (&(block), cookie);