* tree-if-conv.c: Fix various typos in comments.
[official-gcc.git] / gcc / tree-cfg.c
blobe26454a617d878bae2955bf2fee3d503afe9e042
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "trans-mem.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "cfganal.h"
37 #include "flags.h"
38 #include "gimple-pretty-print.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "cgraph.h"
46 #include "tree-cfg.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "tree-dump.h"
62 #include "tree-pass.h"
63 #include "diagnostic-core.h"
64 #include "except.h"
65 #include "cfgloop.h"
66 #include "tree-ssa-propagate.h"
67 #include "value-prof.h"
68 #include "tree-inline.h"
69 #include "target.h"
70 #include "tree-ssa-live.h"
71 #include "omp-low.h"
72 #include "tree-cfgcleanup.h"
73 #include "wide-int-print.h"
74 #include "gimplify.h"
76 /* This file contains functions for building the Control Flow Graph (CFG)
77 for a function tree. */
79 /* Local declarations. */
81 /* Initial capacity for the basic block array. */
82 static const int initial_cfg_capacity = 20;
84 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
85 which use a particular edge. The CASE_LABEL_EXPRs are chained together
86 via their CASE_CHAIN field, which we clear after we're done with the
87 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
89 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
90 update the case vector in response to edge redirections.
92 Right now this table is set up and torn down at key points in the
93 compilation process. It would be nice if we could make the table
94 more persistent. The key is getting notification of changes to
95 the CFG (particularly edge removal, creation and redirection). */
97 static hash_map<edge, tree> *edge_to_cases;
99 /* If we record edge_to_cases, this bitmap will hold indexes
100 of basic blocks that end in a GIMPLE_SWITCH which we touched
101 due to edge manipulations. */
103 static bitmap touched_switch_bbs;
105 /* CFG statistics. */
106 struct cfg_stats_d
108 long num_merged_labels;
111 static struct cfg_stats_d cfg_stats;
113 /* Data to pass to replace_block_vars_by_duplicates_1. */
114 struct replace_decls_d
116 hash_map<tree, tree> *vars_map;
117 tree to_context;
120 /* Hash table to store last discriminator assigned for each locus. */
121 struct locus_discrim_map
123 location_t locus;
124 int discriminator;
127 /* Hashtable helpers. */
129 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
131 static inline hashval_t hash (const locus_discrim_map *);
132 static inline bool equal (const locus_discrim_map *,
133 const locus_discrim_map *);
136 /* Trivial hash function for a location_t. ITEM is a pointer to
137 a hash table entry that maps a location_t to a discriminator. */
139 inline hashval_t
140 locus_discrim_hasher::hash (const locus_discrim_map *item)
142 return LOCATION_LINE (item->locus);
145 /* Equality function for the locus-to-discriminator map. A and B
146 point to the two hash table entries to compare. */
148 inline bool
149 locus_discrim_hasher::equal (const locus_discrim_map *a,
150 const locus_discrim_map *b)
152 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
155 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
157 /* Basic blocks and flowgraphs. */
158 static void make_blocks (gimple_seq);
160 /* Edges. */
161 static void make_edges (void);
162 static void assign_discriminators (void);
163 static void make_cond_expr_edges (basic_block);
164 static void make_gimple_switch_edges (gswitch *, basic_block);
165 static bool make_goto_expr_edges (basic_block);
166 static void make_gimple_asm_edges (basic_block);
167 static edge gimple_redirect_edge_and_branch (edge, basic_block);
168 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
170 /* Various helpers. */
171 static inline bool stmt_starts_bb_p (gimple, gimple);
172 static int gimple_verify_flow_info (void);
173 static void gimple_make_forwarder_block (edge);
174 static gimple first_non_label_stmt (basic_block);
175 static bool verify_gimple_transaction (gtransaction *);
176 static bool call_can_make_abnormal_goto (gimple);
178 /* Flowgraph optimization and cleanup. */
179 static void gimple_merge_blocks (basic_block, basic_block);
180 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
181 static void remove_bb (basic_block);
182 static edge find_taken_edge_computed_goto (basic_block, tree);
183 static edge find_taken_edge_cond_expr (basic_block, tree);
184 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
185 static tree find_case_label_for_value (gswitch *, tree);
187 void
188 init_empty_tree_cfg_for_function (struct function *fn)
190 /* Initialize the basic block array. */
191 init_flow (fn);
192 profile_status_for_fn (fn) = PROFILE_ABSENT;
193 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
194 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
195 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
196 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
197 initial_cfg_capacity);
199 /* Build a mapping of labels to their associated blocks. */
200 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
201 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
202 initial_cfg_capacity);
204 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
205 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
207 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
208 = EXIT_BLOCK_PTR_FOR_FN (fn);
209 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
210 = ENTRY_BLOCK_PTR_FOR_FN (fn);
213 void
214 init_empty_tree_cfg (void)
216 init_empty_tree_cfg_for_function (cfun);
219 /*---------------------------------------------------------------------------
220 Create basic blocks
221 ---------------------------------------------------------------------------*/
223 /* Entry point to the CFG builder for trees. SEQ is the sequence of
224 statements to be added to the flowgraph. */
226 static void
227 build_gimple_cfg (gimple_seq seq)
229 /* Register specific gimple functions. */
230 gimple_register_cfg_hooks ();
232 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
234 init_empty_tree_cfg ();
236 make_blocks (seq);
238 /* Make sure there is always at least one block, even if it's empty. */
239 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
240 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
242 /* Adjust the size of the array. */
243 if (basic_block_info_for_fn (cfun)->length ()
244 < (size_t) n_basic_blocks_for_fn (cfun))
245 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
246 n_basic_blocks_for_fn (cfun));
248 /* To speed up statement iterator walks, we first purge dead labels. */
249 cleanup_dead_labels ();
251 /* Group case nodes to reduce the number of edges.
252 We do this after cleaning up dead labels because otherwise we miss
253 a lot of obvious case merging opportunities. */
254 group_case_labels ();
256 /* Create the edges of the flowgraph. */
257 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
258 make_edges ();
259 assign_discriminators ();
260 cleanup_dead_labels ();
261 delete discriminator_per_locus;
262 discriminator_per_locus = NULL;
265 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
266 them and propagate the information to LOOP. We assume that the annotations
267 come immediately before the condition in BB, if any. */
269 static void
270 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
272 gimple_stmt_iterator gsi = gsi_last_bb (bb);
273 gimple stmt = gsi_stmt (gsi);
275 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
276 return;
278 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
280 stmt = gsi_stmt (gsi);
281 if (gimple_code (stmt) != GIMPLE_CALL)
282 break;
283 if (!gimple_call_internal_p (stmt)
284 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
285 break;
287 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
289 case annot_expr_ivdep_kind:
290 loop->safelen = INT_MAX;
291 break;
292 case annot_expr_no_vector_kind:
293 loop->dont_vectorize = true;
294 break;
295 case annot_expr_vector_kind:
296 loop->force_vectorize = true;
297 cfun->has_force_vectorize_loops = true;
298 break;
299 default:
300 gcc_unreachable ();
303 stmt = gimple_build_assign (gimple_call_lhs (stmt),
304 gimple_call_arg (stmt, 0));
305 gsi_replace (&gsi, stmt, true);
309 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
310 them and propagate the information to the loop. We assume that the
311 annotations come immediately before the condition of the loop. */
313 static void
314 replace_loop_annotate (void)
316 struct loop *loop;
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple stmt;
321 FOR_EACH_LOOP (loop, 0)
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
331 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
332 FOR_EACH_BB_FN (bb, cfun)
334 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
336 stmt = gsi_stmt (gsi);
337 if (gimple_code (stmt) != GIMPLE_CALL)
338 continue;
339 if (!gimple_call_internal_p (stmt)
340 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
341 continue;
343 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
345 case annot_expr_ivdep_kind:
346 case annot_expr_no_vector_kind:
347 case annot_expr_vector_kind:
348 break;
349 default:
350 gcc_unreachable ();
353 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
354 stmt = gimple_build_assign (gimple_call_lhs (stmt),
355 gimple_call_arg (stmt, 0));
356 gsi_replace (&gsi, stmt, true);
362 static unsigned int
363 execute_build_cfg (void)
365 gimple_seq body = gimple_body (current_function_decl);
367 build_gimple_cfg (body);
368 gimple_set_body (current_function_decl, NULL);
369 if (dump_file && (dump_flags & TDF_DETAILS))
371 fprintf (dump_file, "Scope blocks:\n");
372 dump_scope_blocks (dump_file, dump_flags);
374 cleanup_tree_cfg ();
375 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
376 replace_loop_annotate ();
377 return 0;
380 namespace {
382 const pass_data pass_data_build_cfg =
384 GIMPLE_PASS, /* type */
385 "cfg", /* name */
386 OPTGROUP_NONE, /* optinfo_flags */
387 TV_TREE_CFG, /* tv_id */
388 PROP_gimple_leh, /* properties_required */
389 ( PROP_cfg | PROP_loops ), /* properties_provided */
390 0, /* properties_destroyed */
391 0, /* todo_flags_start */
392 0, /* todo_flags_finish */
395 class pass_build_cfg : public gimple_opt_pass
397 public:
398 pass_build_cfg (gcc::context *ctxt)
399 : gimple_opt_pass (pass_data_build_cfg, ctxt)
402 /* opt_pass methods: */
403 virtual unsigned int execute (function *) { return execute_build_cfg (); }
405 }; // class pass_build_cfg
407 } // anon namespace
409 gimple_opt_pass *
410 make_pass_build_cfg (gcc::context *ctxt)
412 return new pass_build_cfg (ctxt);
416 /* Return true if T is a computed goto. */
418 bool
419 computed_goto_p (gimple t)
421 return (gimple_code (t) == GIMPLE_GOTO
422 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
425 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
426 the other edge points to a bb with just __builtin_unreachable ().
427 I.e. return true for C->M edge in:
428 <bb C>:
430 if (something)
431 goto <bb N>;
432 else
433 goto <bb M>;
434 <bb N>:
435 __builtin_unreachable ();
436 <bb M>: */
438 bool
439 assert_unreachable_fallthru_edge_p (edge e)
441 basic_block pred_bb = e->src;
442 gimple last = last_stmt (pred_bb);
443 if (last && gimple_code (last) == GIMPLE_COND)
445 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
446 if (other_bb == e->dest)
447 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
448 if (EDGE_COUNT (other_bb->succs) == 0)
450 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
451 gimple stmt;
453 if (gsi_end_p (gsi))
454 return false;
455 stmt = gsi_stmt (gsi);
456 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
458 gsi_next (&gsi);
459 if (gsi_end_p (gsi))
460 return false;
461 stmt = gsi_stmt (gsi);
463 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
466 return false;
470 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
471 could alter control flow except via eh. We initialize the flag at
472 CFG build time and only ever clear it later. */
474 static void
475 gimple_call_initialize_ctrl_altering (gimple stmt)
477 int flags = gimple_call_flags (stmt);
479 /* A call alters control flow if it can make an abnormal goto. */
480 if (call_can_make_abnormal_goto (stmt)
481 /* A call also alters control flow if it does not return. */
482 || flags & ECF_NORETURN
483 /* TM ending statements have backedges out of the transaction.
484 Return true so we split the basic block containing them.
485 Note that the TM_BUILTIN test is merely an optimization. */
486 || ((flags & ECF_TM_BUILTIN)
487 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
488 /* BUILT_IN_RETURN call is same as return statement. */
489 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
490 gimple_call_set_ctrl_altering (stmt, true);
491 else
492 gimple_call_set_ctrl_altering (stmt, false);
496 /* Insert SEQ after BB and build a flowgraph. */
498 static basic_block
499 make_blocks_1 (gimple_seq seq, basic_block bb)
501 gimple_stmt_iterator i = gsi_start (seq);
502 gimple stmt = NULL;
503 bool start_new_block = true;
504 bool first_stmt_of_seq = true;
506 while (!gsi_end_p (i))
508 gimple prev_stmt;
510 prev_stmt = stmt;
511 stmt = gsi_stmt (i);
513 if (stmt && is_gimple_call (stmt))
514 gimple_call_initialize_ctrl_altering (stmt);
516 /* If the statement starts a new basic block or if we have determined
517 in a previous pass that we need to create a new block for STMT, do
518 so now. */
519 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
521 if (!first_stmt_of_seq)
522 gsi_split_seq_before (&i, &seq);
523 bb = create_basic_block (seq, bb);
524 start_new_block = false;
527 /* Now add STMT to BB and create the subgraphs for special statement
528 codes. */
529 gimple_set_bb (stmt, bb);
531 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
532 next iteration. */
533 if (stmt_ends_bb_p (stmt))
535 /* If the stmt can make abnormal goto use a new temporary
536 for the assignment to the LHS. This makes sure the old value
537 of the LHS is available on the abnormal edge. Otherwise
538 we will end up with overlapping life-ranges for abnormal
539 SSA names. */
540 if (gimple_has_lhs (stmt)
541 && stmt_can_make_abnormal_goto (stmt)
542 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
544 tree lhs = gimple_get_lhs (stmt);
545 tree tmp = create_tmp_var (TREE_TYPE (lhs));
546 gimple s = gimple_build_assign (lhs, tmp);
547 gimple_set_location (s, gimple_location (stmt));
548 gimple_set_block (s, gimple_block (stmt));
549 gimple_set_lhs (stmt, tmp);
550 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
551 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
552 DECL_GIMPLE_REG_P (tmp) = 1;
553 gsi_insert_after (&i, s, GSI_SAME_STMT);
555 start_new_block = true;
558 gsi_next (&i);
559 first_stmt_of_seq = false;
561 return bb;
564 /* Build a flowgraph for the sequence of stmts SEQ. */
566 static void
567 make_blocks (gimple_seq seq)
569 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
572 /* Create and return a new empty basic block after bb AFTER. */
574 static basic_block
575 create_bb (void *h, void *e, basic_block after)
577 basic_block bb;
579 gcc_assert (!e);
581 /* Create and initialize a new basic block. Since alloc_block uses
582 GC allocation that clears memory to allocate a basic block, we do
583 not have to clear the newly allocated basic block here. */
584 bb = alloc_block ();
586 bb->index = last_basic_block_for_fn (cfun);
587 bb->flags = BB_NEW;
588 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
590 /* Add the new block to the linked list of blocks. */
591 link_block (bb, after);
593 /* Grow the basic block array if needed. */
594 if ((size_t) last_basic_block_for_fn (cfun)
595 == basic_block_info_for_fn (cfun)->length ())
597 size_t new_size =
598 (last_basic_block_for_fn (cfun)
599 + (last_basic_block_for_fn (cfun) + 3) / 4);
600 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
603 /* Add the newly created block to the array. */
604 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
606 n_basic_blocks_for_fn (cfun)++;
607 last_basic_block_for_fn (cfun)++;
609 return bb;
613 /*---------------------------------------------------------------------------
614 Edge creation
615 ---------------------------------------------------------------------------*/
 617 /* If basic block BB has an abnormal edge to a basic block
 618    containing IFN_ABNORMAL_DISPATCHER internal call, return
 619    the dispatcher's basic block, otherwise return NULL.  */
621 basic_block
622 get_abnormal_succ_dispatcher (basic_block bb)
624 edge e;
625 edge_iterator ei;
627 FOR_EACH_EDGE (e, ei, bb->succs)
628 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
630 gimple_stmt_iterator gsi
631 = gsi_start_nondebug_after_labels_bb (e->dest);
632 gimple g = gsi_stmt (gsi);
633 if (g
634 && is_gimple_call (g)
635 && gimple_call_internal_p (g)
636 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
637 return e->dest;
639 return NULL;
 642 /* Helper function for make_edges.  Create a basic block
 643    with ABNORMAL_DISPATCHER internal call in it if needed, and
 644    create abnormal edges from BBS to it and from it to FOR_BB
 645    if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
647 static void
648 handle_abnormal_edges (basic_block *dispatcher_bbs,
649 basic_block for_bb, int *bb_to_omp_idx,
650 auto_vec<basic_block> *bbs, bool computed_goto)
652 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
653 unsigned int idx = 0;
654 basic_block bb;
655 bool inner = false;
657 if (bb_to_omp_idx)
659 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
660 if (bb_to_omp_idx[for_bb->index] != 0)
661 inner = true;
664 /* If the dispatcher has been created already, then there are basic
665 blocks with abnormal edges to it, so just make a new edge to
666 for_bb. */
667 if (*dispatcher == NULL)
669 /* Check if there are any basic blocks that need to have
670 abnormal edges to this dispatcher. If there are none, return
671 early. */
672 if (bb_to_omp_idx == NULL)
674 if (bbs->is_empty ())
675 return;
677 else
679 FOR_EACH_VEC_ELT (*bbs, idx, bb)
680 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
681 break;
682 if (bb == NULL)
683 return;
686 /* Create the dispatcher bb. */
687 *dispatcher = create_basic_block (NULL, for_bb);
688 if (computed_goto)
690 /* Factor computed gotos into a common computed goto site. Also
691 record the location of that site so that we can un-factor the
692 gotos after we have converted back to normal form. */
693 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
695 /* Create the destination of the factored goto. Each original
696 computed goto will put its desired destination into this
697 variable and jump to the label we create immediately below. */
698 tree var = create_tmp_var (ptr_type_node, "gotovar");
700 /* Build a label for the new block which will contain the
701 factored computed goto. */
702 tree factored_label_decl
703 = create_artificial_label (UNKNOWN_LOCATION);
704 gimple factored_computed_goto_label
705 = gimple_build_label (factored_label_decl);
706 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
708 /* Build our new computed goto. */
709 gimple factored_computed_goto = gimple_build_goto (var);
710 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
712 FOR_EACH_VEC_ELT (*bbs, idx, bb)
714 if (bb_to_omp_idx
715 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
716 continue;
718 gsi = gsi_last_bb (bb);
719 gimple last = gsi_stmt (gsi);
721 gcc_assert (computed_goto_p (last));
723 /* Copy the original computed goto's destination into VAR. */
724 gimple assignment
725 = gimple_build_assign (var, gimple_goto_dest (last));
726 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
728 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
729 e->goto_locus = gimple_location (last);
730 gsi_remove (&gsi, true);
733 else
735 tree arg = inner ? boolean_true_node : boolean_false_node;
736 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
737 1, arg);
738 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
739 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
741 /* Create predecessor edges of the dispatcher. */
742 FOR_EACH_VEC_ELT (*bbs, idx, bb)
744 if (bb_to_omp_idx
745 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
746 continue;
747 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
752 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
 755 /* Creates outgoing edges for BB.  Returns 1 when it ends with a
 756    computed goto, returns 2 when it ends with a statement that
 757    might return to this function via a nonlocal goto, otherwise
 758    return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */
760 static int
761 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
763 gimple last = last_stmt (bb);
764 bool fallthru = false;
765 int ret = 0;
767 if (!last)
768 return ret;
770 switch (gimple_code (last))
772 case GIMPLE_GOTO:
773 if (make_goto_expr_edges (bb))
774 ret = 1;
775 fallthru = false;
776 break;
777 case GIMPLE_RETURN:
779 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
780 e->goto_locus = gimple_location (last);
781 fallthru = false;
783 break;
784 case GIMPLE_COND:
785 make_cond_expr_edges (bb);
786 fallthru = false;
787 break;
788 case GIMPLE_SWITCH:
789 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
790 fallthru = false;
791 break;
792 case GIMPLE_RESX:
793 make_eh_edges (last);
794 fallthru = false;
795 break;
796 case GIMPLE_EH_DISPATCH:
797 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
798 break;
800 case GIMPLE_CALL:
801 /* If this function receives a nonlocal goto, then we need to
802 make edges from this call site to all the nonlocal goto
803 handlers. */
804 if (stmt_can_make_abnormal_goto (last))
805 ret = 2;
807 /* If this statement has reachable exception handlers, then
808 create abnormal edges to them. */
809 make_eh_edges (last);
811 /* BUILTIN_RETURN is really a return statement. */
812 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
814 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
815 fallthru = false;
817 /* Some calls are known not to return. */
818 else
819 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
820 break;
822 case GIMPLE_ASSIGN:
823 /* A GIMPLE_ASSIGN may throw internally and thus be considered
824 control-altering. */
825 if (is_ctrl_altering_stmt (last))
826 make_eh_edges (last);
827 fallthru = true;
828 break;
830 case GIMPLE_ASM:
831 make_gimple_asm_edges (bb);
832 fallthru = true;
833 break;
835 CASE_GIMPLE_OMP:
836 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
837 break;
839 case GIMPLE_TRANSACTION:
841 tree abort_label
842 = gimple_transaction_label (as_a <gtransaction *> (last));
843 if (abort_label)
844 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
845 fallthru = true;
847 break;
849 default:
850 gcc_assert (!stmt_ends_bb_p (last));
851 fallthru = true;
852 break;
855 if (fallthru)
856 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
858 return ret;
861 /* Join all the blocks in the flowgraph. */
863 static void
864 make_edges (void)
866 basic_block bb;
867 struct omp_region *cur_region = NULL;
868 auto_vec<basic_block> ab_edge_goto;
869 auto_vec<basic_block> ab_edge_call;
870 int *bb_to_omp_idx = NULL;
871 int cur_omp_region_idx = 0;
873 /* Create an edge from entry to the first block with executable
874 statements in it. */
875 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
876 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
877 EDGE_FALLTHRU);
879 /* Traverse the basic block array placing edges. */
880 FOR_EACH_BB_FN (bb, cfun)
882 int mer;
884 if (bb_to_omp_idx)
885 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
887 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
888 if (mer == 1)
889 ab_edge_goto.safe_push (bb);
890 else if (mer == 2)
891 ab_edge_call.safe_push (bb);
893 if (cur_region && bb_to_omp_idx == NULL)
894 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
897 /* Computed gotos are hell to deal with, especially if there are
898 lots of them with a large number of destinations. So we factor
899 them to a common computed goto location before we build the
900 edge list. After we convert back to normal form, we will un-factor
901 the computed gotos since factoring introduces an unwanted jump.
902 For non-local gotos and abnormal edges from calls to calls that return
903 twice or forced labels, factor the abnormal edges too, by having all
904 abnormal edges from the calls go to a common artificial basic block
905 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
906 basic block to all forced labels and calls returning twice.
907 We do this per-OpenMP structured block, because those regions
908 are guaranteed to be single entry single exit by the standard,
909 so it is not allowed to enter or exit such regions abnormally this way,
910 thus all computed gotos, non-local gotos and setjmp/longjmp calls
911 must not transfer control across SESE region boundaries. */
912 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
914 gimple_stmt_iterator gsi;
915 basic_block dispatcher_bb_array[2] = { NULL, NULL };
916 basic_block *dispatcher_bbs = dispatcher_bb_array;
917 int count = n_basic_blocks_for_fn (cfun);
919 if (bb_to_omp_idx)
920 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
922 FOR_EACH_BB_FN (bb, cfun)
924 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
926 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
927 tree target;
929 if (!label_stmt)
930 break;
932 target = gimple_label_label (label_stmt);
934 /* Make an edge to every label block that has been marked as a
935 potential target for a computed goto or a non-local goto. */
936 if (FORCED_LABEL (target))
937 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
938 &ab_edge_goto, true);
939 if (DECL_NONLOCAL (target))
941 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
942 &ab_edge_call, false);
943 break;
947 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
948 gsi_next_nondebug (&gsi);
949 if (!gsi_end_p (gsi))
951 /* Make an edge to every setjmp-like call. */
952 gimple call_stmt = gsi_stmt (gsi);
953 if (is_gimple_call (call_stmt)
954 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
955 || gimple_call_builtin_p (call_stmt,
956 BUILT_IN_SETJMP_RECEIVER)))
957 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
958 &ab_edge_call, false);
962 if (bb_to_omp_idx)
963 XDELETE (dispatcher_bbs);
966 XDELETE (bb_to_omp_idx);
968 free_omp_regions ();
971 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
972 needed. Returns true if new bbs were created.
973 Note: This is transitional code, and should not be used for new code. We
974 should be able to get rid of this by rewriting all target va-arg
975 gimplification hooks to use an interface gimple_build_cond_value as described
976 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
978 bool
979 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
981 gimple stmt = gsi_stmt (*gsi);
982 basic_block bb = gimple_bb (stmt);
983 basic_block lastbb, afterbb;
984 int old_num_bbs = n_basic_blocks_for_fn (cfun);
985 edge e;
986 lastbb = make_blocks_1 (seq, bb);
987 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
988 return false;
989 e = split_block (bb, stmt);
990 /* Move e->dest to come after the new basic blocks. */
991 afterbb = e->dest;
992 unlink_block (afterbb);
993 link_block (afterbb, lastbb);
994 redirect_edge_succ (e, bb->next_bb);
995 bb = bb->next_bb;
996 while (bb != afterbb)
998 struct omp_region *cur_region = NULL;
999 int cur_omp_region_idx = 0;
1000 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1001 gcc_assert (!mer && !cur_region);
1002 add_bb_to_loop (bb, afterbb->loop_father);
1003 bb = bb->next_bb;
1005 return true;
1008 /* Find the next available discriminator value for LOCUS. The
1009 discriminator distinguishes among several basic blocks that
1010 share a common locus, allowing for more accurate sample-based
1011 profiling. */
1013 static int
1014 next_discriminator_for_locus (location_t locus)
1016 struct locus_discrim_map item;
1017 struct locus_discrim_map **slot;
1019 item.locus = locus;
1020 item.discriminator = 0;
1021 slot = discriminator_per_locus->find_slot_with_hash (
1022 &item, LOCATION_LINE (locus), INSERT);
1023 gcc_assert (slot);
1024 if (*slot == HTAB_EMPTY_ENTRY)
1026 *slot = XNEW (struct locus_discrim_map);
1027 gcc_assert (*slot);
1028 (*slot)->locus = locus;
1029 (*slot)->discriminator = 0;
1031 (*slot)->discriminator++;
1032 return (*slot)->discriminator;
1035 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1037 static bool
1038 same_line_p (location_t locus1, location_t locus2)
1040 expanded_location from, to;
1042 if (locus1 == locus2)
1043 return true;
1045 from = expand_location (locus1);
1046 to = expand_location (locus2);
1048 if (from.line != to.line)
1049 return false;
1050 if (from.file == to.file)
1051 return true;
1052 return (from.file != NULL
1053 && to.file != NULL
1054 && filename_cmp (from.file, to.file) == 0);
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      /* Blocks without a located last statement need no discriminator.  */
      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      /* The successor shares BB's source line.  If the successor
		 already has a discriminator and BB does not, give BB a
		 fresh one; otherwise assign a fresh one to the
		 successor.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the second make_edge result is NULL-checked --
     presumably the false edge can be a duplicate (e.g. both arms target
     the same block) while the first cannot; confirm against make_edge's
     contract.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1124 /* Called for each element in the hash table (P) as we delete the
1125 edge to cases hash table.
1127 Clear all the TREE_CHAINs to prevent problems with copying of
1128 SWITCH_EXPRs and structure sharing rules, then free the hash table
1129 element. */
1131 bool
1132 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1134 tree t, next;
1136 for (t = value; t; t = next)
1138 next = CASE_CHAIN (t);
1139 CASE_CHAIN (t) = NULL;
1142 return true;
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  /* Must not already be recording.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  /* Track which blocks had their switch statements touched, so their
     case labels can be re-grouped when recording stops.  */
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1155 /* Return nonzero if we are recording information for case labels. */
1157 static bool
1158 recording_case_labels_p (void)
1160 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;

  /* Clear the CASE_CHAINs we threaded through the labels, then drop
     the map itself.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;

  /* Re-group the case labels of every switch touched while recording.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }

  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  The chain is threaded through CASE_CHAIN with the
	 newest element at the head.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* The loop above is guaranteed to have populated the entry for E.  */
  return *edge_to_cases->get (e);
}
1228 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1230 static void
1231 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1233 size_t i, n;
1235 n = gimple_switch_num_labels (entry);
1237 for (i = 0; i < n; ++i)
1239 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1240 basic_block label_bb = label_to_block (lab);
1241 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      /* NOTE(review): this error-recovery path uses CFUN rather than the
	 IFUN parameter; it is only reached after a front-end error, where
	 the two presumably coincide -- confirm whether IFUN can differ
	 from CFUN here.  */
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  /* Out-of-range UIDs have no block.  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The goto is now fully represented by the fallthru edge, so the
	 statement itself can be removed.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
1295 /* Create edges for an asm statement with labels at block BB. */
1297 static void
1298 make_gimple_asm_edges (basic_block bb)
1300 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1301 int i, n = gimple_asm_nlabels (stmt);
1303 for (i = 0; i < n; ++i)
1305 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1306 basic_block label_bb = label_to_block (label);
1307 make_edge (bb, label_bb, 0);
1311 /*---------------------------------------------------------------------------
1312 Flowgraph analysis
1313 ---------------------------------------------------------------------------*/
1315 /* Cleanup useless labels in basic blocks. This is something we wish
1316 to do early because it allows us to group case labels before creating
1317 the edges for the CFG, and it speeds up block statement iterators in
1318 all passes later on.
1319 We rerun this pass after CFG is created, to get rid of the labels that
1320 are no longer referenced. After then we do not run it any more, since
1321 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Allocated in cleanup_dead_labels and freed there once the pass is
   done.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1333 /* Given LABEL return the first label in the same basic block. */
1335 static tree
1336 main_block_label (tree label)
1338 basic_block bb = label_to_block (label);
1339 tree main_label = label_for_bb[bb->index].label;
1341 /* label_to_block possibly inserted undefined label into the chain. */
1342 if (!main_label)
1344 label_for_bb[bb->index].label = label;
1345 main_label = label;
1348 label_for_bb[bb->index].used = true;
1349 return main_label;
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label to its block's
     leading label, keeping EH_LANDING_PAD_NR in sync.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Do the same for the labels referenced from EH regions.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* Keep the leading label, user labels, non-local labels, and
	     forced labels; remove everything else.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Label 0 is the default case, so start at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  Merged slots were set to NULL_TREE above.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1667 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1668 and scan the sorted vector of cases. Combine the ones jumping to the
1669 same label. */
1671 void
1672 group_case_labels (void)
1674 basic_block bb;
1676 FOR_EACH_BB_FN (bb, cfun)
1678 gimple stmt = last_stmt (bb);
1679 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1680 group_case_labels_stmt (as_a <gswitch *> (stmt));
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  /* A must have exactly one successor, reached by a non-complex edge,
     and that successor must be B.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  /* B must have A as its only predecessor.  */
  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The locus
     survives only if the last real stmt of A or the first of B already
     carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have made the statement non-throwing; purge the
	     now-dead EH edges if so.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was an EH landing pad, detach it from the pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1997 /* Return the one of two successors of BB that is not reachable by a
1998 complex edge, if there is one. Else, return BB. We use
1999 this in optimizations that use post-dominators for their heuristics,
2000 to catch the cases in C++ where function calls are involved. */
2002 basic_block
2003 single_noncomplex_succ (basic_block bb)
2005 edge e0, e1;
2006 if (EDGE_COUNT (bb->succs) != 2)
2007 return bb;
2009 e0 = EDGE_SUCC (bb, 0);
2010 e1 = EDGE_SUCC (bb, 1);
2011 if (e0->flags & EDGE_COMPLEX)
2012 return e1->dest;
2013 if (e1->flags & EDGE_COMPLEX)
2014 return e0->dest;
2016 return bb;
/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gcall *call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Move the still-referenced label to the preceding block
		 instead of deleting it.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* The removal above may have invalidated the iterator: restart
	     from the (new) last statement, or step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate can determine a unique out-edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
2186 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2187 statement, determine which of the outgoing edges will be taken out of the
2188 block. Return NULL if either edge may be taken. */
2190 static edge
2191 find_taken_edge_computed_goto (basic_block bb, tree val)
2193 basic_block dest;
2194 edge e = NULL;
2196 dest = label_to_block (val);
2197 if (dest)
2199 e = find_edge (bb, dest);
2200 gcc_assert (e != NULL);
2203 return e;
2206 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2207 statement, determine which of the two edges will be taken out of the
2208 block. Return NULL if either edge may be taken. */
2210 static edge
2211 find_taken_edge_cond_expr (basic_block bb, tree val)
2213 edge true_edge, false_edge;
2215 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2217 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2218 return (integer_zerop (val) ? false_edge : true_edge);
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
			     tree val)
{
  basic_block dest_bb;
  edge e;
  tree taken_case;

  /* Binary-search the sorted case labels for VAL (falls back to the
     default label when nothing matches).  */
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
/* Dump a basic block on stderr.  Includes virtual operands, memory
   symbols and block annotations (see the TDF_* flags passed below).  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2293 /* Dump basic block with index N on stderr. */
2295 basic_block
2296 gimple_debug_bb_n (int n)
2298 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2299 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   dumpfile.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      /* With details requested, emit a summary header plus a brief
	 block/edge listing before the function body itself.  */
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, persisting across calls so the
     "Max so far" line is meaningful.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  /* Column layouts for the table rows below.  */
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* NOTE(review): SCALE and LABEL are helper macros defined earlier in
     this file (outside this view); presumably they humanize byte counts.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count edges once by summing each block's successor count.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2400 /*---------------------------------------------------------------------------
2401 Miscellaneous helpers
2402 ---------------------------------------------------------------------------*/
2404 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2405 flow. Transfers of control flow associated with EH are excluded. */
2407 static bool
2408 call_can_make_abnormal_goto (gimple t)
2410 /* If the function has no non-local labels, then a call cannot make an
2411 abnormal transfer of control. */
2412 if (!cfun->has_nonlocal_label
2413 && !cfun->calls_setjmp)
2414 return false;
2416 /* Likewise if the call has no side effects. */
2417 if (!gimple_has_side_effects (t))
2418 return false;
2420 /* Likewise if the called function is leaf. */
2421 if (gimple_call_flags (t) & ECF_LEAF)
2422 return false;
2424 return true;
2428 /* Return true if T can make an abnormal transfer of control flow.
2429 Transfers of control flow associated with EH are excluded. */
2431 bool
2432 stmt_can_make_abnormal_goto (gimple t)
2434 if (computed_goto_p (t))
2435 return true;
2436 if (is_gimple_call (t))
2437 return call_can_make_abnormal_goto (t);
2438 return false;
2442 /* Return true if T represents a stmt that always transfers control. */
2444 bool
2445 is_ctrl_stmt (gimple t)
2447 switch (gimple_code (t))
2449 case GIMPLE_COND:
2450 case GIMPLE_SWITCH:
2451 case GIMPLE_GOTO:
2452 case GIMPLE_RETURN:
2453 case GIMPLE_RESX:
2454 return true;
2455 default:
2456 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 control flow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm statement with labels (asm goto) may branch to them.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
/* Return true if T is a simple local goto, i.e. a GIMPLE_GOTO whose
   destination is a LABEL_DECL (as opposed to a computed goto).  */

bool
simple_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
}
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label following a nonlocal label still needs its own
	     block; otherwise the two labels may share a block, which
	     we record for the CFG statistics dump.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
/* Return true if T should end a basic block, i.e. it always transfers
   control or may alter the flow of control.  */

bool
stmt_ends_bb_p (gimple t)
{
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
/* Remove block annotations and other data structures.  Currently this
   frees the function's label-to-block map.  */

void
delete_tree_cfg_annotations (void)
{
  vec_free (label_to_block_map_for_fn (cfun));
}
2577 /* Return the virtual phi in BB. */
2579 gphi *
2580 get_virtual_phi (basic_block bb)
2582 for (gphi_iterator gsi = gsi_start_phis (bb);
2583 !gsi_end_p (gsi);
2584 gsi_next (&gsi))
2586 gphi *phi = gsi.phi ();
2588 if (virtual_operand_p (PHI_RESULT (phi)))
2589 return phi;
2592 return NULL;
2595 /* Return the first statement in basic block BB. */
2597 gimple
2598 first_stmt (basic_block bb)
2600 gimple_stmt_iterator i = gsi_start_bb (bb);
2601 gimple stmt = NULL;
2603 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2605 gsi_next (&i);
2606 stmt = NULL;
2608 return stmt;
2611 /* Return the first non-label statement in basic block BB. */
2613 static gimple
2614 first_non_label_stmt (basic_block bb)
2616 gimple_stmt_iterator i = gsi_start_bb (bb);
2617 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2618 gsi_next (&i);
2619 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2622 /* Return the last statement in basic block BB. */
2624 gimple
2625 last_stmt (basic_block bb)
2627 gimple_stmt_iterator i = gsi_last_bb (bb);
2628 gimple stmt = NULL;
2630 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2632 gsi_prev (&i);
2633 stmt = NULL;
2635 return stmt;
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  Debug statements and a leading label are disregarded.  */

gimple
last_and_only_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
  gimple last, prev;

  if (gsi_end_p (i))
    return NULL;

  last = gsi_stmt (i);
  gsi_prev_nondebug (&i);
  if (gsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just gsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = gsi_stmt (i);
  if (gimple_code (prev) == GIMPLE_LABEL)
    return last;
  else
    return NULL;
}
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  The
   queued (result, def) pairs were saved by redirect_edge_var_map; they
   are re-attached to NEW_EDGE and the queue for OLD_EDGE is cleared.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved map and the destination's phi nodes in lock step;
     the assert checks they stay in correspondence.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (old_edge);
}
2699 /* Returns the basic block after which the new basic block created
2700 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2701 near its "logical" location. This is of most help to humans looking
2702 at debugging dumps. */
2704 basic_block
2705 split_edge_bb_loc (edge edge_in)
2707 basic_block dest = edge_in->dest;
2708 basic_block dest_prev = dest->prev_bb;
2710 if (dest_prev)
2712 edge e = find_edge (dest_prev, dest);
2713 if (e && !(e->flags & EDGE_COMPLEX))
2714 return edge_in->src;
2716 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits EDGE_IN's execution frequency and count,
     and falls through unconditionally to the old destination.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN into the new block, then restore the PHI
     arguments that the redirection queued on it.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T with base object BASE.
   Returns the offending tree on error, NULL_TREE when T is fine.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Recompute the invariant/side-effect bits and compare against the
     cached ones; a mismatch means they were not kept up to date.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  /* The remaining checks only apply to declarations.  */
  if (!(TREE_CODE (base) == VAR_DECL
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  /* A decl whose address is taken must live in memory, so it cannot be
     marked as a GIMPLE register.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns a non-NULL tree (the offending subtree,
   after diagnosing with error ()) to stop the walk, NULL_TREE otherwise.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF must have been lowered to MEM_REF by gimplification.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand is an INTEGER_CST of pointer type.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must not survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  /* t1 is the bit position, t2 the bit size; both must be
	     host-representable unsigned values.  */
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no element-wise checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the chain of handled components, checking each level, until
     EXPR is the innermost base reference.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and element size operands must all be
	     valid GIMPLE values when present.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally check the base itself, unless it is an invariant used as
     an rvalue (which needs no lvalue verification).  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3259 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3260 list of pointer-to types that is trivially convertible to DEST. */
3262 static bool
3263 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3265 tree src;
3267 if (!TYPE_POINTER_TO (src_obj))
3268 return true;
3270 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3271 if (useless_type_conversion_p (dest, src))
3272 return true;
3274 return false;
3277 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3278 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3280 static bool
3281 valid_fixed_convert_types_p (tree type1, tree type2)
3283 return (FIXED_POINT_TYPE_P (type1)
3284 && (INTEGRAL_TYPE_P (type2)
3285 || SCALAR_FLOAT_TYPE_P (type2)
3286 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry no callee expression; regular calls must.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense together with one of
     the pure/const flags.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (lhs
      && gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error, false if the
   comparison is well-formed.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* Element-wise vector comparisons may not collapse into a
	 single boolean result.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3503 /* Verify a gimple assignment statement STMT with an unary rhs.
3504 Returns true if anything is wrong. */
3506 static bool
3507 verify_gimple_assign_unary (gassign *stmt)
3509 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3510 tree lhs = gimple_assign_lhs (stmt);
3511 tree lhs_type = TREE_TYPE (lhs);
3512 tree rhs1 = gimple_assign_rhs1 (stmt);
3513 tree rhs1_type = TREE_TYPE (rhs1);
3515 if (!is_gimple_reg (lhs))
3517 error ("non-register as LHS of unary operation");
3518 return true;
3521 if (!is_gimple_val (rhs1))
3523 error ("invalid operand in unary operation");
3524 return true;
3527 /* First handle conversions. */
3528 switch (rhs_code)
3530 CASE_CONVERT:
3532 /* Allow conversions from pointer type to integral type only if
3533 there is no sign or zero extension involved.
3534 For targets were the precision of ptrofftype doesn't match that
3535 of pointers we need to allow arbitrary conversions to ptrofftype. */
3536 if ((POINTER_TYPE_P (lhs_type)
3537 && INTEGRAL_TYPE_P (rhs1_type))
3538 || (POINTER_TYPE_P (rhs1_type)
3539 && INTEGRAL_TYPE_P (lhs_type)
3540 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3541 || ptrofftype_p (sizetype))))
3542 return false;
3544 /* Allow conversion from integral to offset type and vice versa. */
3545 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3546 && INTEGRAL_TYPE_P (rhs1_type))
3547 || (INTEGRAL_TYPE_P (lhs_type)
3548 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3549 return false;
3551 /* Otherwise assert we are converting between types of the
3552 same kind. */
3553 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3555 error ("invalid types in nop conversion");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 return true;
3561 return false;
3564 case ADDR_SPACE_CONVERT_EXPR:
3566 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3567 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3568 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3570 error ("invalid types in address space conversion");
3571 debug_generic_expr (lhs_type);
3572 debug_generic_expr (rhs1_type);
3573 return true;
3576 return false;
3579 case FIXED_CONVERT_EXPR:
3581 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3582 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3584 error ("invalid types in fixed-point conversion");
3585 debug_generic_expr (lhs_type);
3586 debug_generic_expr (rhs1_type);
3587 return true;
3590 return false;
3593 case FLOAT_EXPR:
3595 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3596 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3597 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3599 error ("invalid types in conversion to floating point");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 return true;
3605 return false;
3608 case FIX_TRUNC_EXPR:
3610 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3611 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3612 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3614 error ("invalid types in conversion to integer");
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3620 return false;
3622 case REDUC_MAX_EXPR:
3623 case REDUC_MIN_EXPR:
3624 case REDUC_PLUS_EXPR:
3625 if (!VECTOR_TYPE_P (rhs1_type)
3626 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3628 error ("reduction should convert from vector to element type");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3633 return false;
3635 case VEC_UNPACK_HI_EXPR:
3636 case VEC_UNPACK_LO_EXPR:
3637 case VEC_UNPACK_FLOAT_HI_EXPR:
3638 case VEC_UNPACK_FLOAT_LO_EXPR:
3639 /* FIXME. */
3640 return false;
3642 case NEGATE_EXPR:
3643 case ABS_EXPR:
3644 case BIT_NOT_EXPR:
3645 case PAREN_EXPR:
3646 case CONJ_EXPR:
3647 break;
3649 default:
3650 gcc_unreachable ();
3653 /* For the remaining codes assert there is no conversion involved. */
3654 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3656 error ("non-trivial conversion in unary operation");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 return true;
3662 return false;
3665 /* Verify a gimple assignment statement STMT with a binary rhs.
3666 Returns true if anything is wrong. */
3668 static bool
3669 verify_gimple_assign_binary (gassign *stmt)
3671 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3672 tree lhs = gimple_assign_lhs (stmt);
3673 tree lhs_type = TREE_TYPE (lhs);
3674 tree rhs1 = gimple_assign_rhs1 (stmt);
3675 tree rhs1_type = TREE_TYPE (rhs1);
3676 tree rhs2 = gimple_assign_rhs2 (stmt);
3677 tree rhs2_type = TREE_TYPE (rhs2);
3679 if (!is_gimple_reg (lhs))
3681 error ("non-register as LHS of binary operation");
3682 return true;
3685 if (!is_gimple_val (rhs1)
3686 || !is_gimple_val (rhs2))
3688 error ("invalid operands in binary operation");
3689 return true;
3692 /* First handle operations that involve different types. */
3693 switch (rhs_code)
3695 case COMPLEX_EXPR:
3697 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3698 || !(INTEGRAL_TYPE_P (rhs1_type)
3699 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3700 || !(INTEGRAL_TYPE_P (rhs2_type)
3701 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3703 error ("type mismatch in complex expression");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 debug_generic_expr (rhs2_type);
3707 return true;
3710 return false;
3713 case LSHIFT_EXPR:
3714 case RSHIFT_EXPR:
3715 case LROTATE_EXPR:
3716 case RROTATE_EXPR:
3718 /* Shifts and rotates are ok on integral types, fixed point
3719 types and integer vector types. */
3720 if ((!INTEGRAL_TYPE_P (rhs1_type)
3721 && !FIXED_POINT_TYPE_P (rhs1_type)
3722 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3723 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3724 || (!INTEGRAL_TYPE_P (rhs2_type)
3725 /* Vector shifts of vectors are also ok. */
3726 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3727 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3728 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3729 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3730 || !useless_type_conversion_p (lhs_type, rhs1_type))
3732 error ("type mismatch in shift expression");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 debug_generic_expr (rhs2_type);
3736 return true;
3739 return false;
3742 case WIDEN_LSHIFT_EXPR:
3744 if (!INTEGRAL_TYPE_P (lhs_type)
3745 || !INTEGRAL_TYPE_P (rhs1_type)
3746 || TREE_CODE (rhs2) != INTEGER_CST
3747 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3749 error ("type mismatch in widening vector shift expression");
3750 debug_generic_expr (lhs_type);
3751 debug_generic_expr (rhs1_type);
3752 debug_generic_expr (rhs2_type);
3753 return true;
3756 return false;
3759 case VEC_WIDEN_LSHIFT_HI_EXPR:
3760 case VEC_WIDEN_LSHIFT_LO_EXPR:
3762 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3763 || TREE_CODE (lhs_type) != VECTOR_TYPE
3764 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3765 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3766 || TREE_CODE (rhs2) != INTEGER_CST
3767 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3768 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3770 error ("type mismatch in widening vector shift expression");
3771 debug_generic_expr (lhs_type);
3772 debug_generic_expr (rhs1_type);
3773 debug_generic_expr (rhs2_type);
3774 return true;
3777 return false;
3780 case PLUS_EXPR:
3781 case MINUS_EXPR:
3783 tree lhs_etype = lhs_type;
3784 tree rhs1_etype = rhs1_type;
3785 tree rhs2_etype = rhs2_type;
3786 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3788 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3789 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3791 error ("invalid non-vector operands to vector valued plus");
3792 return true;
3794 lhs_etype = TREE_TYPE (lhs_type);
3795 rhs1_etype = TREE_TYPE (rhs1_type);
3796 rhs2_etype = TREE_TYPE (rhs2_type);
3798 if (POINTER_TYPE_P (lhs_etype)
3799 || POINTER_TYPE_P (rhs1_etype)
3800 || POINTER_TYPE_P (rhs2_etype))
3802 error ("invalid (pointer) operands to plus/minus");
3803 return true;
3806 /* Continue with generic binary expression handling. */
3807 break;
3810 case POINTER_PLUS_EXPR:
3812 if (!POINTER_TYPE_P (rhs1_type)
3813 || !useless_type_conversion_p (lhs_type, rhs1_type)
3814 || !ptrofftype_p (rhs2_type))
3816 error ("type mismatch in pointer plus expression");
3817 debug_generic_stmt (lhs_type);
3818 debug_generic_stmt (rhs1_type);
3819 debug_generic_stmt (rhs2_type);
3820 return true;
3823 return false;
3826 case TRUTH_ANDIF_EXPR:
3827 case TRUTH_ORIF_EXPR:
3828 case TRUTH_AND_EXPR:
3829 case TRUTH_OR_EXPR:
3830 case TRUTH_XOR_EXPR:
3832 gcc_unreachable ();
3834 case LT_EXPR:
3835 case LE_EXPR:
3836 case GT_EXPR:
3837 case GE_EXPR:
3838 case EQ_EXPR:
3839 case NE_EXPR:
3840 case UNORDERED_EXPR:
3841 case ORDERED_EXPR:
3842 case UNLT_EXPR:
3843 case UNLE_EXPR:
3844 case UNGT_EXPR:
3845 case UNGE_EXPR:
3846 case UNEQ_EXPR:
3847 case LTGT_EXPR:
3848 /* Comparisons are also binary, but the result type is not
3849 connected to the operand types. */
3850 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3852 case WIDEN_MULT_EXPR:
3853 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3854 return true;
3855 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3856 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3858 case WIDEN_SUM_EXPR:
3859 case VEC_WIDEN_MULT_HI_EXPR:
3860 case VEC_WIDEN_MULT_LO_EXPR:
3861 case VEC_WIDEN_MULT_EVEN_EXPR:
3862 case VEC_WIDEN_MULT_ODD_EXPR:
3863 case VEC_PACK_TRUNC_EXPR:
3864 case VEC_PACK_SAT_EXPR:
3865 case VEC_PACK_FIX_TRUNC_EXPR:
3866 /* FIXME. */
3867 return false;
3869 case MULT_EXPR:
3870 case MULT_HIGHPART_EXPR:
3871 case TRUNC_DIV_EXPR:
3872 case CEIL_DIV_EXPR:
3873 case FLOOR_DIV_EXPR:
3874 case ROUND_DIV_EXPR:
3875 case TRUNC_MOD_EXPR:
3876 case CEIL_MOD_EXPR:
3877 case FLOOR_MOD_EXPR:
3878 case ROUND_MOD_EXPR:
3879 case RDIV_EXPR:
3880 case EXACT_DIV_EXPR:
3881 case MIN_EXPR:
3882 case MAX_EXPR:
3883 case BIT_IOR_EXPR:
3884 case BIT_XOR_EXPR:
3885 case BIT_AND_EXPR:
3886 /* Continue with generic binary expression handling. */
3887 break;
3889 default:
3890 gcc_unreachable ();
3893 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3894 || !useless_type_conversion_p (lhs_type, rhs2_type))
3896 error ("type mismatch in binary expression");
3897 debug_generic_stmt (lhs_type);
3898 debug_generic_stmt (rhs1_type);
3899 debug_generic_stmt (rhs2_type);
3900 return true;
3903 return false;
3906 /* Verify a gimple assignment statement STMT with a ternary rhs.
3907 Returns true if anything is wrong. */
3909 static bool
3910 verify_gimple_assign_ternary (gassign *stmt)
3912 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3913 tree lhs = gimple_assign_lhs (stmt);
3914 tree lhs_type = TREE_TYPE (lhs);
3915 tree rhs1 = gimple_assign_rhs1 (stmt);
3916 tree rhs1_type = TREE_TYPE (rhs1);
3917 tree rhs2 = gimple_assign_rhs2 (stmt);
3918 tree rhs2_type = TREE_TYPE (rhs2);
3919 tree rhs3 = gimple_assign_rhs3 (stmt);
3920 tree rhs3_type = TREE_TYPE (rhs3);
3922 if (!is_gimple_reg (lhs))
3924 error ("non-register as LHS of ternary operation");
3925 return true;
3928 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3929 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3930 || !is_gimple_val (rhs2)
3931 || !is_gimple_val (rhs3))
3933 error ("invalid operands in ternary operation");
3934 return true;
3937 /* First handle operations that involve different types. */
3938 switch (rhs_code)
3940 case WIDEN_MULT_PLUS_EXPR:
3941 case WIDEN_MULT_MINUS_EXPR:
3942 if ((!INTEGRAL_TYPE_P (rhs1_type)
3943 && !FIXED_POINT_TYPE_P (rhs1_type))
3944 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3945 || !useless_type_conversion_p (lhs_type, rhs3_type)
3946 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3947 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3949 error ("type mismatch in widening multiply-accumulate expression");
3950 debug_generic_expr (lhs_type);
3951 debug_generic_expr (rhs1_type);
3952 debug_generic_expr (rhs2_type);
3953 debug_generic_expr (rhs3_type);
3954 return true;
3956 break;
3958 case FMA_EXPR:
3959 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3960 || !useless_type_conversion_p (lhs_type, rhs2_type)
3961 || !useless_type_conversion_p (lhs_type, rhs3_type))
3963 error ("type mismatch in fused multiply-add expression");
3964 debug_generic_expr (lhs_type);
3965 debug_generic_expr (rhs1_type);
3966 debug_generic_expr (rhs2_type);
3967 debug_generic_expr (rhs3_type);
3968 return true;
3970 break;
3972 case VEC_COND_EXPR:
3973 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3974 || TYPE_SIGN (rhs1_type) != SIGNED
3975 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
3976 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3977 != TYPE_VECTOR_SUBPARTS (lhs_type))
3979 error ("the first argument of a VEC_COND_EXPR must be of a signed "
3980 "integral vector type of the same size and number of "
3981 "elements as the result");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 return true;
3986 /* Fallthrough. */
3987 case COND_EXPR:
3988 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3989 || !useless_type_conversion_p (lhs_type, rhs3_type))
3991 error ("type mismatch in conditional expression");
3992 debug_generic_expr (lhs_type);
3993 debug_generic_expr (rhs2_type);
3994 debug_generic_expr (rhs3_type);
3995 return true;
3997 break;
3999 case VEC_PERM_EXPR:
4000 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4001 || !useless_type_conversion_p (lhs_type, rhs2_type))
4003 error ("type mismatch in vector permute expression");
4004 debug_generic_expr (lhs_type);
4005 debug_generic_expr (rhs1_type);
4006 debug_generic_expr (rhs2_type);
4007 debug_generic_expr (rhs3_type);
4008 return true;
4011 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4012 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4013 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4015 error ("vector types expected in vector permute expression");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs1_type);
4018 debug_generic_expr (rhs2_type);
4019 debug_generic_expr (rhs3_type);
4020 return true;
4023 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4024 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4025 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4026 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4027 != TYPE_VECTOR_SUBPARTS (lhs_type))
4029 error ("vectors with different element number found "
4030 "in vector permute expression");
4031 debug_generic_expr (lhs_type);
4032 debug_generic_expr (rhs1_type);
4033 debug_generic_expr (rhs2_type);
4034 debug_generic_expr (rhs3_type);
4035 return true;
4038 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4039 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4040 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4042 error ("invalid mask type in vector permute expression");
4043 debug_generic_expr (lhs_type);
4044 debug_generic_expr (rhs1_type);
4045 debug_generic_expr (rhs2_type);
4046 debug_generic_expr (rhs3_type);
4047 return true;
4050 return false;
4052 case SAD_EXPR:
4053 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4054 || !useless_type_conversion_p (lhs_type, rhs3_type)
4055 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4056 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4057 > GET_MODE_BITSIZE (GET_MODE_INNER
4058 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4060 error ("type mismatch in sad expression");
4061 debug_generic_expr (lhs_type);
4062 debug_generic_expr (rhs1_type);
4063 debug_generic_expr (rhs2_type);
4064 debug_generic_expr (rhs3_type);
4065 return true;
4068 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4069 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4070 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4072 error ("vector types expected in sad expression");
4073 debug_generic_expr (lhs_type);
4074 debug_generic_expr (rhs1_type);
4075 debug_generic_expr (rhs2_type);
4076 debug_generic_expr (rhs3_type);
4077 return true;
4080 return false;
4082 case DOT_PROD_EXPR:
4083 case REALIGN_LOAD_EXPR:
4084 /* FIXME. */
4085 return false;
4087 default:
4088 gcc_unreachable ();
4090 return false;
4093 /* Verify a gimple assignment statement STMT with a single rhs.
4094 Returns true if anything is wrong. */
4096 static bool
4097 verify_gimple_assign_single (gassign *stmt)
4099 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4100 tree lhs = gimple_assign_lhs (stmt);
4101 tree lhs_type = TREE_TYPE (lhs);
4102 tree rhs1 = gimple_assign_rhs1 (stmt);
4103 tree rhs1_type = TREE_TYPE (rhs1);
4104 bool res = false;
4106 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4108 error ("non-trivial conversion at assignment");
4109 debug_generic_expr (lhs_type);
4110 debug_generic_expr (rhs1_type);
4111 return true;
4114 if (gimple_clobber_p (stmt)
4115 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4117 error ("non-decl/MEM_REF LHS in clobber statement");
4118 debug_generic_expr (lhs);
4119 return true;
4122 if (handled_component_p (lhs)
4123 || TREE_CODE (lhs) == MEM_REF
4124 || TREE_CODE (lhs) == TARGET_MEM_REF)
4125 res |= verify_types_in_gimple_reference (lhs, true);
4127 /* Special codes we cannot handle via their class. */
4128 switch (rhs_code)
4130 case ADDR_EXPR:
4132 tree op = TREE_OPERAND (rhs1, 0);
4133 if (!is_gimple_addressable (op))
4135 error ("invalid operand in unary expression");
4136 return true;
4139 /* Technically there is no longer a need for matching types, but
4140 gimple hygiene asks for this check. In LTO we can end up
4141 combining incompatible units and thus end up with addresses
4142 of globals that change their type to a common one. */
4143 if (!in_lto_p
4144 && !types_compatible_p (TREE_TYPE (op),
4145 TREE_TYPE (TREE_TYPE (rhs1)))
4146 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4147 TREE_TYPE (op)))
4149 error ("type mismatch in address expression");
4150 debug_generic_stmt (TREE_TYPE (rhs1));
4151 debug_generic_stmt (TREE_TYPE (op));
4152 return true;
4155 return verify_types_in_gimple_reference (op, true);
4158 /* tcc_reference */
4159 case INDIRECT_REF:
4160 error ("INDIRECT_REF in gimple IL");
4161 return true;
4163 case COMPONENT_REF:
4164 case BIT_FIELD_REF:
4165 case ARRAY_REF:
4166 case ARRAY_RANGE_REF:
4167 case VIEW_CONVERT_EXPR:
4168 case REALPART_EXPR:
4169 case IMAGPART_EXPR:
4170 case TARGET_MEM_REF:
4171 case MEM_REF:
4172 if (!is_gimple_reg (lhs)
4173 && is_gimple_reg_type (TREE_TYPE (lhs)))
4175 error ("invalid rhs for gimple memory store");
4176 debug_generic_stmt (lhs);
4177 debug_generic_stmt (rhs1);
4178 return true;
4180 return res || verify_types_in_gimple_reference (rhs1, false);
4182 /* tcc_constant */
4183 case SSA_NAME:
4184 case INTEGER_CST:
4185 case REAL_CST:
4186 case FIXED_CST:
4187 case COMPLEX_CST:
4188 case VECTOR_CST:
4189 case STRING_CST:
4190 return res;
4192 /* tcc_declaration */
4193 case CONST_DECL:
4194 return res;
4195 case VAR_DECL:
4196 case PARM_DECL:
4197 if (!is_gimple_reg (lhs)
4198 && !is_gimple_reg (rhs1)
4199 && is_gimple_reg_type (TREE_TYPE (lhs)))
4201 error ("invalid rhs for gimple memory store");
4202 debug_generic_stmt (lhs);
4203 debug_generic_stmt (rhs1);
4204 return true;
4206 return res;
4208 case CONSTRUCTOR:
4209 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4211 unsigned int i;
4212 tree elt_i, elt_v, elt_t = NULL_TREE;
4214 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4215 return res;
4216 /* For vector CONSTRUCTORs we require that either it is empty
4217 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4218 (then the element count must be correct to cover the whole
4219 outer vector and index must be NULL on all elements, or it is
4220 a CONSTRUCTOR of scalar elements, where we as an exception allow
4221 smaller number of elements (assuming zero filling) and
4222 consecutive indexes as compared to NULL indexes (such
4223 CONSTRUCTORs can appear in the IL from FEs). */
4224 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4226 if (elt_t == NULL_TREE)
4228 elt_t = TREE_TYPE (elt_v);
4229 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4231 tree elt_t = TREE_TYPE (elt_v);
4232 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4233 TREE_TYPE (elt_t)))
4235 error ("incorrect type of vector CONSTRUCTOR"
4236 " elements");
4237 debug_generic_stmt (rhs1);
4238 return true;
4240 else if (CONSTRUCTOR_NELTS (rhs1)
4241 * TYPE_VECTOR_SUBPARTS (elt_t)
4242 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4244 error ("incorrect number of vector CONSTRUCTOR"
4245 " elements");
4246 debug_generic_stmt (rhs1);
4247 return true;
4250 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4251 elt_t))
4253 error ("incorrect type of vector CONSTRUCTOR elements");
4254 debug_generic_stmt (rhs1);
4255 return true;
4257 else if (CONSTRUCTOR_NELTS (rhs1)
4258 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4260 error ("incorrect number of vector CONSTRUCTOR elements");
4261 debug_generic_stmt (rhs1);
4262 return true;
4265 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4267 error ("incorrect type of vector CONSTRUCTOR elements");
4268 debug_generic_stmt (rhs1);
4269 return true;
4271 if (elt_i != NULL_TREE
4272 && (TREE_CODE (elt_t) == VECTOR_TYPE
4273 || TREE_CODE (elt_i) != INTEGER_CST
4274 || compare_tree_int (elt_i, i) != 0))
4276 error ("vector CONSTRUCTOR with non-NULL element index");
4277 debug_generic_stmt (rhs1);
4278 return true;
4280 if (!is_gimple_val (elt_v))
4282 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4283 debug_generic_stmt (rhs1);
4284 return true;
4288 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4290 error ("non-vector CONSTRUCTOR with elements");
4291 debug_generic_stmt (rhs1);
4292 return true;
4294 return res;
4295 case OBJ_TYPE_REF:
4296 case ASSERT_EXPR:
4297 case WITH_SIZE_EXPR:
4298 /* FIXME. */
4299 return res;
4301 default:;
4304 return res;
4307 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4308 is a problem, otherwise false. */
4310 static bool
4311 verify_gimple_assign (gassign *stmt)
4313 switch (gimple_assign_rhs_class (stmt))
4315 case GIMPLE_SINGLE_RHS:
4316 return verify_gimple_assign_single (stmt);
4318 case GIMPLE_UNARY_RHS:
4319 return verify_gimple_assign_unary (stmt);
4321 case GIMPLE_BINARY_RHS:
4322 return verify_gimple_assign_binary (stmt);
4324 case GIMPLE_TERNARY_RHS:
4325 return verify_gimple_assign_ternary (stmt);
4327 default:
4328 gcc_unreachable ();
4332 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4333 is a problem, otherwise false. */
4335 static bool
4336 verify_gimple_return (greturn *stmt)
4338 tree op = gimple_return_retval (stmt);
4339 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4341 /* We cannot test for present return values as we do not fix up missing
4342 return values from the original source. */
4343 if (op == NULL)
4344 return false;
4346 if (!is_gimple_val (op)
4347 && TREE_CODE (op) != RESULT_DECL)
4349 error ("invalid operand in return statement");
4350 debug_generic_stmt (op);
4351 return true;
4354 if ((TREE_CODE (op) == RESULT_DECL
4355 && DECL_BY_REFERENCE (op))
4356 || (TREE_CODE (op) == SSA_NAME
4357 && SSA_NAME_VAR (op)
4358 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4359 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4360 op = TREE_TYPE (op);
4362 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4364 error ("invalid conversion in return statement");
4365 debug_generic_stmt (restype);
4366 debug_generic_stmt (TREE_TYPE (op));
4367 return true;
4370 return false;
4374 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4375 is a problem, otherwise false. */
4377 static bool
4378 verify_gimple_goto (ggoto *stmt)
4380 tree dest = gimple_goto_dest (stmt);
4382 /* ??? We have two canonical forms of direct goto destinations, a
4383 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4384 if (TREE_CODE (dest) != LABEL_DECL
4385 && (!is_gimple_val (dest)
4386 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4388 error ("goto destination is neither a label nor a pointer");
4389 return true;
4392 return false;
4395 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4396 is a problem, otherwise false. */
4398 static bool
4399 verify_gimple_switch (gswitch *stmt)
4401 unsigned int i, n;
4402 tree elt, prev_upper_bound = NULL_TREE;
4403 tree index_type, elt_type = NULL_TREE;
4405 if (!is_gimple_val (gimple_switch_index (stmt)))
4407 error ("invalid operand to switch statement");
4408 debug_generic_stmt (gimple_switch_index (stmt));
4409 return true;
4412 index_type = TREE_TYPE (gimple_switch_index (stmt));
4413 if (! INTEGRAL_TYPE_P (index_type))
4415 error ("non-integral type switch statement");
4416 debug_generic_expr (index_type);
4417 return true;
4420 elt = gimple_switch_label (stmt, 0);
4421 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4423 error ("invalid default case label in switch statement");
4424 debug_generic_expr (elt);
4425 return true;
4428 n = gimple_switch_num_labels (stmt);
4429 for (i = 1; i < n; i++)
4431 elt = gimple_switch_label (stmt, i);
4433 if (! CASE_LOW (elt))
4435 error ("invalid case label in switch statement");
4436 debug_generic_expr (elt);
4437 return true;
4439 if (CASE_HIGH (elt)
4440 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4442 error ("invalid case range in switch statement");
4443 debug_generic_expr (elt);
4444 return true;
4447 if (elt_type)
4449 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4450 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4452 error ("type mismatch for case label in switch statement");
4453 debug_generic_expr (elt);
4454 return true;
4457 else
4459 elt_type = TREE_TYPE (CASE_LOW (elt));
4460 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4462 error ("type precision mismatch in switch statement");
4463 return true;
4467 if (prev_upper_bound)
4469 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4471 error ("case labels not sorted in switch statement");
4472 return true;
4476 prev_upper_bound = CASE_HIGH (elt);
4477 if (! prev_upper_bound)
4478 prev_upper_bound = CASE_LOW (elt);
4481 return false;
4484 /* Verify a gimple debug statement STMT.
4485 Returns true if anything is wrong. */
4487 static bool
4488 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4490 /* There isn't much that could be wrong in a gimple debug stmt. A
4491 gimple debug bind stmt, for example, maps a tree, that's usually
4492 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4493 component or member of an aggregate type, to another tree, that
4494 can be an arbitrary expression. These stmts expand into debug
4495 insns, and are converted to debug notes by var-tracking.c. */
4496 return false;
4499 /* Verify a gimple label statement STMT.
4500 Returns true if anything is wrong. */
4502 static bool
4503 verify_gimple_label (glabel *stmt)
4505 tree decl = gimple_label_label (stmt);
4506 int uid;
4507 bool err = false;
4509 if (TREE_CODE (decl) != LABEL_DECL)
4510 return true;
4511 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4512 && DECL_CONTEXT (decl) != current_function_decl)
4514 error ("label's context is not the current function decl");
4515 err |= true;
4518 uid = LABEL_DECL_UID (decl);
4519 if (cfun->cfg
4520 && (uid == -1
4521 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4523 error ("incorrect entry in label_to_block_map");
4524 err |= true;
4527 uid = EH_LANDING_PAD_NR (decl);
4528 if (uid)
4530 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4531 if (decl != lp->post_landing_pad)
4533 error ("incorrect setting of landing pad number");
4534 err |= true;
4538 return err;
4541 /* Verify a gimple cond statement STMT.
4542 Returns true if anything is wrong. */
4544 static bool
4545 verify_gimple_cond (gcond *stmt)
4547 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4549 error ("invalid comparison code in gimple cond");
4550 return true;
4552 if (!(!gimple_cond_true_label (stmt)
4553 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4554 || !(!gimple_cond_false_label (stmt)
4555 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4557 error ("invalid labels in gimple cond");
4558 return true;
4561 return verify_gimple_comparison (boolean_type_node,
4562 gimple_cond_lhs (stmt),
4563 gimple_cond_rhs (stmt));
4566 /* Verify the GIMPLE statement STMT. Returns true if there is an
4567 error, otherwise false. */
4569 static bool
4570 verify_gimple_stmt (gimple stmt)
4572 switch (gimple_code (stmt))
4574 case GIMPLE_ASSIGN:
4575 return verify_gimple_assign (as_a <gassign *> (stmt));
4577 case GIMPLE_LABEL:
4578 return verify_gimple_label (as_a <glabel *> (stmt));
4580 case GIMPLE_CALL:
4581 return verify_gimple_call (as_a <gcall *> (stmt));
4583 case GIMPLE_COND:
4584 return verify_gimple_cond (as_a <gcond *> (stmt));
4586 case GIMPLE_GOTO:
4587 return verify_gimple_goto (as_a <ggoto *> (stmt));
4589 case GIMPLE_SWITCH:
4590 return verify_gimple_switch (as_a <gswitch *> (stmt));
4592 case GIMPLE_RETURN:
4593 return verify_gimple_return (as_a <greturn *> (stmt));
4595 case GIMPLE_ASM:
4596 return false;
4598 case GIMPLE_TRANSACTION:
4599 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4601 /* Tuples that do not have tree operands. */
4602 case GIMPLE_NOP:
4603 case GIMPLE_PREDICT:
4604 case GIMPLE_RESX:
4605 case GIMPLE_EH_DISPATCH:
4606 case GIMPLE_EH_MUST_NOT_THROW:
4607 return false;
4609 CASE_GIMPLE_OMP:
4610 /* OpenMP directives are validated by the FE and never operated
4611 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4612 non-gimple expressions when the main index variable has had
4613 its address taken. This does not affect the loop itself
4614 because the header of an GIMPLE_OMP_FOR is merely used to determine
4615 how to setup the parallel iteration. */
4616 return false;
4618 case GIMPLE_DEBUG:
4619 return verify_gimple_debug (stmt);
4621 default:
4622 gcc_unreachable ();
4626 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4627 and false otherwise. */
4629 static bool
4630 verify_gimple_phi (gimple phi)
4632 bool err = false;
4633 unsigned i;
4634 tree phi_result = gimple_phi_result (phi);
4635 bool virtual_p;
4637 if (!phi_result)
4639 error ("invalid PHI result");
4640 return true;
4643 virtual_p = virtual_operand_p (phi_result);
4644 if (TREE_CODE (phi_result) != SSA_NAME
4645 || (virtual_p
4646 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4648 error ("invalid PHI result");
4649 err = true;
4652 for (i = 0; i < gimple_phi_num_args (phi); i++)
4654 tree t = gimple_phi_arg_def (phi, i);
4656 if (!t)
4658 error ("missing PHI def");
4659 err |= true;
4660 continue;
4662 /* Addressable variables do have SSA_NAMEs but they
4663 are not considered gimple values. */
4664 else if ((TREE_CODE (t) == SSA_NAME
4665 && virtual_p != virtual_operand_p (t))
4666 || (virtual_p
4667 && (TREE_CODE (t) != SSA_NAME
4668 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4669 || (!virtual_p
4670 && !is_gimple_val (t)))
4672 error ("invalid PHI argument");
4673 debug_generic_expr (t);
4674 err |= true;
4676 #ifdef ENABLE_TYPES_CHECKING
4677 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4679 error ("incompatible types in PHI argument %u", i);
4680 debug_generic_stmt (TREE_TYPE (phi_result));
4681 debug_generic_stmt (TREE_TYPE (t));
4682 err |= true;
4684 #endif
4687 return err;
4690 /* Verify the GIMPLE statements inside the sequence STMTS. */
4692 static bool
4693 verify_gimple_in_seq_2 (gimple_seq stmts)
4695 gimple_stmt_iterator ittr;
4696 bool err = false;
4698 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4700 gimple stmt = gsi_stmt (ittr);
/* Statements that contain nested sequences are handled by recursing
   into each sub-sequence; everything else is verified individually
   in the default case.  */
4702 switch (gimple_code (stmt))
4704 case GIMPLE_BIND:
4705 err |= verify_gimple_in_seq_2 (
4706 gimple_bind_body (as_a <gbind *> (stmt)));
4707 break;
4709 case GIMPLE_TRY:
4710 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4711 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4712 break;
4714 case GIMPLE_EH_FILTER:
4715 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4716 break;
4718 case GIMPLE_EH_ELSE:
4720 geh_else *eh_else = as_a <geh_else *> (stmt);
4721 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4722 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4724 break;
4726 case GIMPLE_CATCH:
4727 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4728 as_a <gcatch *> (stmt)));
4729 break;
4731 case GIMPLE_TRANSACTION:
4732 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4733 break;
4735 default:
/* Dump the offending statement to make failures debuggable.  */
4737 bool err2 = verify_gimple_stmt (stmt);
4738 if (err2)
4739 debug_gimple_stmt (stmt);
4740 err |= err2;
4745 return err;
4748 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4749 is a problem, otherwise false. */
4751 static bool
4752 verify_gimple_transaction (gtransaction *stmt)
4754 tree lab = gimple_transaction_label (stmt);
4755 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4756 return true;
4757 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4761 /* Verify the GIMPLE statements inside the statement list STMTS. */
4763 DEBUG_FUNCTION void
4764 verify_gimple_in_seq (gimple_seq stmts)
4766 timevar_push (TV_TREE_STMT_VERIFY);
4767 if (verify_gimple_in_seq_2 (stmts))
4768 internal_error ("verify_gimple failed");
4769 timevar_pop (TV_TREE_STMT_VERIFY);
4772 /* Return true when the T can be shared. */
4774 static bool
4775 tree_node_can_be_shared (tree t)
4777 if (IS_TYPE_OR_DECL_P (t)
4778 || is_gimple_min_invariant (t)
4779 || TREE_CODE (t) == SSA_NAME
4780 || t == error_mark_node
4781 || TREE_CODE (t) == IDENTIFIER_NODE)
4782 return true;
4784 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4785 return true;
4787 if (DECL_P (t))
4788 return true;
4790 return false;
4793 /* Called via walk_tree. Verify tree sharing. */
4795 static tree
4796 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4798 hash_set<void *> *visited = (hash_set<void *> *) data;
4800 if (tree_node_can_be_shared (*tp))
4802 *walk_subtrees = false;
4803 return NULL;
4806 if (visited->add (*tp))
4807 return *tp;
4809 return NULL;
4812 /* Called via walk_gimple_stmt. Verify tree sharing. */
4814 static tree
4815 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4817 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4818 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
/* Set when a stale entry is found in the EH throw-statement table;
   checked by verify_gimple_in_cfg after the traversal.  */
4821 static bool eh_error_found;
/* Traversal callback for the EH throw-statement table.  STMT is a
   statement recorded in the table; VISITED is the set of statements
   actually seen while walking the CFG.  Reports table entries whose
   statement no longer exists.  Always returns true so the traversal
   continues over all entries.  */
4822 bool
4823 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4824 hash_set<gimple> *visited)
4826 if (!visited->contains (stmt))
4828 error ("dead STMT in EH table");
4829 debug_gimple_stmt (stmt);
4830 eh_error_found = true;
4832 return true;
4835 /* Verify if the location LOCs block is in BLOCKS. */
4837 static bool
4838 verify_location (hash_set<tree> *blocks, location_t loc)
4840 tree block = LOCATION_BLOCK (loc);
4841 if (block != NULL_TREE
4842 && !blocks->contains (block))
4844 error ("location references block not in block tree");
4845 return true;
4847 if (block != NULL_TREE)
4848 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4849 return false;
4852 /* Called via walk_tree. Verify that expressions have no blocks. */
4854 static tree
4855 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4857 if (!EXPR_P (*tp))
4859 *walk_subtrees = false;
4860 return NULL;
4863 location_t loc = EXPR_LOCATION (*tp);
4864 if (LOCATION_BLOCK (loc) != NULL)
4865 return *tp;
4867 return NULL;
4870 /* Called via walk_tree. Verify locations of expressions. */
4872 static tree
4873 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4875 hash_set<tree> *blocks = (hash_set<tree> *) data;
/* DEBUG_EXPRs live outside the block tree, so the expression a
   variable's debug expr expands to must not reference any block.  */
4877 if (TREE_CODE (*tp) == VAR_DECL
4878 && DECL_HAS_DEBUG_EXPR_P (*tp))
4880 tree t = DECL_DEBUG_EXPR (*tp);
4881 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4882 if (addr)
4883 return addr;
/* Likewise for DECL_VALUE_EXPRs of variables, parameters and
   result decls.  */
4885 if ((TREE_CODE (*tp) == VAR_DECL
4886 || TREE_CODE (*tp) == PARM_DECL
4887 || TREE_CODE (*tp) == RESULT_DECL)
4888 && DECL_HAS_VALUE_EXPR_P (*tp))
4890 tree t = DECL_VALUE_EXPR (*tp);
4891 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4892 if (addr)
4893 return addr;
/* Only expressions carry locations; do not descend into other
   nodes.  */
4896 if (!EXPR_P (*tp))
4898 *walk_subtrees = false;
4899 return NULL;
4902 location_t loc = EXPR_LOCATION (*tp);
4903 if (verify_location (blocks, loc))
4904 return *tp;
4906 return NULL;
4909 /* Called via walk_gimple_op. Verify locations of expressions. */
4911 static tree
4912 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4914 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4915 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4918 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4920 static void
4921 collect_subblocks (hash_set<tree> *blocks, tree block)
4923 tree t;
4924 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4926 blocks->add (t);
4927 collect_subblocks (blocks, t);
4931 /* Verify the GIMPLE statements in the CFG of FN. */
4933 DEBUG_FUNCTION void
4934 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4936 basic_block bb;
4937 bool err = false;
4939 timevar_push (TV_TREE_STMT_VERIFY);
4940 hash_set<void *> visited;
4941 hash_set<gimple> visited_stmts;
4943 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4944 hash_set<tree> blocks;
4945 if (DECL_INITIAL (fn->decl))
4947 blocks.add (DECL_INITIAL (fn->decl));
4948 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
/* First pass over each block: verify PHI nodes.  */
4951 FOR_EACH_BB_FN (bb, fn)
4953 gimple_stmt_iterator gsi;
4955 for (gphi_iterator gpi = gsi_start_phis (bb);
4956 !gsi_end_p (gpi);
4957 gsi_next (&gpi))
4959 gphi *phi = gpi.phi ();
4960 bool err2 = false;
4961 unsigned i;
4963 visited_stmts.add (phi);
4965 if (gimple_bb (phi) != bb)
4967 error ("gimple_bb (phi) is set to a wrong basic block");
4968 err2 = true;
4971 err2 |= verify_gimple_phi (phi);
4973 /* Only PHI arguments have locations. */
4974 if (gimple_location (phi) != UNKNOWN_LOCATION)
4976 error ("PHI node with location");
4977 err2 = true;
/* Check sharing and locations of every PHI argument.  */
4980 for (i = 0; i < gimple_phi_num_args (phi); i++)
4982 tree arg = gimple_phi_arg_def (phi, i);
4983 tree addr = walk_tree (&arg, verify_node_sharing_1,
4984 &visited, NULL);
4985 if (addr)
4987 error ("incorrect sharing of tree nodes");
4988 debug_generic_expr (addr);
4989 err2 |= true;
4991 location_t loc = gimple_phi_arg_location (phi, i);
4992 if (virtual_operand_p (gimple_phi_result (phi))
4993 && loc != UNKNOWN_LOCATION)
4995 error ("virtual PHI with argument locations");
4996 err2 = true;
4998 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4999 if (addr)
5001 debug_generic_expr (addr);
5002 err2 = true;
5004 err2 |= verify_location (&blocks, loc);
5007 if (err2)
5008 debug_gimple_stmt (phi);
5009 err |= err2;
/* Second pass: verify the ordinary statements of the block.  */
5012 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5014 gimple stmt = gsi_stmt (gsi);
5015 bool err2 = false;
5016 struct walk_stmt_info wi;
5017 tree addr;
5018 int lp_nr;
5020 visited_stmts.add (stmt);
5022 if (gimple_bb (stmt) != bb)
5024 error ("gimple_bb (stmt) is set to a wrong basic block");
5025 err2 = true;
5028 err2 |= verify_gimple_stmt (stmt);
5029 err2 |= verify_location (&blocks, gimple_location (stmt));
5031 memset (&wi, 0, sizeof (wi));
5032 wi.info = (void *) &visited;
5033 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5034 if (addr)
5036 error ("incorrect sharing of tree nodes");
5037 debug_generic_expr (addr);
5038 err2 |= true;
5041 memset (&wi, 0, sizeof (wi));
5042 wi.info = (void *) &blocks;
5043 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5044 if (addr)
5046 debug_generic_expr (addr);
5047 err2 |= true;
5050 /* ??? Instead of not checking these stmts at all the walker
5051 should know its context via wi. */
5052 if (!is_gimple_debug (stmt)
5053 && !is_gimple_omp (stmt))
5055 memset (&wi, 0, sizeof (wi));
5056 addr = walk_gimple_op (stmt, verify_expr, &wi);
5057 if (addr)
5059 debug_generic_expr (addr);
5060 inform (gimple_location (stmt), "in statement");
5061 err2 |= true;
5065 /* If the statement is marked as part of an EH region, then it is
5066 expected that the statement could throw. Verify that when we
5067 have optimizations that simplify statements such that we prove
5068 that they cannot throw, that we update other data structures
5069 to match. */
5070 lp_nr = lookup_stmt_eh_lp (stmt);
5071 if (lp_nr > 0)
5073 if (!stmt_could_throw_p (stmt))
5075 if (verify_nothrow)
5077 error ("statement marked for throw, but doesn%'t");
5078 err2 |= true;
5081 else if (!gsi_one_before_end_p (gsi))
5083 error ("statement marked for throw in middle of block");
5084 err2 |= true;
5088 if (err2)
5089 debug_gimple_stmt (stmt);
5090 err |= err2;
/* Finally cross-check the EH throw table against the statements we
   actually visited; stale entries set eh_error_found.  */
5094 eh_error_found = false;
5095 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5096 if (eh_table)
5097 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5098 (&visited_stmts);
5100 if (err || eh_error_found)
5101 internal_error ("verify_gimple failed");
5103 verify_histograms ();
5104 timevar_pop (TV_TREE_STMT_VERIFY);
5108 /* Verifies that the flow information is OK. */
5110 static int
5111 gimple_verify_flow_info (void)
5113 int err = 0;
5114 basic_block bb;
5115 gimple_stmt_iterator gsi;
5116 gimple stmt;
5117 edge e;
5118 edge_iterator ei;
/* The artificial entry and exit blocks must carry no IL.  */
5120 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5121 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5123 error ("ENTRY_BLOCK has IL associated with it");
5124 err = 1;
5127 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5128 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5130 error ("EXIT_BLOCK has IL associated with it");
5131 err = 1;
5134 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5135 if (e->flags & EDGE_FALLTHRU)
5137 error ("fallthru to exit from bb %d", e->src->index);
5138 err = 1;
5141 FOR_EACH_BB_FN (bb, cfun)
5143 bool found_ctrl_stmt = false;
5145 stmt = NULL;
5147 /* Skip labels on the start of basic block. */
5148 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5150 tree label;
5151 gimple prev_stmt = stmt;
5153 stmt = gsi_stmt (gsi);
5155 if (gimple_code (stmt) != GIMPLE_LABEL)
5156 break;
5158 label = gimple_label_label (as_a <glabel *> (stmt));
5159 if (prev_stmt && DECL_NONLOCAL (label))
5161 error ("nonlocal label ");
5162 print_generic_expr (stderr, label, 0);
5163 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5164 bb->index);
5165 err = 1;
5168 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5170 error ("EH landing pad label ");
5171 print_generic_expr (stderr, label, 0);
5172 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5173 bb->index);
5174 err = 1;
5177 if (label_to_block (label) != bb)
5179 error ("label ");
5180 print_generic_expr (stderr, label, 0);
5181 fprintf (stderr, " to block does not match in bb %d",
5182 bb->index);
5183 err = 1;
5186 if (decl_function_context (label) != current_function_decl)
5188 error ("label ");
5189 print_generic_expr (stderr, label, 0);
5190 fprintf (stderr, " has incorrect context in bb %d",
5191 bb->index);
5192 err = 1;
5196 /* Verify that body of basic block BB is free of control flow. */
5197 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5199 gimple stmt = gsi_stmt (gsi);
5201 if (found_ctrl_stmt)
5203 error ("control flow in the middle of basic block %d",
5204 bb->index);
5205 err = 1;
5208 if (stmt_ends_bb_p (stmt))
5209 found_ctrl_stmt = true;
5211 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5213 error ("label ");
5214 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5215 fprintf (stderr, " in the middle of basic block %d", bb->index);
5216 err = 1;
/* Now check the outgoing edges against the last statement of the
   block.  */
5220 gsi = gsi_last_bb (bb);
5221 if (gsi_end_p (gsi))
5222 continue;
5224 stmt = gsi_stmt (gsi);
5226 if (gimple_code (stmt) == GIMPLE_LABEL)
5227 continue;
5229 err |= verify_eh_edges (stmt);
5231 if (is_ctrl_stmt (stmt))
5233 FOR_EACH_EDGE (e, ei, bb->succs)
5234 if (e->flags & EDGE_FALLTHRU)
5236 error ("fallthru edge after a control statement in bb %d",
5237 bb->index);
5238 err = 1;
5242 if (gimple_code (stmt) != GIMPLE_COND)
5244 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5245 after anything else but if statement. */
5246 FOR_EACH_EDGE (e, ei, bb->succs)
5247 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5249 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5250 bb->index);
5251 err = 1;
5255 switch (gimple_code (stmt))
5257 case GIMPLE_COND:
5259 edge true_edge;
5260 edge false_edge;
5262 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5264 if (!true_edge
5265 || !false_edge
5266 || !(true_edge->flags & EDGE_TRUE_VALUE)
5267 || !(false_edge->flags & EDGE_FALSE_VALUE)
5268 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5269 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5270 || EDGE_COUNT (bb->succs) >= 3)
5272 error ("wrong outgoing edge flags at end of bb %d",
5273 bb->index);
5274 err = 1;
5277 break;
5279 case GIMPLE_GOTO:
5280 if (simple_goto_p (stmt))
5282 error ("explicit goto at end of bb %d", bb->index);
5283 err = 1;
5285 else
5287 /* FIXME. We should double check that the labels in the
5288 destination blocks have their address taken. */
5289 FOR_EACH_EDGE (e, ei, bb->succs)
5290 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5291 | EDGE_FALSE_VALUE))
5292 || !(e->flags & EDGE_ABNORMAL))
5294 error ("wrong outgoing edge flags at end of bb %d",
5295 bb->index);
5296 err = 1;
5299 break;
5301 case GIMPLE_CALL:
5302 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5303 break;
5304 /* ... fallthru ... */
5305 case GIMPLE_RETURN:
5306 if (!single_succ_p (bb)
5307 || (single_succ_edge (bb)->flags
5308 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5309 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5311 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5312 err = 1;
5314 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5316 error ("return edge does not point to exit in bb %d",
5317 bb->index);
5318 err = 1;
5320 break;
5322 case GIMPLE_SWITCH:
5324 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5325 tree prev;
5326 edge e;
5327 size_t i, n;
5329 n = gimple_switch_num_labels (switch_stmt);
/* The aux field of destination blocks is used as a scratch marker:
   1 = case target, 2 = case target with a matching edge; it is
   cleared again before leaving this case.  */
5331 /* Mark all the destination basic blocks. */
5332 for (i = 0; i < n; ++i)
5334 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5335 basic_block label_bb = label_to_block (lab);
5336 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5337 label_bb->aux = (void *)1;
5340 /* Verify that the case labels are sorted. */
5341 prev = gimple_switch_label (switch_stmt, 0);
5342 for (i = 1; i < n; ++i)
5344 tree c = gimple_switch_label (switch_stmt, i);
5345 if (!CASE_LOW (c))
5347 error ("found default case not at the start of "
5348 "case vector");
5349 err = 1;
5350 continue;
5352 if (CASE_LOW (prev)
5353 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5355 error ("case labels not sorted: ");
5356 print_generic_expr (stderr, prev, 0);
5357 fprintf (stderr," is greater than ");
5358 print_generic_expr (stderr, c, 0);
5359 fprintf (stderr," but comes before it.\n");
5360 err = 1;
5362 prev = c;
5364 /* VRP will remove the default case if it can prove it will
5365 never be executed. So do not verify there always exists
5366 a default case here. */
5368 FOR_EACH_EDGE (e, ei, bb->succs)
5370 if (!e->dest->aux)
5372 error ("extra outgoing edge %d->%d",
5373 bb->index, e->dest->index);
5374 err = 1;
5377 e->dest->aux = (void *)2;
5378 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5379 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5381 error ("wrong outgoing edge flags at end of bb %d",
5382 bb->index);
5383 err = 1;
5387 /* Check that we have all of them. */
5388 for (i = 0; i < n; ++i)
5390 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5391 basic_block label_bb = label_to_block (lab);
5393 if (label_bb->aux != (void *)2)
5395 error ("missing edge %i->%i", bb->index, label_bb->index);
5396 err = 1;
5400 FOR_EACH_EDGE (e, ei, bb->succs)
5401 e->dest->aux = (void *)0;
5403 break;
5405 case GIMPLE_EH_DISPATCH:
5406 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5407 break;
5409 default:
5410 break;
5414 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5415 verify_dominators (CDI_DOMINATORS);
5417 return err;
5421 /* Updates phi nodes after creating a forwarder block joined
5422 by edge FALLTHRU. */
5424 static void
5425 gimple_make_forwarder_block (edge fallthru)
5427 edge e;
5428 edge_iterator ei;
5429 basic_block dummy, bb;
5430 tree var;
5431 gphi_iterator gsi;
5433 dummy = fallthru->src;
5434 bb = fallthru->dest;
/* With a single predecessor BB needs no new PHIs.  */
5436 if (single_pred_p (bb))
5437 return;
5439 /* If we redirected a branch we must create new PHI nodes at the
5440 start of BB. */
5441 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5443 gphi *phi, *new_phi;
5445 phi = gsi.phi ();
5446 var = gimple_phi_result (phi);
/* The new PHI in BB takes over VAR; the old PHI in the forwarder
   gets a fresh SSA name which feeds the new PHI over FALLTHRU.  */
5447 new_phi = create_phi_node (var, bb);
5448 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5449 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5450 UNKNOWN_LOCATION);
5453 /* Add the arguments we have stored on edges. */
5454 FOR_EACH_EDGE (e, ei, bb->preds)
5456 if (e == fallthru)
5457 continue;
5459 flush_pending_stmts (e);
5464 /* Return a non-special label in the head of basic block BLOCK.
5465 Create one if it doesn't exist. */
5467 tree
5468 gimple_block_label (basic_block bb)
5470 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5471 bool first = true;
5472 tree label;
5473 glabel *stmt;
/* Scan the leading labels of the block for a reusable (non-nonlocal)
   one.  */
5475 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5477 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5478 if (!stmt)
5479 break;
5480 label = gimple_label_label (stmt);
5481 if (!DECL_NONLOCAL (label))
/* Reuse this label; move it to the front so it stays canonical.  */
5483 if (!first)
5484 gsi_move_before (&i, &s);
5485 return label;
/* No usable label found -- create a fresh artificial one at the
   head of the block.  */
5489 label = create_artificial_label (UNKNOWN_LOCATION);
5490 stmt = gimple_build_label (label);
5491 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5492 return label;
5496 /* Attempt to perform edge redirection by replacing a possibly complex
5497 jump instruction by a goto or by removing the jump completely.
5498 This can apply only if all edges now point to the same block. The
5499 parameters and return values are equivalent to
5500 redirect_edge_and_branch. */
5502 static edge
5503 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5505 basic_block src = e->src;
5506 gimple_stmt_iterator i;
5507 gimple stmt;
5509 /* We can replace or remove a complex jump only when we have exactly
5510 two edges. */
5511 if (EDGE_COUNT (src->succs) != 2
5512 /* Verify that all targets will be TARGET. Specifically, the
5513 edge that is not E must also go to TARGET. */
/* The index expression picks the "other" edge: it is 1 when edge 0
   is E, and 0 otherwise.  */
5514 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5515 return NULL;
5517 i = gsi_last_bb (src);
5518 if (gsi_end_p (i))
5519 return NULL;
5521 stmt = gsi_stmt (i);
/* Only conditional jumps and switches can be removed; both edges
   then collapse into a single fallthru edge.  */
5523 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5525 gsi_remove (&i, true);
5526 e = ssa_redirect_edge (e, target);
5527 e->flags = EDGE_FALLTHRU;
5528 return e;
5531 return NULL;
5535 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5536 edge representing the redirected branch. */
5538 static edge
5539 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5541 basic_block bb = e->src;
5542 gimple_stmt_iterator gsi;
5543 edge ret;
5544 gimple stmt;
/* Abnormal edges and self-redirections cannot be handled here.  */
5546 if (e->flags & EDGE_ABNORMAL)
5547 return NULL;
5549 if (e->dest == dest)
5550 return NULL;
5552 if (e->flags & EDGE_EH)
5553 return redirect_eh_edge (e, dest);
/* Try the cheap route first: drop the jump entirely when all
   successors end up at DEST.  */
5555 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5557 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5558 if (ret)
5559 return ret;
/* Otherwise dispatch on the statement ending the source block and
   update any label references it holds.  */
5562 gsi = gsi_last_bb (bb);
5563 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5565 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5567 case GIMPLE_COND:
5568 /* For COND_EXPR, we only need to redirect the edge. */
5569 break;
5571 case GIMPLE_GOTO:
5572 /* No non-abnormal edges should lead from a non-simple goto, and
5573 simple ones should be represented implicitly. */
5574 gcc_unreachable ();
5576 case GIMPLE_SWITCH:
5578 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5579 tree label = gimple_block_label (dest);
5580 tree cases = get_cases_for_edge (e, switch_stmt);
5582 /* If we have a list of cases associated with E, then use it
5583 as it's a lot faster than walking the entire case vector. */
5584 if (cases)
5586 edge e2 = find_edge (e->src, dest);
5587 tree last, first;
5589 first = cases;
5590 while (cases)
5592 last = cases;
5593 CASE_LABEL (cases) = label;
5594 cases = CASE_CHAIN (cases);
5597 /* If there was already an edge in the CFG, then we need
5598 to move all the cases associated with E to E2. */
5599 if (e2)
5601 tree cases2 = get_cases_for_edge (e2, switch_stmt);
/* Splice E's case chain onto the front of E2's chain.  */
5603 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5604 CASE_CHAIN (cases2) = first;
5606 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5608 else
/* No per-edge case list -- walk the whole case vector and
   retarget every case that pointed at the old destination.  */
5610 size_t i, n = gimple_switch_num_labels (switch_stmt);
5612 for (i = 0; i < n; i++)
5614 tree elt = gimple_switch_label (switch_stmt, i);
5615 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5616 CASE_LABEL (elt) = label;
5620 break;
5622 case GIMPLE_ASM:
5624 gasm *asm_stmt = as_a <gasm *> (stmt);
5625 int i, n = gimple_asm_nlabels (asm_stmt);
5626 tree label = NULL;
5628 for (i = 0; i < n; ++i)
5630 tree cons = gimple_asm_label_op (asm_stmt, i);
5631 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5633 if (!label)
5634 label = gimple_block_label (dest);
5635 TREE_VALUE (cons) = label;
5639 /* If we didn't find any label matching the former edge in the
5640 asm labels, we must be redirecting the fallthrough
5641 edge. */
5642 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5644 break;
5646 case GIMPLE_RETURN:
/* Drop the return; the edge becomes a plain fallthru to DEST.  */
5647 gsi_remove (&gsi, true);
5648 e->flags |= EDGE_FALLTHRU;
5649 break;
5651 case GIMPLE_OMP_RETURN:
5652 case GIMPLE_OMP_CONTINUE:
5653 case GIMPLE_OMP_SECTIONS_SWITCH:
5654 case GIMPLE_OMP_FOR:
5655 /* The edges from OMP constructs can be simply redirected. */
5656 break;
5658 case GIMPLE_EH_DISPATCH:
5659 if (!(e->flags & EDGE_FALLTHRU))
5660 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5661 break;
5663 case GIMPLE_TRANSACTION:
5664 /* The ABORT edge has a stored label associated with it, otherwise
5665 the edges are simply redirectable. */
5666 if (e->flags == 0)
5667 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5668 gimple_block_label (dest));
5669 break;
5671 default:
5672 /* Otherwise it must be a fallthru edge, and we don't need to
5673 do anything besides redirecting it. */
5674 gcc_assert (e->flags & EDGE_FALLTHRU);
5675 break;
5678 /* Update/insert PHI nodes as necessary. */
5680 /* Now update the edges in the CFG. */
5681 e = ssa_redirect_edge (e, dest);
5683 return e;
5686 /* Returns true if it is possible to remove edge E by redirecting
5687 it to the destination of the other edge from E->src. */
5689 static bool
5690 gimple_can_remove_branch_p (const_edge e)
5692 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5693 return false;
5695 return true;
5698 /* Simple wrapper, as we can always redirect fallthru edges. */
5700 static basic_block
5701 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5703 e = gimple_redirect_edge_and_branch (e, dest);
5704 gcc_assert (e);
5706 return NULL;
5710 /* Splits basic block BB after statement STMT (but at least after the
5711 labels). If STMT is NULL, BB is split just after the labels. */
5713 static basic_block
5714 gimple_split_block (basic_block bb, void *stmt)
5716 gimple_stmt_iterator gsi;
5717 gimple_stmt_iterator gsi_tgt;
5718 gimple_seq list;
5719 basic_block new_bb;
5720 edge e;
5721 edge_iterator ei;
5723 new_bb = create_empty_bb (bb);
5725 /* Redirect the outgoing edges. */
5726 new_bb->succs = bb->succs;
5727 bb->succs = NULL;
5728 FOR_EACH_EDGE (e, ei, new_bb->succs)
5729 e->src = new_bb;
5731 /* Get a stmt iterator pointing to the first stmt to move. */
5732 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5733 gsi = gsi_after_labels (bb);
5734 else
5736 gsi = gsi_for_stmt ((gimple) stmt);
5737 gsi_next (&gsi);
5740 /* Move everything from GSI to the new basic block. */
5741 if (gsi_end_p (gsi))
5742 return new_bb;
5744 /* Split the statement list - avoid re-creating new containers as this
5745 brings ugly quadratic memory consumption in the inliner.
5746 (We are still quadratic since we need to update stmt BB pointers,
5747 sadly.) */
5748 gsi_split_seq_before (&gsi, &list);
5749 set_bb_seq (new_bb, list);
/* Re-home the moved statements in the new block.  */
5750 for (gsi_tgt = gsi_start (list);
5751 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5752 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5754 return new_bb;
5758 /* Moves basic block BB after block AFTER. */
5760 static bool
5761 gimple_move_block_after (basic_block bb, basic_block after)
5763 if (bb->prev_bb == after)
5764 return true;
5766 unlink_block (bb);
5767 link_block (bb, after);
5769 return true;
5773 /* Return TRUE if block BB has no executable statements, otherwise return
5774 FALSE. */
5776 static bool
5777 gimple_empty_block_p (basic_block bb)
5779 /* BB must have no executable statements. */
5780 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5781 if (phi_nodes (bb))
5782 return false;
5783 if (gsi_end_p (gsi))
5784 return true;
5785 if (is_gimple_debug (gsi_stmt (gsi)))
5786 gsi_next_nondebug (&gsi);
5787 return gsi_end_p (gsi);
5791 /* Split a basic block if it ends with a conditional branch and if the
5792 other part of the block is not empty. */
5794 static basic_block
5795 gimple_split_block_before_cond_jump (basic_block bb)
5797 gimple last, split_point;
5798 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5799 if (gsi_end_p (gsi))
5800 return NULL;
5801 last = gsi_stmt (gsi);
5802 if (gimple_code (last) != GIMPLE_COND
5803 && gimple_code (last) != GIMPLE_SWITCH)
5804 return NULL;
5805 gsi_prev_nondebug (&gsi);
5806 split_point = gsi_stmt (gsi);
5807 return split_block (bb, split_point)->dest;
5811 /* Return true if basic_block can be duplicated. */
5813 static bool
5814 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5816 return true;
5819 /* Create a duplicate of the basic block BB. NOTE: This does not
5820 preserve SSA form. */
5822 static basic_block
5823 gimple_duplicate_bb (basic_block bb)
5825 basic_block new_bb;
5826 gimple_stmt_iterator gsi_tgt;
5828 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5830 /* Copy the PHI nodes. We ignore PHI node arguments here because
5831 the incoming edges have not been setup yet. */
5832 for (gphi_iterator gpi = gsi_start_phis (bb);
5833 !gsi_end_p (gpi);
5834 gsi_next (&gpi))
5836 gphi *phi, *copy;
5837 phi = gpi.phi ();
5838 copy = create_phi_node (NULL_TREE, new_bb);
5839 create_new_def_for (gimple_phi_result (phi), copy,
5840 gimple_phi_result_ptr (copy));
5841 gimple_set_uid (copy, gimple_uid (phi));
/* Now copy each non-label statement in order.  */
5844 gsi_tgt = gsi_start_bb (new_bb);
5845 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5846 !gsi_end_p (gsi);
5847 gsi_next (&gsi))
5849 def_operand_p def_p;
5850 ssa_op_iter op_iter;
5851 tree lhs;
5852 gimple stmt, copy;
5854 stmt = gsi_stmt (gsi);
5855 if (gimple_code (stmt) == GIMPLE_LABEL)
5856 continue;
5858 /* Don't duplicate label debug stmts. */
5859 if (gimple_debug_bind_p (stmt)
5860 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5861 == LABEL_DECL)
5862 continue;
5864 /* Create a new copy of STMT and duplicate STMT's virtual
5865 operands. */
5866 copy = gimple_copy (stmt);
5867 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5869 maybe_duplicate_eh_stmt (copy, stmt);
5870 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5872 /* When copying around a stmt writing into a local non-user
5873 aggregate, make sure it won't share stack slot with other
5874 vars. */
5875 lhs = gimple_get_lhs (stmt);
5876 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5878 tree base = get_base_address (lhs);
5879 if (base
5880 && (TREE_CODE (base) == VAR_DECL
5881 || TREE_CODE (base) == RESULT_DECL)
5882 && DECL_IGNORED_P (base)
5883 && !TREE_STATIC (base)
5884 && !DECL_EXTERNAL (base)
5885 && (TREE_CODE (base) != VAR_DECL
5886 || !DECL_HAS_VALUE_EXPR_P (base)))
5887 DECL_NONSHAREABLE (base) = 1;
5890 /* Create new names for all the definitions created by COPY and
5891 add replacement mappings for each new name. */
5892 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5893 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5896 return new_bb;
5899 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5901 static void
5902 add_phi_args_after_copy_edge (edge e_copy)
5904 basic_block bb, bb_copy = e_copy->src, dest;
5905 edge e;
5906 edge_iterator ei;
5907 gphi *phi, *phi_copy;
5908 tree def;
5909 gphi_iterator psi, psi_copy;
5911 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5912 return;
/* Map the copied source block back to its original (duplicated
   blocks carry BB_DUPLICATED).  */
5914 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5916 if (e_copy->dest->flags & BB_DUPLICATED)
5917 dest = get_bb_original (e_copy->dest);
5918 else
5919 dest = e_copy->dest;
5921 e = find_edge (bb, dest);
5922 if (!e)
5924 /* During loop unrolling the target of the latch edge is copied.
5925 In this case we are not looking for edge to dest, but to
5926 duplicated block whose original was dest. */
5927 FOR_EACH_EDGE (e, ei, bb->succs)
5929 if ((e->dest->flags & BB_DUPLICATED)
5930 && get_bb_original (e->dest) == dest)
5931 break;
5934 gcc_assert (e != NULL);
/* Walk the PHIs of the original and copied destination in lockstep,
   copying the argument that flows along the original edge E onto
   E_COPY.  */
5937 for (psi = gsi_start_phis (e->dest),
5938 psi_copy = gsi_start_phis (e_copy->dest);
5939 !gsi_end_p (psi);
5940 gsi_next (&psi), gsi_next (&psi_copy))
5942 phi = psi.phi ();
5943 phi_copy = psi_copy.phi ();
5944 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5945 add_phi_arg (phi_copy, def, e_copy,
5946 gimple_phi_arg_location_from_edge (phi, e));
5951 /* Basic block BB_COPY was created by code duplication. Add phi node
5952 arguments for edges going out of BB_COPY. The blocks that were
5953 duplicated have BB_DUPLICATED set. */
5955 void
5956 add_phi_args_after_copy_bb (basic_block bb_copy)
5958 edge e_copy;
5959 edge_iterator ei;
5961 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5963 add_phi_args_after_copy_edge (e_copy);
5967 /* Blocks in REGION_COPY array of length N_REGION were created by
5968 duplication of basic blocks. Add phi node arguments for edges
5969 going from these blocks. If E_COPY is not NULL, also add
5970 phi node arguments for its destination.*/
5972 void
5973 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5974 edge e_copy)
5976 unsigned i;
5978 for (i = 0; i < n_region; i++)
5979 region_copy[i]->flags |= BB_DUPLICATED;
5981 for (i = 0; i < n_region; i++)
5982 add_phi_args_after_copy_bb (region_copy[i]);
5983 if (e_copy)
5984 add_phi_args_after_copy_edge (e_copy);
5986 for (i = 0; i < n_region; i++)
5987 region_copy[i]->flags &= ~BB_DUPLICATED;
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region's entry block.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  /* Scale the profile: the original region keeps the share that does not
     enter through ENTRY, the copy receives ENTRY's share.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq,
				 total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6144 /* Checks if BB is part of the region defined by N_REGION BBS. */
6145 static bool
6146 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6148 unsigned int n;
6150 for (n = 0; n < n_region; n++)
6152 if (bb == bbs[n])
6153 return true;
6155 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

/* NOTE(review): the ATTRIBUTE_UNUSED markers below are stale -- every
   parameter is in fact used in the body.  */
bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			    basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			    basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* EXIT's source must end in a two-way condition; EXITS[1] is the other
     outgoing edge.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any loop whose header lies inside the region, together with
     its subloops.  */
  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  /* Scale the profile between original region and copy, split by the
     probability of taking EXIT.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq,
				 total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]),
				      exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e,
			 gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6348 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6349 adding blocks when the dominator traversal reaches EXIT. This
6350 function silently assumes that ENTRY strictly dominates EXIT. */
6352 void
6353 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6354 vec<basic_block> *bbs_p)
6356 basic_block son;
6358 for (son = first_dom_son (CDI_DOMINATORS, entry);
6359 son;
6360 son = next_dom_son (CDI_DOMINATORS, son))
6362 bbs_p->safe_push (son);
6363 if (son != exit)
6364 gather_blocks_in_sese_region (son, exit, bbs_p);
6368 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6369 The duplicates are recorded in VARS_MAP. */
6371 static void
6372 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6373 tree to_context)
6375 tree t = *tp, new_t;
6376 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6378 if (DECL_CONTEXT (t) == to_context)
6379 return;
6381 bool existed;
6382 tree &loc = vars_map->get_or_insert (t, &existed);
6384 if (!existed)
6386 if (SSA_VAR_P (t))
6388 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6389 add_local_decl (f, new_t);
6391 else
6393 gcc_assert (TREE_CODE (t) == CONST_DECL);
6394 new_t = copy_node (t);
6396 DECL_CONTEXT (new_t) = to_context;
6398 loc = new_t;
6400 else
6401 new_t = loc;
6403 *tp = new_t;
6407 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6408 VARS_MAP maps old ssa names and var_decls to the new ones. */
6410 static tree
6411 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6412 tree to_context)
6414 tree new_name;
6416 gcc_assert (!virtual_operand_p (name));
6418 tree *loc = vars_map->get (name);
6420 if (!loc)
6422 tree decl = SSA_NAME_VAR (name);
6423 if (decl)
6425 replace_by_duplicate_decl (&decl, vars_map, to_context);
6426 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6427 decl, SSA_NAME_DEF_STMT (name));
6428 if (SSA_NAME_IS_DEFAULT_DEF (name))
6429 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6430 decl, new_name);
6432 else
6433 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6434 name, SSA_NAME_DEF_STMT (name));
6436 vars_map->put (name, new_name);
6438 else
6439 new_name = *loc;
6441 return new_name;
/* Context carried through the statement walk done by move_block_to_fn
   via move_stmt_r / move_stmt_op.  */

struct move_stmt_d
{
  tree orig_block;	/* BLOCK whose statements are being moved; NULL_TREE
			   matches any non-NULL block (see move_stmt_op).  */
  tree new_block;	/* Replacement TREE_BLOCK / GIMPLE_BLOCK.  */
  tree from_context;	/* FUNCTION_DECL statements are moved out of.  */
  tree to_context;	/* FUNCTION_DECL statements are moved into.  */
  hash_map<tree, tree> *vars_map;  /* Maps old decls/SSA names to their
				      duplicates in TO_CONTEXT.  */
  htab_t new_label_map;	/* tree_map table mapping old LABEL_DECLs to the
			   labels created by new_label_mapper.  */
  hash_map<void *, void *> *eh_map;  /* Maps source EH regions to their
					duplicates in the destination.  */
  bool remap_decls_p;	/* Whether local decls should be remapped; cleared
			   while walking inside OMP directives.  */
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* With checking enabled, verify that any other block is nested
	     inside ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels duplicated for EH regions were pre-mapped by
	     new_label_mapper; substitute them here.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}

      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6522 /* Helper for move_stmt_r. Given an EH region number for the source
6523 function, map that to the duplicate EH regio number in the dest. */
6525 static int
6526 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6528 eh_region old_r, new_r;
6530 old_r = get_eh_region_from_number (old_nr);
6531 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6533 return new_r->index;
6536 /* Similar, but operate on INTEGER_CSTs. */
6538 static tree
6539 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6541 int old_nr, new_nr;
6543 old_nr = tree_to_shwi (old_t_nr);
6544 new_nr = move_stmt_eh_region_nr (old_nr, p);
6546 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.
     The loop's AUX field was set up by the caller to point at the
     corresponding loop in DEST_CFUN.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map,
					    dest_cfun->decl));
	}

      /* Rewrite the BLOCK part of each PHI argument location.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus,
					       d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label from CFUN's label-to-block map to
	     DEST_CFUN's, growing the latter if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Rewrite the BLOCK part of each outgoing edge's goto locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6807 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6808 the outermost EH region. Use REGION as the incoming base EH region. */
6810 static eh_region
6811 find_outermost_region_in_block (struct function *src_cfun,
6812 basic_block bb, eh_region region)
6814 gimple_stmt_iterator si;
6816 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6818 gimple stmt = gsi_stmt (si);
6819 eh_region stmt_region;
6820 int lp_nr;
6822 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6823 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6824 if (stmt_region)
6826 if (region == NULL)
6827 region = stmt_region;
6828 else if (stmt_region != region)
6830 region = eh_region_outermost (src_cfun, stmt_region, region);
6831 gcc_assert (region != NULL);
6836 return region;
6839 static tree
6840 new_label_mapper (tree decl, void *data)
6842 htab_t hash = (htab_t) data;
6843 struct tree_map *m;
6844 void **slot;
6846 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6848 m = XNEW (struct tree_map);
6849 m->hash = DECL_UID (decl);
6850 m->base.from = decl;
6851 m->to = create_artificial_label (UNKNOWN_LOCATION);
6852 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6853 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6854 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6856 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6857 gcc_assert (*slot == NULL);
6859 *slot = m;
6861 return m->to;
6864 /* Tree walker to replace the decls used inside value expressions by
6865 duplicates. */
6867 static tree
6868 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6870 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6872 switch (TREE_CODE (*tp))
6874 case VAR_DECL:
6875 case PARM_DECL:
6876 case RESULT_DECL:
6877 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6878 break;
6879 default:
6880 break;
6883 if (IS_TYPE_OR_DECL_P (*tp))
6884 *walk_subtrees = false;
6886 return NULL;
6889 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6890 subblocks. */
6892 static void
6893 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6894 tree to_context)
6896 tree *tp, t;
6898 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6900 t = *tp;
6901 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6902 continue;
6903 replace_by_duplicate_decl (&t, vars_map, to_context);
6904 if (t != *tp)
6906 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6908 tree x = DECL_VALUE_EXPR (*tp);
6909 struct replace_decls_d rd = { vars_map, to_context };
6910 unshare_expr (x);
6911 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6912 SET_DECL_VALUE_EXPR (t, x);
6913 DECL_HAS_VALUE_EXPR_P (t) = 1;
6915 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6916 *tp = t;
6920 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6921 replace_block_vars_by_duplicates (block, vars_map, to_context);
6924 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6925 from FN1 to FN2. */
6927 static void
6928 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6929 struct loop *loop)
6931 /* Discard it from the old loop array. */
6932 (*get_loops (fn1))[loop->num] = NULL;
6934 /* Place it in the new loop array, assigning it a new number. */
6935 loop->num = number_of_loops (fn2);
6936 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6938 /* Recurse to children. */
6939 for (loop = loop->inner; loop; loop = loop->next)
6940 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Collect region membership in a bitmap for O(1) lookups below.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  /* The entry block has exactly one predecessor, outside the
	     region.  */
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	/* Every predecessor of a non-entry block is inside the region.  */
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  /* The exit block has exactly one successor, outside the
	     region.  */
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	/* Every successor of a non-exit block is inside the region.  */
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
6998 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6999 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7000 single basic block in the original CFG and the new basic block is
7001 returned. DEST_CFUN must not have a CFG yet.
7003 Note that the region need not be a pure SESE region. Blocks inside
7004 the region may contain calls to abort/exit. The only restriction
7005 is that ENTRY_BB should be the only entry point and it must
7006 dominate EXIT_BB.
7008 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7009 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7010 to the new function.
7012 All local variables referenced in the region are assumed to be in
7013 the corresponding BLOCK_VARS and unexpanded variable lists
7014 associated with DEST_CFUN. */
7016 basic_block
7017 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7018 basic_block exit_bb, tree orig_block)
7020 vec<basic_block> bbs, dom_bbs;
7021 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7022 basic_block after, bb, *entry_pred, *exit_succ, abb;
7023 struct function *saved_cfun = cfun;
7024 int *entry_flag, *exit_flag;
7025 unsigned *entry_prob, *exit_prob;
7026 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7027 edge e;
7028 edge_iterator ei;
7029 htab_t new_label_map;
7030 hash_map<void *, void *> *eh_map;
7031 struct loop *loop = entry_bb->loop_father;
7032 struct loop *loop0 = get_loop (saved_cfun, 0);
7033 struct move_stmt_d d;
7035 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7036 region. */
7037 gcc_assert (entry_bb != exit_bb
7038 && (!exit_bb
7039 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7041 /* Collect all the blocks in the region. Manually add ENTRY_BB
7042 because it won't be added by dfs_enumerate_from. */
7043 bbs.create (0);
7044 bbs.safe_push (entry_bb);
7045 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7046 #ifdef ENABLE_CHECKING
7047 verify_sese (entry_bb, exit_bb, &bbs);
7048 #endif
7050 /* The blocks that used to be dominated by something in BBS will now be
7051 dominated by the new block. */
7052 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7053 bbs.address (),
7054 bbs.length ());
7056 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7057 the predecessor edges to ENTRY_BB and the successor edges to
7058 EXIT_BB so that we can re-attach them to the new basic block that
7059 will replace the region. */
7060 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7061 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7062 entry_flag = XNEWVEC (int, num_entry_edges);
7063 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7064 i = 0;
7065 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7067 entry_prob[i] = e->probability;
7068 entry_flag[i] = e->flags;
7069 entry_pred[i++] = e->src;
7070 remove_edge (e);
7073 if (exit_bb)
7075 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7076 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7077 exit_flag = XNEWVEC (int, num_exit_edges);
7078 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7079 i = 0;
7080 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7082 exit_prob[i] = e->probability;
7083 exit_flag[i] = e->flags;
7084 exit_succ[i++] = e->dest;
7085 remove_edge (e);
7088 else
7090 num_exit_edges = 0;
7091 exit_succ = NULL;
7092 exit_flag = NULL;
7093 exit_prob = NULL;
7096 /* Switch context to the child function to initialize DEST_FN's CFG. */
7097 gcc_assert (dest_cfun->cfg == NULL);
7098 push_cfun (dest_cfun);
7100 init_empty_tree_cfg ();
7102 /* Initialize EH information for the new function. */
7103 eh_map = NULL;
7104 new_label_map = NULL;
7105 if (saved_cfun->eh)
7107 eh_region region = NULL;
7109 FOR_EACH_VEC_ELT (bbs, i, bb)
7110 region = find_outermost_region_in_block (saved_cfun, bb, region);
7112 init_eh_for_function ();
7113 if (region != NULL)
7115 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7116 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7117 new_label_mapper, new_label_map);
7121 /* Initialize an empty loop tree. */
7122 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7123 init_loops_structure (dest_cfun, loops, 1);
7124 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7125 set_loops_for_fn (dest_cfun, loops);
7127 /* Move the outlined loop tree part. */
7128 num_nodes = bbs.length ();
7129 FOR_EACH_VEC_ELT (bbs, i, bb)
7131 if (bb->loop_father->header == bb)
7133 struct loop *this_loop = bb->loop_father;
7134 struct loop *outer = loop_outer (this_loop);
7135 if (outer == loop
7136 /* If the SESE region contains some bbs ending with
7137 a noreturn call, those are considered to belong
7138 to the outermost loop in saved_cfun, rather than
7139 the entry_bb's loop_father. */
7140 || outer == loop0)
7142 if (outer != loop)
7143 num_nodes -= this_loop->num_nodes;
7144 flow_loop_tree_node_remove (bb->loop_father);
7145 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7146 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7149 else if (bb->loop_father == loop0 && loop0 != loop)
7150 num_nodes--;
7152 /* Remove loop exits from the outlined region. */
7153 if (loops_for_fn (saved_cfun)->exits)
7154 FOR_EACH_EDGE (e, ei, bb->succs)
7156 struct loops *l = loops_for_fn (saved_cfun);
7157 loop_exit **slot
7158 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7159 NO_INSERT);
7160 if (slot)
7161 l->exits->clear_slot (slot);
7166 /* Adjust the number of blocks in the tree root of the outlined part. */
7167 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7169 /* Setup a mapping to be used by move_block_to_fn. */
7170 loop->aux = current_loops->tree_root;
7171 loop0->aux = current_loops->tree_root;
7173 pop_cfun ();
7175 /* Move blocks from BBS into DEST_CFUN. */
7176 gcc_assert (bbs.length () >= 2);
7177 after = dest_cfun->cfg->x_entry_block_ptr;
7178 hash_map<tree, tree> vars_map;
7180 memset (&d, 0, sizeof (d));
7181 d.orig_block = orig_block;
7182 d.new_block = DECL_INITIAL (dest_cfun->decl);
7183 d.from_context = cfun->decl;
7184 d.to_context = dest_cfun->decl;
7185 d.vars_map = &vars_map;
7186 d.new_label_map = new_label_map;
7187 d.eh_map = eh_map;
7188 d.remap_decls_p = true;
7190 FOR_EACH_VEC_ELT (bbs, i, bb)
7192 /* No need to update edge counts on the last block. It has
7193 already been updated earlier when we detached the region from
7194 the original CFG. */
7195 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7196 after = bb;
7199 loop->aux = NULL;
7200 loop0->aux = NULL;
7201 /* Loop sizes are no longer correct, fix them up. */
7202 loop->num_nodes -= num_nodes;
7203 for (struct loop *outer = loop_outer (loop);
7204 outer; outer = loop_outer (outer))
7205 outer->num_nodes -= num_nodes;
7206 loop0->num_nodes -= bbs.length () - num_nodes;
7208 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7210 struct loop *aloop;
7211 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7212 if (aloop != NULL)
7214 if (aloop->simduid)
7216 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7217 d.to_context);
7218 dest_cfun->has_simduid_loops = true;
7220 if (aloop->force_vectorize)
7221 dest_cfun->has_force_vectorize_loops = true;
7225 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7226 if (orig_block)
7228 tree block;
7229 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7230 == NULL_TREE);
7231 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7232 = BLOCK_SUBBLOCKS (orig_block);
7233 for (block = BLOCK_SUBBLOCKS (orig_block);
7234 block; block = BLOCK_CHAIN (block))
7235 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7236 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7239 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7240 &vars_map, dest_cfun->decl);
7242 if (new_label_map)
7243 htab_delete (new_label_map);
7244 if (eh_map)
7245 delete eh_map;
7247 /* Rewire the entry and exit blocks. The successor to the entry
7248 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7249 the child function. Similarly, the predecessor of DEST_FN's
7250 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7251 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7252 various CFG manipulation function get to the right CFG.
7254 FIXME, this is silly. The CFG ought to become a parameter to
7255 these helpers. */
7256 push_cfun (dest_cfun);
7257 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7258 if (exit_bb)
7259 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7260 pop_cfun ();
7262 /* Back in the original function, the SESE region has disappeared,
7263 create a new basic block in its place. */
7264 bb = create_empty_bb (entry_pred[0]);
7265 if (current_loops)
7266 add_bb_to_loop (bb, loop);
7267 for (i = 0; i < num_entry_edges; i++)
7269 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7270 e->probability = entry_prob[i];
7273 for (i = 0; i < num_exit_edges; i++)
7275 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7276 e->probability = exit_prob[i];
7279 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7280 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7281 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7282 dom_bbs.release ();
7284 if (exit_bb)
7286 free (exit_prob);
7287 free (exit_flag);
7288 free (exit_succ);
7290 free (entry_prob);
7291 free (entry_flag);
7292 free (entry_pred);
7293 bbs.release ();
7295 return bb;
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  Depending on the state of the function (raw tree,
   lowered GIMPLE, GIMPLE with CFG) a different dump form is chosen.  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  /* Transactional-memory clones get a "[tm-clone]" marker in the header.  */
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Many of the printers below consult current_function_decl; switch it
     to FNDECL for the duration of the dump and restore it on every exit
     path.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the comma-separated formal argument list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* Raw dump of a function without a gimple body: dump the tree nodes
     and return early.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Anonymous SSA names (those without an underlying variable) are
	 declared here as well; start at 1 since name 0 is unused.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND already prints its own
	 braces; anything else gets wrapped explicitly below.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience entry point intended to be called from a debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
7470 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7472 static void
7473 print_pred_bbs (FILE *file, basic_block bb)
7475 edge e;
7476 edge_iterator ei;
7478 FOR_EACH_EDGE (e, ei, bb->preds)
7479 fprintf (file, "bb_%d ", e->src->index);
7483 /* Print on FILE the indexes for the successors of basic_block BB. */
7485 static void
7486 print_succ_bbs (FILE *file, basic_block bb)
7488 edge e;
7489 edge_iterator ei;
7491 FOR_EACH_EDGE (e, ei, bb->succs)
7492 fprintf (file, "bb_%d ", e->dest->index);
7495 /* Print to FILE the basic block BB following the VERBOSITY level. */
7497 void
7498 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7500 char *s_indent = (char *) alloca ((size_t) indent + 1);
7501 memset ((void *) s_indent, ' ', (size_t) indent);
7502 s_indent[indent] = '\0';
7504 /* Print basic_block's header. */
7505 if (verbosity >= 2)
7507 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7508 print_pred_bbs (file, bb);
7509 fprintf (file, "}, succs = {");
7510 print_succ_bbs (file, bb);
7511 fprintf (file, "})\n");
7514 /* Print basic_block's body. */
7515 if (verbosity >= 3)
7517 fprintf (file, "%s {\n", s_indent);
7518 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7519 fprintf (file, "%s }\n", s_indent);
7523 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7525 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7526 VERBOSITY level this outputs the contents of the loop, or just its
7527 structure. */
7529 static void
7530 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7532 char *s_indent;
7533 basic_block bb;
7535 if (loop == NULL)
7536 return;
7538 s_indent = (char *) alloca ((size_t) indent + 1);
7539 memset ((void *) s_indent, ' ', (size_t) indent);
7540 s_indent[indent] = '\0';
7542 /* Print loop's header. */
7543 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7544 if (loop->header)
7545 fprintf (file, "header = %d", loop->header->index);
7546 else
7548 fprintf (file, "deleted)\n");
7549 return;
7551 if (loop->latch)
7552 fprintf (file, ", latch = %d", loop->latch->index);
7553 else
7554 fprintf (file, ", multiple latches");
7555 fprintf (file, ", niter = ");
7556 print_generic_expr (file, loop->nb_iterations, 0);
7558 if (loop->any_upper_bound)
7560 fprintf (file, ", upper_bound = ");
7561 print_decu (loop->nb_iterations_upper_bound, file);
7564 if (loop->any_estimate)
7566 fprintf (file, ", estimate = ");
7567 print_decu (loop->nb_iterations_estimate, file);
7569 fprintf (file, ")\n");
7571 /* Print loop's body. */
7572 if (verbosity >= 1)
7574 fprintf (file, "%s{\n", s_indent);
7575 FOR_EACH_BB_FN (bb, cfun)
7576 if (bb->loop_father == loop)
7577 print_loops_bb (file, bb, indent, verbosity);
7579 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7580 fprintf (file, "%s}\n", s_indent);
7584 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7585 spaces. Following VERBOSITY level this outputs the contents of the
7586 loop, or just its structure. */
7588 static void
7589 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7590 int verbosity)
7592 if (loop == NULL)
7593 return;
7595 print_loop (file, loop, indent, verbosity);
7596 print_loop_and_siblings (file, loop->next, indent, verbosity);
7599 /* Follow a CFG edge from the entry point of the program, and on entry
7600 of a loop, pretty print the loop structure on FILE. */
7602 void
7603 print_loops (FILE *file, int verbosity)
7605 basic_block bb;
7607 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7608 if (bb && bb->loop_father)
7609 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump a loop (structure only, verbosity 0) to stderr.  Debugger
   convenience overload taking a reference.  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7620 DEBUG_FUNCTION void
7621 debug (struct loop *ptr)
7623 if (ptr)
7624 debug (*ptr);
7625 else
7626 fprintf (stderr, "<nil>\n");
/* Dump a loop verbosely (verbosity 3: structure, member blocks and
   their bodies) to stderr.  Debugger convenience overload.  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7637 DEBUG_FUNCTION void
7638 debug_verbose (struct loop *ptr)
7640 if (ptr)
7641 debug (*ptr);
7642 else
7643 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Dumps the whole loop tree of the current function to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM of the current function,
   at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
7672 /* Return true if BB ends with a call, possibly followed by some
7673 instructions that must stay with the call. Return false,
7674 otherwise. */
7676 static bool
7677 gimple_block_ends_with_call_p (basic_block bb)
7679 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7680 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7684 /* Return true if BB ends with a conditional branch. Return false,
7685 otherwise. */
7687 static bool
7688 gimple_block_ends_with_condjump_p (const_basic_block bb)
7690 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7691 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Nothrow, non-returns-twice builtins cannot terminate the block
     abnormally, so no fake edge is needed for them...  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any other call might not return.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call needs a fake edge only if it does not already
	 have one (all its existing successor edges being fake).  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile or input-only asms may also fail to fall through.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do if the function has only the fixed ENTRY/EXIT blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces a block split when the insertion is
		 committed, so the call no longer ends the last block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk statements backwards from the end of the block.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominators we cannot compute the set of dominated blocks;
     just remove the edge itself.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF = successors of the removed blocks that survive the removal.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8007 /* Purge dead EH edges from basic block BB. */
8009 bool
8010 gimple_purge_dead_eh_edges (basic_block bb)
8012 bool changed = false;
8013 edge e;
8014 edge_iterator ei;
8015 gimple stmt = last_stmt (bb);
8017 if (stmt && stmt_can_throw_internal (stmt))
8018 return false;
8020 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8022 if (e->flags & EDGE_EH)
8024 remove_edge_and_dominated_blocks (e);
8025 changed = true;
8027 else
8028 ei_next (&ei);
8031 return changed;
8034 /* Purge dead EH edges from basic block listed in BLOCKS. */
8036 bool
8037 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8039 bool changed = false;
8040 unsigned i;
8041 bitmap_iterator bi;
8043 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8045 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8047 /* Earlier gimple_purge_dead_eh_edges could have removed
8048 this basic block already. */
8049 gcc_assert (bb || changed);
8050 if (bb != NULL)
8051 changed |= gimple_purge_dead_eh_edges (bb);
8054 return changed;
8057 /* Purge dead abnormal call edges from basic block BB. */
8059 bool
8060 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8062 bool changed = false;
8063 edge e;
8064 edge_iterator ei;
8065 gimple stmt = last_stmt (bb);
8067 if (!cfun->has_nonlocal_label
8068 && !cfun->calls_setjmp)
8069 return false;
8071 if (stmt && stmt_can_make_abnormal_goto (stmt))
8072 return false;
8074 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8076 if (e->flags & EDGE_ABNORMAL)
8078 if (e->flags & EDGE_FALLTHRU)
8079 e->flags &= ~EDGE_ABNORMAL;
8080 else
8081 remove_edge_and_dominated_blocks (e);
8082 changed = true;
8084 else
8085 ei_next (&ei);
8088 return changed;
8091 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8093 bool
8094 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8096 bool changed = false;
8097 unsigned i;
8098 bitmap_iterator bi;
8100 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8102 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8104 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8105 this basic block already. */
8106 gcc_assert (bb || changed);
8107 if (bb != NULL)
8108 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8111 return changed;
8114 /* This function is called whenever a new edge is created or
8115 redirected. */
8117 static void
8118 gimple_execute_on_growing_pred (edge e)
8120 basic_block bb = e->dest;
8122 if (!gimple_seq_empty_p (phi_nodes (bb)))
8123 reserve_phi_args_for_new_edge (bb);
8126 /* This function is called immediately before edge E is removed from
8127 the edge vector E->dest->preds. */
8129 static void
8130 gimple_execute_on_shrinking_pred (edge e)
8132 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8133 remove_phi_args (e);
8136 /*---------------------------------------------------------------------------
8137 Helper functions for Loop versioning
8138 ---------------------------------------------------------------------------*/
8140 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8141 of 'first'. Both of them are dominated by 'new_head' basic block. When
8142 'new_head' was created by 'second's incoming edge it received phi arguments
8143 on the edge by split_edge(). Later, additional edge 'e' was created to
8144 connect 'new_head' and 'first'. Now this routine adds phi args on this
8145 additional edge 'e' that new_head to second edge received as part of edge
8146 splitting. */
8148 static void
8149 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8150 basic_block new_head, edge e)
8152 gphi *phi1, *phi2;
8153 gphi_iterator psi1, psi2;
8154 tree def;
8155 edge e2 = find_edge (new_head, second);
8157 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8158 edge, we should always have an edge from NEW_HEAD to SECOND. */
8159 gcc_assert (e2 != NULL);
8161 /* Browse all 'second' basic block phi nodes and add phi args to
8162 edge 'e' for 'first' head. PHI args are always in correct order. */
8164 for (psi2 = gsi_start_phis (second),
8165 psi1 = gsi_start_phis (first);
8166 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8167 gsi_next (&psi2), gsi_next (&psi1))
8169 phi1 = psi1.phi ();
8170 phi2 = psi2.phi ();
8171 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8172 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8177 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8178 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8179 the destination of the ELSE part. */
8181 static void
8182 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8183 basic_block second_head ATTRIBUTE_UNUSED,
8184 basic_block cond_bb, void *cond_e)
8186 gimple_stmt_iterator gsi;
8187 gimple new_cond_expr;
8188 tree cond_expr = (tree) cond_e;
8189 edge e0;
8191 /* Build new conditional expr */
8192 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8193 NULL_TREE, NULL_TREE);
8195 /* Add new cond in cond_bb. */
8196 gsi = gsi_last_bb (cond_bb);
8197 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8199 /* Adjust edges appropriately to connect new head with first head
8200 as well as second head. */
8201 e0 = single_succ_edge (cond_bb);
8202 e0->flags &= ~EDGE_FALLTHRU;
8203 e0->flags |= EDGE_FALSE_VALUE;
8207 /* Do book-keeping of basic block BB for the profile consistency checker.
8208 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8209 then do post-pass accounting. Store the counting in RECORD. */
8210 static void
8211 gimple_account_profile_record (basic_block bb, int after_pass,
8212 struct profile_record *record)
8214 gimple_stmt_iterator i;
8215 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8217 record->size[after_pass]
8218 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8219 if (profile_status_for_fn (cfun) == PROFILE_READ)
8220 record->time[after_pass]
8221 += estimate_num_insns (gsi_stmt (i),
8222 &eni_time_weights) * bb->count;
8223 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8224 record->time[after_pass]
8225 += estimate_num_insns (gsi_stmt (i),
8226 &eni_time_weights) * bb->frequency;
/* Table of CFG manipulation hooks for the GIMPLE representation; see
   struct cfg_hooks in cfghooks.h for the contract of each entry.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name */
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
};
/* Split all critical edges.  Also splits certain non-critical edges that
   edge insertion would otherwise split later (see below).  Returns 0
   (the pass's TODO flags).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge splitting pass ("crited").
   Requires a CFG (PROP_cfg) and provides PROP_no_crit_edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Clonable so the pass can be scheduled more than once.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
{
  /* Split BB after STMT; FALL is the edge from the shortened BB to the
     remainder.  Note the iterator must be taken after the split so it
     points at the new end of BB.  */
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block, entered when COND is true and
     falling through to the code after STMT.  */
  new_bb = create_empty_bb (bb);
  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: taken when COND is false.  */
  fall->flags = EDGE_FALSE_VALUE;

  /* Update dominance info.  BB now dominates both successors.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
8393 /* Build a ternary operation and gimplify it. Emit code before GSI.
8394 Return the gimple_val holding the result. */
8396 tree
8397 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8398 tree type, tree a, tree b, tree c)
8400 tree ret;
8401 location_t loc = gimple_location (gsi_stmt (*gsi));
8403 ret = fold_build3_loc (loc, code, type, a, b, c);
8404 STRIP_NOPS (ret);
8406 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8407 GSI_SAME_STMT);
8410 /* Build a binary operation and gimplify it. Emit code before GSI.
8411 Return the gimple_val holding the result. */
8413 tree
8414 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8415 tree type, tree a, tree b)
8417 tree ret;
8419 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8420 STRIP_NOPS (ret);
8422 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8423 GSI_SAME_STMT);
8426 /* Build a unary operation and gimplify it. Emit code before GSI.
8427 Return the gimple_val holding the result. */
8429 tree
8430 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8431 tree a)
8433 tree ret;
8435 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8436 STRIP_NOPS (ret);
8438 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8439 GSI_SAME_STMT);
8444 /* Given a basic block B which ends with a conditional and has
8445 precisely two successors, determine which of the edges is taken if
8446 the conditional is true and which is taken if the conditional is
8447 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8449 void
8450 extract_true_false_edges_from_block (basic_block b,
8451 edge *true_edge,
8452 edge *false_edge)
8454 edge e = EDGE_SUCC (b, 0);
8456 if (e->flags & EDGE_TRUE_VALUE)
8458 *true_edge = e;
8459 *false_edge = EDGE_SUCC (b, 1);
8461 else
8463 *false_edge = e;
8464 *true_edge = EDGE_SUCC (b, 1);
/* Emit return warnings.  */

namespace {

/* Pass descriptor for the return-warning pass.  Runs unconditionally
   (TV_NONE, no gate); requires a CFG.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
/* Warn about a noreturn function that can return, and about control
   reaching the end of a non-void function (-Wreturn-type), by inspecting
   the predecessors of FUN's exit block.  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Let the target veto these warnings (e.g. for naked functions).  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on a FUNCTION_DECL means "noreturn".  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      /* Prefer the location of an actual return statement; fall back
	 to the end of the function.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Recurse into nested statement containers.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose value is used, or an internal call, never warns.  */
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      /* Name the callee when it is known (direct call).  */
	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

/* Pass descriptor for the -Wunused-result warning pass.  Runs on the
   gimplified body, before CFG construction (PROP_gimple_any).  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale profile counts so the function body matches the count the
     cgraph node now has (it may have changed via cloning/inlining).  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      /* NB: the iterator is advanced manually; the removal branch below
	 does `continue' after gsi_remove, which already moved GSI.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* Calls newly discovered const/pure cannot take abnormal
		 edges and may need their VDEF/VUSE dropped.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }
	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  /* Drop EH information that became stale (e.g. a call marked
	     nothrow) and the now-dead EH edges.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

/* Pass descriptor for the CFG fixup pass; see execute_fixup_cfg.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Clonable so the pass can be scheduled more than once.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
8839 /* Garbage collection support for edge_def. */
8841 extern void gt_ggc_mx (tree&);
8842 extern void gt_ggc_mx (gimple&);
8843 extern void gt_ggc_mx (rtx&);
8844 extern void gt_ggc_mx (basic_block&);
8846 static void
8847 gt_ggc_mx (rtx_insn *& x)
8849 if (x)
8850 gt_ggc_mx_rtx_def ((void *) x);
/* Garbage-collection marker for an edge: marks the source and destination
   blocks, the pending statements or insns, and the goto_locus block.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* e->insns is a union; the live member depends on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
8866 /* PCH support for edge_def. */
8868 extern void gt_pch_nx (tree&);
8869 extern void gt_pch_nx (gimple&);
8870 extern void gt_pch_nx (rtx&);
8871 extern void gt_pch_nx (basic_block&);
8873 static void
8874 gt_pch_nx (rtx_insn *& x)
8876 if (x)
8877 gt_pch_nx_rtx_def ((void *) x);
/* PCH note-pointers walker for an edge: notes the source and destination
   blocks, the pending statements or insns, and the goto_locus block.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* e->insns is a union; the live member depends on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* PCH pointer-relocation walker for an edge: applies OP with COOKIE to
   each GC-pointer field of E, mirroring the marker above.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* e->insns is a union; the live member depends on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}