Add C++11 header <cuchar>.
[official-gcc.git] / gcc / tree-cfg.c
blob5ac73b3266d14dbfaa27a15387f8c66a0a1b824a
/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "trans-mem.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "cfganal.h"
37 #include "flags.h"
38 #include "gimple-pretty-print.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "cgraph.h"
46 #include "tree-cfg.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "tree-dump.h"
62 #include "tree-pass.h"
63 #include "diagnostic-core.h"
64 #include "except.h"
65 #include "cfgloop.h"
66 #include "tree-ssa-propagate.h"
67 #include "value-prof.h"
68 #include "tree-inline.h"
69 #include "target.h"
70 #include "tree-ssa-live.h"
71 #include "omp-low.h"
72 #include "tree-cfgcleanup.h"
73 #include "wide-int-print.h"
74 #include "gimplify.h"
76 /* This file contains functions for building the Control Flow Graph (CFG)
77 for a function tree. */
79 /* Local declarations. */
81 /* Initial capacity for the basic block array. */
82 static const int initial_cfg_capacity = 20;
84 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
85 which use a particular edge. The CASE_LABEL_EXPRs are chained together
86 via their CASE_CHAIN field, which we clear after we're done with the
87 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
89 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
90 update the case vector in response to edge redirections.
92 Right now this table is set up and torn down at key points in the
93 compilation process. It would be nice if we could make the table
94 more persistent. The key is getting notification of changes to
95 the CFG (particularly edge removal, creation and redirection). */
97 static hash_map<edge, tree> *edge_to_cases;
99 /* If we record edge_to_cases, this bitmap will hold indexes
100 of basic blocks that end in a GIMPLE_SWITCH which we touched
101 due to edge manipulations. */
103 static bitmap touched_switch_bbs;
105 /* CFG statistics. */
106 struct cfg_stats_d
108 long num_merged_labels;
111 static struct cfg_stats_d cfg_stats;
113 /* Data to pass to replace_block_vars_by_duplicates_1. */
114 struct replace_decls_d
116 hash_map<tree, tree> *vars_map;
117 tree to_context;
120 /* Hash table to store last discriminator assigned for each locus. */
121 struct locus_discrim_map
123 location_t locus;
124 int discriminator;
127 /* Hashtable helpers. */
129 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
131 static inline hashval_t hash (const locus_discrim_map *);
132 static inline bool equal (const locus_discrim_map *,
133 const locus_discrim_map *);
136 /* Trivial hash function for a location_t. ITEM is a pointer to
137 a hash table entry that maps a location_t to a discriminator. */
139 inline hashval_t
140 locus_discrim_hasher::hash (const locus_discrim_map *item)
142 return LOCATION_LINE (item->locus);
145 /* Equality function for the locus-to-discriminator map. A and B
146 point to the two hash table entries to compare. */
148 inline bool
149 locus_discrim_hasher::equal (const locus_discrim_map *a,
150 const locus_discrim_map *b)
152 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
155 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
157 /* Basic blocks and flowgraphs. */
158 static void make_blocks (gimple_seq);
160 /* Edges. */
161 static void make_edges (void);
162 static void assign_discriminators (void);
163 static void make_cond_expr_edges (basic_block);
164 static void make_gimple_switch_edges (gswitch *, basic_block);
165 static bool make_goto_expr_edges (basic_block);
166 static void make_gimple_asm_edges (basic_block);
167 static edge gimple_redirect_edge_and_branch (edge, basic_block);
168 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
170 /* Various helpers. */
171 static inline bool stmt_starts_bb_p (gimple, gimple);
172 static int gimple_verify_flow_info (void);
173 static void gimple_make_forwarder_block (edge);
174 static gimple first_non_label_stmt (basic_block);
175 static bool verify_gimple_transaction (gtransaction *);
176 static bool call_can_make_abnormal_goto (gimple);
178 /* Flowgraph optimization and cleanup. */
179 static void gimple_merge_blocks (basic_block, basic_block);
180 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
181 static void remove_bb (basic_block);
182 static edge find_taken_edge_computed_goto (basic_block, tree);
183 static edge find_taken_edge_cond_expr (basic_block, tree);
184 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
185 static tree find_case_label_for_value (gswitch *, tree);
187 void
188 init_empty_tree_cfg_for_function (struct function *fn)
190 /* Initialize the basic block array. */
191 init_flow (fn);
192 profile_status_for_fn (fn) = PROFILE_ABSENT;
193 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
194 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
195 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
196 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
197 initial_cfg_capacity);
199 /* Build a mapping of labels to their associated blocks. */
200 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
201 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
202 initial_cfg_capacity);
204 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
205 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
207 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
208 = EXIT_BLOCK_PTR_FOR_FN (fn);
209 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
210 = ENTRY_BLOCK_PTR_FOR_FN (fn);
213 void
214 init_empty_tree_cfg (void)
216 init_empty_tree_cfg_for_function (cfun);
219 /*---------------------------------------------------------------------------
220 Create basic blocks
221 ---------------------------------------------------------------------------*/
223 /* Entry point to the CFG builder for trees. SEQ is the sequence of
224 statements to be added to the flowgraph. */
226 static void
227 build_gimple_cfg (gimple_seq seq)
229 /* Register specific gimple functions. */
230 gimple_register_cfg_hooks ();
232 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
234 init_empty_tree_cfg ();
236 make_blocks (seq);
238 /* Make sure there is always at least one block, even if it's empty. */
239 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
240 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
242 /* Adjust the size of the array. */
243 if (basic_block_info_for_fn (cfun)->length ()
244 < (size_t) n_basic_blocks_for_fn (cfun))
245 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
246 n_basic_blocks_for_fn (cfun));
248 /* To speed up statement iterator walks, we first purge dead labels. */
249 cleanup_dead_labels ();
251 /* Group case nodes to reduce the number of edges.
252 We do this after cleaning up dead labels because otherwise we miss
253 a lot of obvious case merging opportunities. */
254 group_case_labels ();
256 /* Create the edges of the flowgraph. */
257 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
258 make_edges ();
259 assign_discriminators ();
260 cleanup_dead_labels ();
261 delete discriminator_per_locus;
262 discriminator_per_locus = NULL;
265 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
266 them and propagate the information to LOOP. We assume that the annotations
267 come immediately before the condition in BB, if any. */
269 static void
270 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
272 gimple_stmt_iterator gsi = gsi_last_bb (bb);
273 gimple stmt = gsi_stmt (gsi);
275 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
276 return;
278 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
280 stmt = gsi_stmt (gsi);
281 if (gimple_code (stmt) != GIMPLE_CALL)
282 break;
283 if (!gimple_call_internal_p (stmt)
284 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
285 break;
287 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
289 case annot_expr_ivdep_kind:
290 loop->safelen = INT_MAX;
291 break;
292 case annot_expr_no_vector_kind:
293 loop->dont_vectorize = true;
294 break;
295 case annot_expr_vector_kind:
296 loop->force_vectorize = true;
297 cfun->has_force_vectorize_loops = true;
298 break;
299 default:
300 gcc_unreachable ();
303 stmt = gimple_build_assign (gimple_call_lhs (stmt),
304 gimple_call_arg (stmt, 0));
305 gsi_replace (&gsi, stmt, true);
309 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
310 them and propagate the information to the loop. We assume that the
311 annotations come immediately before the condition of the loop. */
313 static void
314 replace_loop_annotate (void)
316 struct loop *loop;
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple stmt;
321 FOR_EACH_LOOP (loop, 0)
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
331 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
332 FOR_EACH_BB_FN (bb, cfun)
334 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
336 stmt = gsi_stmt (gsi);
337 if (gimple_code (stmt) != GIMPLE_CALL)
338 continue;
339 if (!gimple_call_internal_p (stmt)
340 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
341 continue;
343 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
345 case annot_expr_ivdep_kind:
346 case annot_expr_no_vector_kind:
347 case annot_expr_vector_kind:
348 break;
349 default:
350 gcc_unreachable ();
353 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
354 stmt = gimple_build_assign (gimple_call_lhs (stmt),
355 gimple_call_arg (stmt, 0));
356 gsi_replace (&gsi, stmt, true);
362 static unsigned int
363 execute_build_cfg (void)
365 gimple_seq body = gimple_body (current_function_decl);
367 build_gimple_cfg (body);
368 gimple_set_body (current_function_decl, NULL);
369 if (dump_file && (dump_flags & TDF_DETAILS))
371 fprintf (dump_file, "Scope blocks:\n");
372 dump_scope_blocks (dump_file, dump_flags);
374 cleanup_tree_cfg ();
375 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
376 replace_loop_annotate ();
377 return 0;
380 namespace {
382 const pass_data pass_data_build_cfg =
384 GIMPLE_PASS, /* type */
385 "cfg", /* name */
386 OPTGROUP_NONE, /* optinfo_flags */
387 TV_TREE_CFG, /* tv_id */
388 PROP_gimple_leh, /* properties_required */
389 ( PROP_cfg | PROP_loops ), /* properties_provided */
390 0, /* properties_destroyed */
391 0, /* todo_flags_start */
392 0, /* todo_flags_finish */
395 class pass_build_cfg : public gimple_opt_pass
397 public:
398 pass_build_cfg (gcc::context *ctxt)
399 : gimple_opt_pass (pass_data_build_cfg, ctxt)
402 /* opt_pass methods: */
403 virtual unsigned int execute (function *) { return execute_build_cfg (); }
405 }; // class pass_build_cfg
407 } // anon namespace
409 gimple_opt_pass *
410 make_pass_build_cfg (gcc::context *ctxt)
412 return new pass_build_cfg (ctxt);
416 /* Return true if T is a computed goto. */
418 bool
419 computed_goto_p (gimple t)
421 return (gimple_code (t) == GIMPLE_GOTO
422 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
425 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
426 the other edge points to a bb with just __builtin_unreachable ().
427 I.e. return true for C->M edge in:
428 <bb C>:
430 if (something)
431 goto <bb N>;
432 else
433 goto <bb M>;
434 <bb N>:
435 __builtin_unreachable ();
436 <bb M>: */
438 bool
439 assert_unreachable_fallthru_edge_p (edge e)
441 basic_block pred_bb = e->src;
442 gimple last = last_stmt (pred_bb);
443 if (last && gimple_code (last) == GIMPLE_COND)
445 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
446 if (other_bb == e->dest)
447 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
448 if (EDGE_COUNT (other_bb->succs) == 0)
450 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
451 gimple stmt;
453 if (gsi_end_p (gsi))
454 return false;
455 stmt = gsi_stmt (gsi);
456 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
458 gsi_next (&gsi);
459 if (gsi_end_p (gsi))
460 return false;
461 stmt = gsi_stmt (gsi);
463 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
466 return false;
470 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
471 could alter control flow except via eh. We initialize the flag at
472 CFG build time and only ever clear it later. */
474 static void
475 gimple_call_initialize_ctrl_altering (gimple stmt)
477 int flags = gimple_call_flags (stmt);
479 /* A call alters control flow if it can make an abnormal goto. */
480 if (call_can_make_abnormal_goto (stmt)
481 /* A call also alters control flow if it does not return. */
482 || flags & ECF_NORETURN
483 /* TM ending statements have backedges out of the transaction.
484 Return true so we split the basic block containing them.
485 Note that the TM_BUILTIN test is merely an optimization. */
486 || ((flags & ECF_TM_BUILTIN)
487 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
488 /* BUILT_IN_RETURN call is same as return statement. */
489 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
490 gimple_call_set_ctrl_altering (stmt, true);
491 else
492 gimple_call_set_ctrl_altering (stmt, false);
496 /* Insert SEQ after BB and build a flowgraph. */
498 static basic_block
499 make_blocks_1 (gimple_seq seq, basic_block bb)
501 gimple_stmt_iterator i = gsi_start (seq);
502 gimple stmt = NULL;
503 bool start_new_block = true;
504 bool first_stmt_of_seq = true;
506 while (!gsi_end_p (i))
508 gimple prev_stmt;
510 prev_stmt = stmt;
511 stmt = gsi_stmt (i);
513 if (stmt && is_gimple_call (stmt))
514 gimple_call_initialize_ctrl_altering (stmt);
516 /* If the statement starts a new basic block or if we have determined
517 in a previous pass that we need to create a new block for STMT, do
518 so now. */
519 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
521 if (!first_stmt_of_seq)
522 gsi_split_seq_before (&i, &seq);
523 bb = create_basic_block (seq, bb);
524 start_new_block = false;
527 /* Now add STMT to BB and create the subgraphs for special statement
528 codes. */
529 gimple_set_bb (stmt, bb);
531 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
532 next iteration. */
533 if (stmt_ends_bb_p (stmt))
535 /* If the stmt can make abnormal goto use a new temporary
536 for the assignment to the LHS. This makes sure the old value
537 of the LHS is available on the abnormal edge. Otherwise
538 we will end up with overlapping life-ranges for abnormal
539 SSA names. */
540 if (gimple_has_lhs (stmt)
541 && stmt_can_make_abnormal_goto (stmt)
542 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
544 tree lhs = gimple_get_lhs (stmt);
545 tree tmp = create_tmp_var (TREE_TYPE (lhs));
546 gimple s = gimple_build_assign (lhs, tmp);
547 gimple_set_location (s, gimple_location (stmt));
548 gimple_set_block (s, gimple_block (stmt));
549 gimple_set_lhs (stmt, tmp);
550 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
551 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
552 DECL_GIMPLE_REG_P (tmp) = 1;
553 gsi_insert_after (&i, s, GSI_SAME_STMT);
555 start_new_block = true;
558 gsi_next (&i);
559 first_stmt_of_seq = false;
561 return bb;
564 /* Build a flowgraph for the sequence of stmts SEQ. */
566 static void
567 make_blocks (gimple_seq seq)
569 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
572 /* Create and return a new empty basic block after bb AFTER. */
574 static basic_block
575 create_bb (void *h, void *e, basic_block after)
577 basic_block bb;
579 gcc_assert (!e);
581 /* Create and initialize a new basic block. Since alloc_block uses
582 GC allocation that clears memory to allocate a basic block, we do
583 not have to clear the newly allocated basic block here. */
584 bb = alloc_block ();
586 bb->index = last_basic_block_for_fn (cfun);
587 bb->flags = BB_NEW;
588 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
590 /* Add the new block to the linked list of blocks. */
591 link_block (bb, after);
593 /* Grow the basic block array if needed. */
594 if ((size_t) last_basic_block_for_fn (cfun)
595 == basic_block_info_for_fn (cfun)->length ())
597 size_t new_size =
598 (last_basic_block_for_fn (cfun)
599 + (last_basic_block_for_fn (cfun) + 3) / 4);
600 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
603 /* Add the newly created block to the array. */
604 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
606 n_basic_blocks_for_fn (cfun)++;
607 last_basic_block_for_fn (cfun)++;
609 return bb;
613 /*---------------------------------------------------------------------------
614 Edge creation
615 ---------------------------------------------------------------------------*/
617 /* If basic block BB has an abnormal edge to a basic block
618 containing IFN_ABNORMAL_DISPATCHER internal call, return
619 that the dispatcher's basic block, otherwise return NULL. */
621 basic_block
622 get_abnormal_succ_dispatcher (basic_block bb)
624 edge e;
625 edge_iterator ei;
627 FOR_EACH_EDGE (e, ei, bb->succs)
628 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
630 gimple_stmt_iterator gsi
631 = gsi_start_nondebug_after_labels_bb (e->dest);
632 gimple g = gsi_stmt (gsi);
633 if (g
634 && is_gimple_call (g)
635 && gimple_call_internal_p (g)
636 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
637 return e->dest;
639 return NULL;
642 /* Helper function for make_edges. Create a basic block with
643 with ABNORMAL_DISPATCHER internal call in it if needed, and
644 create abnormal edges from BBS to it and from it to FOR_BB
645 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
647 static void
648 handle_abnormal_edges (basic_block *dispatcher_bbs,
649 basic_block for_bb, int *bb_to_omp_idx,
650 auto_vec<basic_block> *bbs, bool computed_goto)
652 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
653 unsigned int idx = 0;
654 basic_block bb;
655 bool inner = false;
657 if (bb_to_omp_idx)
659 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
660 if (bb_to_omp_idx[for_bb->index] != 0)
661 inner = true;
664 /* If the dispatcher has been created already, then there are basic
665 blocks with abnormal edges to it, so just make a new edge to
666 for_bb. */
667 if (*dispatcher == NULL)
669 /* Check if there are any basic blocks that need to have
670 abnormal edges to this dispatcher. If there are none, return
671 early. */
672 if (bb_to_omp_idx == NULL)
674 if (bbs->is_empty ())
675 return;
677 else
679 FOR_EACH_VEC_ELT (*bbs, idx, bb)
680 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
681 break;
682 if (bb == NULL)
683 return;
686 /* Create the dispatcher bb. */
687 *dispatcher = create_basic_block (NULL, for_bb);
688 if (computed_goto)
690 /* Factor computed gotos into a common computed goto site. Also
691 record the location of that site so that we can un-factor the
692 gotos after we have converted back to normal form. */
693 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
695 /* Create the destination of the factored goto. Each original
696 computed goto will put its desired destination into this
697 variable and jump to the label we create immediately below. */
698 tree var = create_tmp_var (ptr_type_node, "gotovar");
700 /* Build a label for the new block which will contain the
701 factored computed goto. */
702 tree factored_label_decl
703 = create_artificial_label (UNKNOWN_LOCATION);
704 gimple factored_computed_goto_label
705 = gimple_build_label (factored_label_decl);
706 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
708 /* Build our new computed goto. */
709 gimple factored_computed_goto = gimple_build_goto (var);
710 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
712 FOR_EACH_VEC_ELT (*bbs, idx, bb)
714 if (bb_to_omp_idx
715 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
716 continue;
718 gsi = gsi_last_bb (bb);
719 gimple last = gsi_stmt (gsi);
721 gcc_assert (computed_goto_p (last));
723 /* Copy the original computed goto's destination into VAR. */
724 gimple assignment
725 = gimple_build_assign (var, gimple_goto_dest (last));
726 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
728 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
729 e->goto_locus = gimple_location (last);
730 gsi_remove (&gsi, true);
733 else
735 tree arg = inner ? boolean_true_node : boolean_false_node;
736 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
737 1, arg);
738 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
739 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
741 /* Create predecessor edges of the dispatcher. */
742 FOR_EACH_VEC_ELT (*bbs, idx, bb)
744 if (bb_to_omp_idx
745 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
746 continue;
747 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
752 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
755 /* Creates outgoing edges for BB. Returns 1 when it ends with an
756 computed goto, returns 2 when it ends with a statement that
757 might return to this function via an nonlocal goto, otherwise
758 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
760 static int
761 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
763 gimple last = last_stmt (bb);
764 bool fallthru = false;
765 int ret = 0;
767 if (!last)
768 return ret;
770 switch (gimple_code (last))
772 case GIMPLE_GOTO:
773 if (make_goto_expr_edges (bb))
774 ret = 1;
775 fallthru = false;
776 break;
777 case GIMPLE_RETURN:
779 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
780 e->goto_locus = gimple_location (last);
781 fallthru = false;
783 break;
784 case GIMPLE_COND:
785 make_cond_expr_edges (bb);
786 fallthru = false;
787 break;
788 case GIMPLE_SWITCH:
789 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
790 fallthru = false;
791 break;
792 case GIMPLE_RESX:
793 make_eh_edges (last);
794 fallthru = false;
795 break;
796 case GIMPLE_EH_DISPATCH:
797 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
798 break;
800 case GIMPLE_CALL:
801 /* If this function receives a nonlocal goto, then we need to
802 make edges from this call site to all the nonlocal goto
803 handlers. */
804 if (stmt_can_make_abnormal_goto (last))
805 ret = 2;
807 /* If this statement has reachable exception handlers, then
808 create abnormal edges to them. */
809 make_eh_edges (last);
811 /* BUILTIN_RETURN is really a return statement. */
812 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
814 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
815 fallthru = false;
817 /* Some calls are known not to return. */
818 else
819 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
820 break;
822 case GIMPLE_ASSIGN:
823 /* A GIMPLE_ASSIGN may throw internally and thus be considered
824 control-altering. */
825 if (is_ctrl_altering_stmt (last))
826 make_eh_edges (last);
827 fallthru = true;
828 break;
830 case GIMPLE_ASM:
831 make_gimple_asm_edges (bb);
832 fallthru = true;
833 break;
835 CASE_GIMPLE_OMP:
836 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
837 break;
839 case GIMPLE_TRANSACTION:
841 tree abort_label
842 = gimple_transaction_label (as_a <gtransaction *> (last));
843 if (abort_label)
844 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
845 fallthru = true;
847 break;
849 default:
850 gcc_assert (!stmt_ends_bb_p (last));
851 fallthru = true;
852 break;
855 if (fallthru)
856 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
858 return ret;
861 /* Join all the blocks in the flowgraph. */
863 static void
864 make_edges (void)
866 basic_block bb;
867 struct omp_region *cur_region = NULL;
868 auto_vec<basic_block> ab_edge_goto;
869 auto_vec<basic_block> ab_edge_call;
870 int *bb_to_omp_idx = NULL;
871 int cur_omp_region_idx = 0;
873 /* Create an edge from entry to the first block with executable
874 statements in it. */
875 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
876 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
877 EDGE_FALLTHRU);
879 /* Traverse the basic block array placing edges. */
880 FOR_EACH_BB_FN (bb, cfun)
882 int mer;
884 if (bb_to_omp_idx)
885 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
887 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
888 if (mer == 1)
889 ab_edge_goto.safe_push (bb);
890 else if (mer == 2)
891 ab_edge_call.safe_push (bb);
893 if (cur_region && bb_to_omp_idx == NULL)
894 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
897 /* Computed gotos are hell to deal with, especially if there are
898 lots of them with a large number of destinations. So we factor
899 them to a common computed goto location before we build the
900 edge list. After we convert back to normal form, we will un-factor
901 the computed gotos since factoring introduces an unwanted jump.
902 For non-local gotos and abnormal edges from calls to calls that return
903 twice or forced labels, factor the abnormal edges too, by having all
904 abnormal edges from the calls go to a common artificial basic block
905 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
906 basic block to all forced labels and calls returning twice.
907 We do this per-OpenMP structured block, because those regions
908 are guaranteed to be single entry single exit by the standard,
909 so it is not allowed to enter or exit such regions abnormally this way,
910 thus all computed gotos, non-local gotos and setjmp/longjmp calls
911 must not transfer control across SESE region boundaries. */
912 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
914 gimple_stmt_iterator gsi;
915 basic_block dispatcher_bb_array[2] = { NULL, NULL };
916 basic_block *dispatcher_bbs = dispatcher_bb_array;
917 int count = n_basic_blocks_for_fn (cfun);
919 if (bb_to_omp_idx)
920 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
922 FOR_EACH_BB_FN (bb, cfun)
924 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
926 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
927 tree target;
929 if (!label_stmt)
930 break;
932 target = gimple_label_label (label_stmt);
934 /* Make an edge to every label block that has been marked as a
935 potential target for a computed goto or a non-local goto. */
936 if (FORCED_LABEL (target))
937 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
938 &ab_edge_goto, true);
939 if (DECL_NONLOCAL (target))
941 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
942 &ab_edge_call, false);
943 break;
947 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
948 gsi_next_nondebug (&gsi);
949 if (!gsi_end_p (gsi))
951 /* Make an edge to every setjmp-like call. */
952 gimple call_stmt = gsi_stmt (gsi);
953 if (is_gimple_call (call_stmt)
954 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
955 || gimple_call_builtin_p (call_stmt,
956 BUILT_IN_SETJMP_RECEIVER)))
957 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
958 &ab_edge_call, false);
962 if (bb_to_omp_idx)
963 XDELETE (dispatcher_bbs);
966 XDELETE (bb_to_omp_idx);
968 free_omp_regions ();
971 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
972 needed. Returns true if new bbs were created.
973 Note: This is transitional code, and should not be used for new code. We
974 should be able to get rid of this by rewriting all target va-arg
975 gimplification hooks to use an interface gimple_build_cond_value as described
976 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
978 bool
979 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
981 gimple stmt = gsi_stmt (*gsi);
982 basic_block bb = gimple_bb (stmt);
983 basic_block lastbb, afterbb;
984 int old_num_bbs = n_basic_blocks_for_fn (cfun);
985 edge e;
986 lastbb = make_blocks_1 (seq, bb);
987 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
988 return false;
989 e = split_block (bb, stmt);
990 /* Move e->dest to come after the new basic blocks. */
991 afterbb = e->dest;
992 unlink_block (afterbb);
993 link_block (afterbb, lastbb);
994 redirect_edge_succ (e, bb->next_bb);
995 bb = bb->next_bb;
996 while (bb != afterbb)
998 struct omp_region *cur_region = NULL;
999 int cur_omp_region_idx = 0;
1000 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1001 gcc_assert (!mer && !cur_region);
1002 add_bb_to_loop (bb, afterbb->loop_father);
1003 bb = bb->next_bb;
1005 return true;
1008 /* Find the next available discriminator value for LOCUS. The
1009 discriminator distinguishes among several basic blocks that
1010 share a common locus, allowing for more accurate sample-based
1011 profiling. */
1013 static int
1014 next_discriminator_for_locus (location_t locus)
1016 struct locus_discrim_map item;
1017 struct locus_discrim_map **slot;
1019 item.locus = locus;
1020 item.discriminator = 0;
1021 slot = discriminator_per_locus->find_slot_with_hash (
1022 &item, LOCATION_LINE (locus), INSERT);
1023 gcc_assert (slot);
1024 if (*slot == HTAB_EMPTY_ENTRY)
1026 *slot = XNEW (struct locus_discrim_map);
1027 gcc_assert (*slot);
1028 (*slot)->locus = locus;
1029 (*slot)->discriminator = 0;
1031 (*slot)->discriminator++;
1032 return (*slot)->discriminator;
1035 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1037 static bool
1038 same_line_p (location_t locus1, location_t locus2)
1040 expanded_location from, to;
1042 if (locus1 == locus2)
1043 return true;
1045 from = expand_location (locus1);
1046 to = expand_location (locus2);
1048 if (from.line != to.line)
1049 return false;
1050 if (from.file == to.file)
1051 return true;
1052 return (from.file != NULL
1053 && to.file != NULL
1054 && filename_cmp (from.file, to.file) == 0);
1057 /* Assign discriminators to each basic block. */
1059 static void
1060 assign_discriminators (void)
1062 basic_block bb;
1064 FOR_EACH_BB_FN (bb, cfun)
1066 edge e;
1067 edge_iterator ei;
1068 gimple last = last_stmt (bb);
1069 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
/* Blocks without a meaningful location need no discriminator.  */
1071 if (locus == UNKNOWN_LOCATION)
1072 continue;
/* For each successor that starts or ends on the same source line as BB's
   last statement, the two blocks would be indistinguishable in line-based
   sample profiles, so give one of them a fresh discriminator.  */
1074 FOR_EACH_EDGE (e, ei, bb->succs)
1076 gimple first = first_non_label_stmt (e->dest);
1077 gimple last = last_stmt (e->dest);
1078 if ((first && same_line_p (locus, gimple_location (first)))
1079 || (last && same_line_p (locus, gimple_location (last))))
/* Prefer to mark BB itself if the destination is already marked and BB
   is not, otherwise mark the destination.  */
1081 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1082 bb->discriminator = next_discriminator_for_locus (locus);
1083 else
1084 e->dest->discriminator = next_discriminator_for_locus (locus);
1090 /* Create the edges for a GIMPLE_COND starting at block BB. */
1092 static void
1093 make_cond_expr_edges (basic_block bb)
1095 gcond *entry = as_a <gcond *> (last_stmt (bb));
1096 gimple then_stmt, else_stmt;
1097 basic_block then_bb, else_bb;
1098 tree then_label, else_label;
1099 edge e;
1101 gcc_assert (entry);
1102 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1104 /* Entry basic blocks for each component. */
1105 then_label = gimple_cond_true_label (entry);
1106 else_label = gimple_cond_false_label (entry);
1107 then_bb = label_to_block (then_label);
1108 else_bb = label_to_block (else_label);
1109 then_stmt = first_stmt (then_bb);
1110 else_stmt = first_stmt (else_bb);
/* The first make_edge call always creates a fresh edge, so E is non-NULL
   here.  */
1112 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1113 e->goto_locus = gimple_location (then_stmt);
/* The second call can return NULL when then_bb == else_bb (the edge
   already exists), hence the guard below — the asymmetry is deliberate.  */
1114 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1115 if (e)
1116 e->goto_locus = gimple_location (else_stmt);
1118 /* We do not need the labels anymore. */
1119 gimple_cond_set_true_label (entry, NULL_TREE);
1120 gimple_cond_set_false_label (entry, NULL_TREE);
1124 /* Called for each element in the hash table (P) as we delete the
1125 edge to cases hash table.
1127 Clear all the TREE_CHAINs to prevent problems with copying of
1128 SWITCH_EXPRs and structure sharing rules, then free the hash table
1129 element. */
1131 bool
1132 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1134 tree t, next;
1136 for (t = value; t; t = next)
1138 next = CASE_CHAIN (t);
1139 CASE_CHAIN (t) = NULL;
1142 return true;
1145 /* Start recording information mapping edges to case labels. */
1147 void
1148 start_recording_case_labels (void)
1150 gcc_assert (edge_to_cases == NULL);
1151 edge_to_cases = new hash_map<edge, tree>;
1152 touched_switch_bbs = BITMAP_ALLOC (NULL);
1155 /* Return nonzero if we are recording information for case labels. */
1157 static bool
1158 recording_case_labels_p (void)
1160 return (edge_to_cases != NULL);
1163 /* Stop recording information mapping edges to case labels and
1164 remove any information we have recorded. */
1165 void
1166 end_recording_case_labels (void)
1168 bitmap_iterator bi;
1169 unsigned i;
/* Sever the CASE_CHAIN lists we built before freeing the map.  */
1170 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1171 delete edge_to_cases;
1172 edge_to_cases = NULL;
/* Re-group case labels of every switch whose block was touched while
   recording was active.  */
1173 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1175 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1176 if (bb)
1178 gimple stmt = last_stmt (bb);
1179 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1180 group_case_labels_stmt (as_a <gswitch *> (stmt));
1183 BITMAP_FREE (touched_switch_bbs);
1186 /* If we are inside a {start,end}_recording_cases block, then return
1187 a chain of CASE_LABEL_EXPRs from T which reference E.
1189 Otherwise return NULL. */
1191 static tree
1192 get_cases_for_edge (edge e, gswitch *t)
1194 tree *slot;
1195 size_t i, n;
1197 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1198 chains available. Return NULL so the caller can detect this case. */
1199 if (!recording_case_labels_p ())
1200 return NULL;
1202 slot = edge_to_cases->get (e);
1203 if (slot)
1204 return *slot;
1206 /* If we did not find E in the hash table, then this must be the first
1207 time we have been queried for information about E & T. Add all the
1208 elements from T to the hash table then perform the query again. */
1210 n = gimple_switch_num_labels (t);
1211 for (i = 0; i < n; i++)
1213 tree elt = gimple_switch_label (t, i);
1214 tree lab = CASE_LABEL (elt);
1215 basic_block label_bb = label_to_block (lab);
1216 edge this_edge = find_edge (e->src, label_bb);
1218 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1219 a new chain. */
/* CASE_CHAIN is reused here as the "next" link of a per-edge list; the
   chains are torn down again by edge_to_cases_cleanup.  */
1220 tree &s = edge_to_cases->get_or_insert (this_edge);
1221 CASE_CHAIN (elt) = s;
1222 s = elt;
/* After populating the map above, E is guaranteed to have an entry.  */
1225 return *edge_to_cases->get (e);
1228 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1230 static void
1231 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1233 size_t i, n;
1235 n = gimple_switch_num_labels (entry);
1237 for (i = 0; i < n; ++i)
1239 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1240 basic_block label_bb = label_to_block (lab);
1241 make_edge (bb, label_bb, 0);
1246 /* Return the basic block holding label DEST. */
1248 basic_block
1249 label_to_block_fn (struct function *ifun, tree dest)
1251 int uid = LABEL_DECL_UID (dest);
1253 /* We would die hard when faced by an undefined label. Emit a label to
1254 the very first basic block. This will hopefully make even the dataflow
1255 and undefined variable warnings quite right. */
1256 if (seen_error () && uid < 0)
1258 gimple_stmt_iterator gsi =
1259 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS))
1260 gimple stmt;
1262 stmt = gimple_build_label (dest);
1263 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Inserting the label statement assigned DEST a valid UID; re-read it.  */
1264 uid = LABEL_DECL_UID (dest);
/* Out-of-range UIDs have no recorded block.  */
1266 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1267 return NULL;
1268 return (*ifun->cfg->x_label_to_block_map)[uid];
1271 /* Create edges for a goto statement at block BB. Returns true
1272 if abnormal edges should be created. */
1274 static bool
1275 make_goto_expr_edges (basic_block bb)
1277 gimple_stmt_iterator last = gsi_last_bb (bb);
1278 gimple goto_t = gsi_stmt (last);
1280 /* A simple GOTO creates normal edges. */
1281 if (simple_goto_p (goto_t))
1283 tree dest = gimple_goto_dest (goto_t);
1284 basic_block label_bb = label_to_block (dest);
1285 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1286 e->goto_locus = gimple_location (goto_t);
/* The goto statement itself is now redundant — the edge encodes it.  */
1287 gsi_remove (&last, true);
1288 return false;
1291 /* A computed GOTO creates abnormal edges. */
1292 return true;
1295 /* Create edges for an asm statement with labels at block BB. */
1297 static void
1298 make_gimple_asm_edges (basic_block bb)
1300 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1301 int i, n = gimple_asm_nlabels (stmt);
1303 for (i = 0; i < n; ++i)
1305 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1306 basic_block label_bb = label_to_block (label);
1307 make_edge (bb, label_bb, 0);
1311 /*---------------------------------------------------------------------------
1312 Flowgraph analysis
1313 ---------------------------------------------------------------------------*/
1315 /* Cleanup useless labels in basic blocks. This is something we wish
1316 to do early because it allows us to group case labels before creating
1317 the edges for the CFG, and it speeds up block statement iterators in
1318 all passes later on.
1319 We rerun this pass after CFG is created, to get rid of the labels that
1320 are no longer referenced. After that we do not run it any more, since
1321 (almost) no new labels should be created. */
1323 /* A map from basic block index to the leading label of that block. */
/* Allocated by cleanup_dead_labels (XCNEWVEC, so zero-initialized) and
   freed there again at the end of the pass.  */
1324 static struct label_record
1326 /* The label. */
1327 tree label;
1329 /* True if the label is referenced from somewhere. */
1330 bool used;
1331 } *label_for_bb;
1333 /* Given LABEL return the first label in the same basic block. */
/* As a side effect, marks that block's leading label as used, which keeps
   cleanup_dead_labels from purging it later.  */
1335 static tree
1336 main_block_label (tree label)
1338 basic_block bb = label_to_block (label);
1339 tree main_label = label_for_bb[bb->index].label;
1341 /* label_to_block possibly inserted undefined label into the chain. */
1342 if (!main_label)
1344 label_for_bb[bb->index].label = label;
1345 main_label = label;
1348 label_for_bb[bb->index].used = true;
1349 return main_label;
1352 /* Clean up redundant labels within the exception tree. */
1354 static void
1355 cleanup_dead_labels_eh (void)
1357 eh_landing_pad lp;
1358 eh_region r;
1359 tree lab;
1360 int i;
1362 if (cfun->eh == NULL)
1363 return;
/* Redirect each landing pad's label to its block's leading label, and
   transfer the landing-pad number when the label changes.  */
1365 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1366 if (lp && lp->post_landing_pad)
1368 lab = main_block_label (lp->post_landing_pad);
1369 if (lab != lp->post_landing_pad)
1371 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1372 EH_LANDING_PAD_NR (lab) = lp->index;
/* Redirect the labels stored in the region tree itself.  */
1376 FOR_ALL_EH_REGION (r)
1377 switch (r->type)
1379 case ERT_CLEANUP:
1380 case ERT_MUST_NOT_THROW:
1381 break;
1383 case ERT_TRY:
1385 eh_catch c;
1386 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1388 lab = c->label;
1389 if (lab)
1390 c->label = main_block_label (lab);
1393 break;
1395 case ERT_ALLOWED_EXCEPTIONS:
1396 lab = r->u.allowed.label;
1397 if (lab)
1398 r->u.allowed.label = main_block_label (lab);
1399 break;
1404 /* Cleanup redundant labels. This is a three-step process:
1405 1) Find the leading label for each block.
1406 2) Redirect all references to labels to the leading labels.
1407 3) Cleanup all useless labels. */
1409 void
1410 cleanup_dead_labels (void)
1412 basic_block bb;
/* XCNEWVEC zero-initializes, so every block starts with no label and
   used == false.  */
1413 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun))
1415 /* Find a suitable label for each block. We use the first user-defined
1416 label if there is one, or otherwise just the first label we see. */
1417 FOR_EACH_BB_FN (bb, cfun)
1419 gimple_stmt_iterator i;
1421 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1423 tree label;
1424 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels only appear at the start of a block; stop at the first
   non-label statement.  */
1426 if (!label_stmt)
1427 break;
1429 label = gimple_label_label (label_stmt);
1431 /* If we have not yet seen a label for the current block,
1432 remember this one and see if there are more labels. */
1433 if (!label_for_bb[bb->index].label)
1435 label_for_bb[bb->index].label = label;
1436 continue;
1439 /* If we did see a label for the current block already, but it
1440 is an artificially created label, replace it if the current
1441 label is a user defined label. */
1442 if (!DECL_ARTIFICIAL (label)
1443 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1445 label_for_bb[bb->index].label = label;
1446 break;
1451 /* Now redirect all jumps/branches to the selected label.
1452 First do so for each block ending in a control statement. */
1453 FOR_EACH_BB_FN (bb, cfun)
1455 gimple stmt = last_stmt (bb);
1456 tree label, new_label;
1458 if (!stmt)
1459 continue;
1461 switch (gimple_code (stmt))
1463 case GIMPLE_COND:
1465 gcond *cond_stmt = as_a <gcond *> (stmt);
1466 label = gimple_cond_true_label (cond_stmt);
1467 if (label)
1469 new_label = main_block_label (label);
1470 if (new_label != label)
1471 gimple_cond_set_true_label (cond_stmt, new_label);
1474 label = gimple_cond_false_label (cond_stmt);
1475 if (label)
1477 new_label = main_block_label (label);
1478 if (new_label != label)
1479 gimple_cond_set_false_label (cond_stmt, new_label);
1482 break;
1484 case GIMPLE_SWITCH:
1486 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1487 size_t i, n = gimple_switch_num_labels (switch_stmt);
1489 /* Replace all destination labels. */
1490 for (i = 0; i < n; ++i)
1492 tree case_label = gimple_switch_label (switch_stmt, i);
1493 label = CASE_LABEL (case_label);
1494 new_label = main_block_label (label);
1495 if (new_label != label)
1496 CASE_LABEL (case_label) = new_label;
1498 break;
1501 case GIMPLE_ASM:
1503 gasm *asm_stmt = as_a <gasm *> (stmt);
1504 int i, n = gimple_asm_nlabels (asm_stmt);
1506 for (i = 0; i < n; ++i)
1508 tree cons = gimple_asm_label_op (asm_stmt, i);
1509 tree label = main_block_label (TREE_VALUE (cons));
1510 TREE_VALUE (cons) = label;
1512 break;
1515 /* We have to handle gotos until they're removed, and we don't
1516 remove them until after we've created the CFG edges. */
1517 case GIMPLE_GOTO:
/* Computed gotos reference label addresses, which must not be
   rewritten here.  */
1518 if (!computed_goto_p (stmt))
1520 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1521 label = gimple_goto_dest (goto_stmt);
1522 new_label = main_block_label (label);
1523 if (new_label != label)
1524 gimple_goto_set_dest (goto_stmt, new_label);
1526 break;
1528 case GIMPLE_TRANSACTION:
1530 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1531 tree label = gimple_transaction_label (trans_stmt);
1532 if (label)
1534 tree new_label = main_block_label (label);
1535 if (new_label != label)
1536 gimple_transaction_set_label (trans_stmt, new_label);
1539 break;
1541 default:
1542 break;
1546 /* Do the same for the exception region tree labels. */
1547 cleanup_dead_labels_eh ();
1549 /* Finally, purge dead labels. All user-defined labels and labels that
1550 can be the target of non-local gotos and labels which have their
1551 address taken are preserved. */
1552 FOR_EACH_BB_FN (bb, cfun)
1554 gimple_stmt_iterator i;
1555 tree label_for_this_bb = label_for_bb[bb->index].label;
1557 if (!label_for_this_bb)
1558 continue;
1560 /* If the main label of the block is unused, we may still remove it. */
1561 if (!label_for_bb[bb->index].used)
1562 label_for_this_bb = NULL;
1564 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1566 tree label;
1567 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1569 if (!label_stmt)
1570 break;
1572 label = gimple_label_label (label_stmt);
1574 if (label == label_for_this_bb
1575 || !DECL_ARTIFICIAL (label)
1576 || DECL_NONLOCAL (label)
1577 || FORCED_LABEL (label))
1578 gsi_next (&i);
1579 else
1580 gsi_remove (&i, true);
1584 free (label_for_bb);
1587 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1588 the ones jumping to the same label.
1589 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1591 void
1592 group_case_labels_stmt (gswitch *stmt)
1594 int old_size = gimple_switch_num_labels (stmt);
1595 int i, j, new_size = old_size;
1596 basic_block default_bb = NULL;
1598 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)))
1600 /* Look for possible opportunities to merge cases. */
/* Index 0 is the default label, so scanning starts at 1.  */
1601 i = 1;
1602 while (i < old_size)
1604 tree base_case, base_high;
1605 basic_block base_bb;
1607 base_case = gimple_switch_label (stmt, i);
1609 gcc_assert (base_case);
1610 base_bb = label_to_block (CASE_LABEL (base_case));
1612 /* Discard cases that have the same destination as the
1613 default case. */
/* Discarded slots are marked NULL_TREE and compacted away below.  */
1614 if (base_bb == default_bb)
1616 gimple_switch_set_label (stmt, i, NULL_TREE);
1617 i++;
1618 new_size--;
1619 continue;
1622 base_high = CASE_HIGH (base_case)
1623 ? CASE_HIGH (base_case)
1624 : CASE_LOW (base_case);
1625 i++;
1627 /* Try to merge case labels. Break out when we reach the end
1628 of the label vector or when we cannot merge the next case
1629 label with the current one. */
1630 while (i < old_size)
1632 tree merge_case = gimple_switch_label (stmt, i);
1633 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
/* bhp1 = base_high + 1: the value the next case's low bound must
   equal for the ranges to be consecutive.  */
1634 wide_int bhp1 = wi::add (base_high, 1);
1636 /* Merge the cases if they jump to the same place,
1637 and their ranges are consecutive. */
1638 if (merge_bb == base_bb
1639 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1641 base_high = CASE_HIGH (merge_case) ?
1642 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1643 CASE_HIGH (base_case) = base_high;
1644 gimple_switch_set_label (stmt, i, NULL_TREE);
1645 new_size--;
1646 i++;
1648 else
1649 break;
1653 /* Compress the case labels in the label vector, and adjust the
1654 length of the vector. */
1655 for (i = 0, j = 0; i < new_size; i++)
1657 while (! gimple_switch_label (stmt, j))
1658 j++;
1659 gimple_switch_set_label (stmt, i,
1660 gimple_switch_label (stmt, j++));
1663 gcc_assert (new_size <= old_size);
1664 gimple_switch_set_num_labels (stmt, new_size);
1667 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1668 and scan the sorted vector of cases. Combine the ones jumping to the
1669 same label. */
1671 void
1672 group_case_labels (void)
1674 basic_block bb;
1676 FOR_EACH_BB_FN (bb, cfun)
1678 gimple stmt = last_stmt (bb);
1679 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1680 group_case_labels_stmt (as_a <gswitch *> (stmt));
1686 /* Checks whether we can merge block B into block A. */
1688 static bool
1687 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1689 gimple stmt;
/* A must fall straight through to B: exactly one successor edge, which
   is non-complex and leads to B, and B has no other predecessors.  */
1691 if (!single_succ_p (a))
1692 return false;
1694 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1695 return false;
1697 if (single_succ (a) != b)
1698 return false;
1700 if (!single_pred_p (b))
1701 return false;
1703 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1704 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1705 return false;
1707 /* If A ends by a statement causing exceptions or something similar, we
1708 cannot merge the blocks. */
1709 stmt = last_stmt (a);
1710 if (stmt && stmt_ends_bb_p (stmt))
1711 return false;
1713 /* Do not allow a block with only a non-local label to be merged. */
1714 if (stmt)
1715 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1716 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1717 return false;
1719 /* Examine the labels at the beginning of B. */
1720 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1721 gsi_next (&gsi))
1723 tree lab;
1724 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1725 if (!label_stmt)
1726 break;
1727 lab = gimple_label_label (label_stmt);
1729 /* Do not remove user forced labels or for -O0 any user labels. */
1730 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1731 return false;
1734 /* Protect simple loop latches. We only want to avoid merging
1735 the latch with the loop header or with a block in another
1736 loop in this case. */
1737 if (current_loops
1738 && b->loop_father->latch == b
1739 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1740 && (b->loop_father->header == a
1741 || b->loop_father != a->loop_father))
1742 return false;
1744 /* It must be possible to eliminate all phi nodes in B. If ssa form
1745 is not up-to-date and a name-mapping is registered, we cannot eliminate
1746 any phis. Symbols marked for renaming are never a problem though. */
1747 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1748 gsi_next (&gsi))
1750 gphi *phi = gsi.phi ();
1751 /* Technically only new names matter. */
1752 if (name_registered_for_update_p (PHI_RESULT (phi)))
1753 return false;
1756 /* When not optimizing, don't merge if we'd lose goto_locus. */
/* The locus survives only if a statement adjacent to the join point
   already carries the same location.  */
1757 if (!optimize
1758 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1760 location_t goto_locus = single_succ_edge (a)->goto_locus;
1761 gimple_stmt_iterator prev, next;
1762 prev = gsi_last_nondebug_bb (a);
1763 next = gsi_after_labels (b);
1764 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1765 gsi_next_nondebug (&next);
1766 if ((gsi_end_p (prev)
1767 || gimple_location (gsi_stmt (prev)) != goto_locus)
1768 && (gsi_end_p (next)
1769 || gimple_location (gsi_stmt (next)) != goto_locus))
1770 return false;
1773 return true;
1776 /* Replaces all uses of NAME by VAL. */
1778 void
1779 replace_uses_by (tree name, tree val)
1781 imm_use_iterator imm_iter;
1782 use_operand_p use;
1783 gimple stmt;
1784 edge e;
1786 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1788 /* Mark the block if we change the last stmt in it. */
1789 if (cfgcleanup_altered_bbs
1790 && stmt_ends_bb_p (stmt))
1791 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1793 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1795 replace_exp (use, val);
/* If VAL now flows over an abnormal PHI edge, it must carry the
   occurs-in-abnormal-phi flag.  */
1797 if (gimple_code (stmt) == GIMPLE_PHI)
1799 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1800 PHI_ARG_INDEX_FROM_USE (use));
1801 if (e->flags & EDGE_ABNORMAL
1802 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1804 /* This can only occur for virtual operands, since
1805 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1806 would prevent replacement. */
1807 gcc_checking_assert (virtual_operand_p (name));
1808 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* Non-PHI statements may now be foldable; also keep EH info in sync.  */
1813 if (gimple_code (stmt) != GIMPLE_PHI)
1815 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1816 gimple orig_stmt = stmt;
1817 size_t i;
1819 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1820 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1821 only change sth from non-invariant to invariant, and only
1822 when propagating constants. */
1823 if (is_gimple_min_invariant (val))
1824 for (i = 0; i < gimple_num_ops (stmt); i++)
1826 tree op = gimple_op (stmt, i);
1827 /* Operands may be empty here. For example, the labels
1828 of a GIMPLE_COND are nulled out following the creation
1829 of the corresponding CFG edges. */
1830 if (op && TREE_CODE (op) == ADDR_EXPR)
1831 recompute_tree_invariant_for_addr_expr (op);
1834 if (fold_stmt (&gsi))
1835 stmt = gsi_stmt (gsi);
1837 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1838 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1840 update_stmt (stmt);
/* Every use has been rewritten, so NAME must now be dead.  */
1844 gcc_checking_assert (has_zero_uses (name));
1846 /* Also update the trees stored in loop structures. */
1847 if (current_loops)
1849 struct loop *loop;
1851 FOR_EACH_LOOP (loop, 0)
1853 substitute_in_loop_info (loop, name, val);
1858 /* Merge block B into block A. */
1860 static void
1861 gimple_merge_blocks (basic_block a, basic_block b)
1863 gimple_stmt_iterator last, gsi;
1864 gphi_iterator psi;
1866 if (dump_file)
1867 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1869 /* Remove all single-valued PHI nodes from block B of the form
1870 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1871 gsi = gsi_last_bb (a);
1872 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1874 gimple phi = gsi_stmt (psi);
1875 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1876 gimple copy;
1877 bool may_replace_uses = (virtual_operand_p (def)
1878 || may_propagate_copy (def, use));
1880 /* In case we maintain loop closed ssa form, do not propagate arguments
1881 of loop exit phi nodes. */
1882 if (current_loops
1883 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1884 && !virtual_operand_p (def)
1885 && TREE_CODE (use) == SSA_NAME
1886 && a->loop_father != b->loop_father)
1887 may_replace_uses = false;
/* When propagation is not allowed, materialize the PHI as an
   explicit copy at the end of A instead.  */
1889 if (!may_replace_uses)
1891 gcc_assert (!virtual_operand_p (def));
1893 /* Note that just emitting the copies is fine -- there is no problem
1894 with ordering of phi nodes. This is because A is the single
1895 predecessor of B, therefore results of the phi nodes cannot
1896 appear as arguments of the phi nodes. */
1897 copy = gimple_build_assign (def, use);
1898 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1899 remove_phi_node (&psi, false);
1901 else
1903 /* If we deal with a PHI for virtual operands, we can simply
1904 propagate these without fussing with folding or updating
1905 the stmt. */
1906 if (virtual_operand_p (def))
1908 imm_use_iterator iter;
1909 use_operand_p use_p;
1910 gimple stmt;
1912 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1913 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1914 SET_USE (use_p, use);
1916 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1917 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1919 else
1920 replace_uses_by (def, use);
1922 remove_phi_node (&psi, true);
1926 /* Ensure that B follows A. */
1927 move_block_after (b, a);
1929 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1930 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1932 /* Remove labels from B and set gimple_bb to A for other statements. */
1933 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1935 gimple stmt = gsi_stmt (gsi);
1936 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1938 tree label = gimple_label_label (label_stmt);
1939 int lp_nr;
1941 gsi_remove (&gsi, false);
1943 /* Now that we can thread computed gotos, we might have
1944 a situation where we have a forced label in block B
1945 However, the label at the start of block B might still be
1946 used in other ways (think about the runtime checking for
1947 Fortran assigned gotos). So we can not just delete the
1948 label. Instead we move the label to the start of block A. */
1949 if (FORCED_LABEL (label))
1951 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1952 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1954 /* Other user labels keep around in a form of a debug stmt. */
1955 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1957 gimple dbg = gimple_build_debug_bind (label,
1958 integer_zero_node,
1959 stmt);
1960 gimple_debug_bind_reset_value (dbg);
1961 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* A removed landing-pad label leaves its landing pad without a
   post-landing-pad destination.  */
1964 lp_nr = EH_LANDING_PAD_NR (label);
1965 if (lp_nr)
1967 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1968 lp->post_landing_pad = NULL;
1971 else
1973 gimple_set_bb (stmt, a);
1974 gsi_next (&gsi);
1978 /* When merging two BBs, if their counts are different, the larger count
1979 is selected as the new bb count. This is to handle inconsistent
1980 profiles. */
1981 if (a->loop_father == b->loop_father)
1983 a->count = MAX (a->count, b->count);
1984 a->frequency = MAX (a->frequency, b->frequency);
1987 /* Merge the sequences. */
1988 last = gsi_last_bb (a);
1989 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1990 set_bb_seq (b, NULL);
1992 if (cfgcleanup_altered_bbs)
1993 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1997 /* Return the one of two successors of BB that is not reachable by a
1998 complex edge, if there is one. Else, return BB. We use
1999 this in optimizations that use post-dominators for their heuristics,
2000 to catch the cases in C++ where function calls are involved. */
2002 basic_block
2003 single_noncomplex_succ (basic_block bb)
2005 edge e0, e1;
2006 if (EDGE_COUNT (bb->succs) != 2)
2007 return bb;
2009 e0 = EDGE_SUCC (bb, 0);
2010 e1 = EDGE_SUCC (bb, 1);
2011 if (e0->flags & EDGE_COMPLEX)
2012 return e1->dest;
2013 if (e1->flags & EDGE_COMPLEX)
2014 return e0->dest;
2016 return bb;
2019 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2021 void
2022 notice_special_calls (gcall *call)
2024 int flags = gimple_call_flags (call);
2026 if (flags & ECF_MAY_BE_ALLOCA)
2027 cfun->calls_alloca = true;
2028 if (flags & ECF_RETURNS_TWICE)
2029 cfun->calls_setjmp = true;
2033 /* Clear flags set by notice_special_calls. Used by dead code removal
2034 to update the flags. */
2036 void
2037 clear_special_calls (void)
2039 cfun->calls_alloca = false;
2040 cfun->calls_setjmp = false;
2043 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2045 static void
2046 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2048 /* Since this block is no longer reachable, we can just delete all
2049 of its PHI nodes. */
2050 remove_phi_nodes (bb);
2052 /* Remove edges to BB's successors. */
2053 while (EDGE_COUNT (bb->succs) > 0)
2054 remove_edge (EDGE_SUCC (bb, 0));
2058 /* Remove statements of basic block BB. */
2060 static void
2061 remove_bb (basic_block bb)
2063 gimple_stmt_iterator i;
2065 if (dump_file)
2067 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2068 if (dump_flags & TDF_DETAILS)
2070 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2071 fprintf (dump_file, "\n");
2075 if (current_loops)
2077 struct loop *loop = bb->loop_father;
2079 /* If a loop gets removed, clean up the information associated
2080 with it. */
2081 if (loop->latch == bb
2082 || loop->header == bb)
2083 free_numbers_of_iterations_estimates_loop (loop);
2086 /* Remove all the instructions in the block. */
2087 if (bb_seq (bb) != NULL)
2089 /* Walk backwards so as to get a chance to substitute all
2090 released DEFs into debug stmts. See
2091 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2092 details. */
2093 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2095 gimple stmt = gsi_stmt (i);
2096 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2097 if (label_stmt
2098 && (FORCED_LABEL (gimple_label_label (label_stmt))
2099 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2101 basic_block new_bb;
2102 gimple_stmt_iterator new_gsi;
2104 /* A non-reachable non-local label may still be referenced.
2105 But it no longer needs to carry the extra semantics of
2106 non-locality. */
2107 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2109 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2110 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
/* Referenced labels are rescued into the previous block rather
   than deleted with BB.  */
2113 new_bb = bb->prev_bb;
2114 new_gsi = gsi_start_bb (new_bb);
2115 gsi_remove (&i, false);
2116 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2118 else
2120 /* Release SSA definitions if we are in SSA. Note that we
2121 may be called when not in SSA. For example,
2122 final_cleanup calls this function via
2123 cleanup_tree_cfg. */
2124 if (gimple_in_ssa_p (cfun))
2125 release_defs (stmt);
2127 gsi_remove (&i, true);
/* gsi_remove invalidated our backwards walk position; re-anchor
   at the (new) last statement or step back.  */
2130 if (gsi_end_p (i))
2131 i = gsi_last_bb (bb);
2132 else
2133 gsi_prev (&i);
2137 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2138 bb->il.gimple.seq = NULL;
2139 bb->il.gimple.phi_nodes = NULL;
2143 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2144 predicate VAL, return the edge that will be taken out of the block.
2145 If VAL does not match a unique edge, NULL is returned. */
2147 edge
2148 find_taken_edge (basic_block bb, tree val)
2150 gimple stmt;
2152 stmt = last_stmt (bb);
2154 gcc_assert (stmt);
2155 gcc_assert (is_ctrl_stmt (stmt));
2157 if (val == NULL)
2158 return NULL;
/* Only constant predicates determine a unique edge.  */
2160 if (!is_gimple_min_invariant (val))
2161 return NULL;
/* Dispatch on the kind of control statement ending BB.  */
2163 if (gimple_code (stmt) == GIMPLE_COND)
2164 return find_taken_edge_cond_expr (bb, val);
2166 if (gimple_code (stmt) == GIMPLE_SWITCH)
2167 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2169 if (computed_goto_p (stmt))
2171 /* Only optimize if the argument is a label, if the argument is
2172 not a label then we can not construct a proper CFG.
2174 It may be the case that we only need to allow the LABEL_REF to
2175 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2176 appear inside a LABEL_EXPR just to be safe. */
2177 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2178 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2179 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2180 return NULL;
/* is_ctrl_stmt guaranteed one of the cases above matched.  */
2183 gcc_unreachable ();
2186 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2187 statement, determine which of the outgoing edges will be taken out of the
2188 block. Return NULL if either edge may be taken. */
2190 static edge
2191 find_taken_edge_computed_goto (basic_block bb, tree val)
2193 basic_block dest;
2194 edge e = NULL;
2196 dest = label_to_block (val);
2197 if (dest)
2199 e = find_edge (bb, dest);
2200 gcc_assert (e != NULL);
2203 return e;
2206 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2207 statement, determine which of the two edges will be taken out of the
2208 block. Return NULL if either edge may be taken. */
2210 static edge
2211 find_taken_edge_cond_expr (basic_block bb, tree val)
2213 edge true_edge, false_edge;
2215 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2217 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2218 return (integer_zerop (val) ? false_edge : true_edge);
2221 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2222 statement, determine which edge will be taken out of the block. Return
2223 NULL if any edge may be taken. */
2225 static edge
2226 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2227 tree val)
2229 basic_block dest_bb;
2230 edge e;
2231 tree taken_case;
2233 taken_case = find_case_label_for_value (switch_stmt, val);
2234 dest_bb = label_to_block (CASE_LABEL (taken_case));
2236 e = find_edge (bb, dest_bb);
2237 gcc_assert (e);
2238 return e;
2242 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2243 We can make optimal use here of the fact that the case labels are
2244 sorted: We can do a binary search for a case matching VAL. */
2246 static tree
2247 find_case_label_for_value (gswitch *switch_stmt, tree val)
2249 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2250 tree default_case = gimple_switch_default_label (switch_stmt);
/* Binary search over labels [1, n); label 0 is the default.  The
   invariant is that any matching label lies in [low, high).  */
2252 for (low = 0, high = n; high - low > 1; )
2254 size_t i = (high + low) / 2;
2255 tree t = gimple_switch_label (switch_stmt, i);
2256 int cmp;
2258 /* Cache the result of comparing CASE_LOW and val. */
2259 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2261 if (cmp > 0)
2262 high = i;
2263 else
2264 low = i;
2266 if (CASE_HIGH (t) == NULL)
2268 /* A single-valued case label. */
2269 if (cmp == 0)
2270 return t;
2272 else
2274 /* A case range. We can only handle integer ranges. */
2275 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2276 return t;
/* No case matched VAL; the default label is taken.  */
2280 return default_case;
2284 /* Dump a basic block on stderr. */
2286 void
2287 gimple_debug_bb (basic_block bb)
2289 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2293 /* Dump basic block with index N on stderr. */
2295 basic_block
2296 gimple_debug_bb_n (int n)
2298 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2299 return BASIC_BLOCK_FOR_FN (cfun, n);
2303 /* Dump the CFG on stderr.
2305 FLAGS are the same used by the tree dumping functions
2306 (see TDF_* in dumpfile.h). */
2308 void
2309 gimple_debug_cfg (int flags)
2311 gimple_dump_cfg (stderr, flags);
2315 /* Dump the program showing basic block boundaries on the given FILE.
2317 FLAGS are the same used by the tree dumping functions (see TDF_* in
2318 tree.h). */
2320 void
2321 gimple_dump_cfg (FILE *file, int flags)
2323 if (flags & TDF_DETAILS)
2325 dump_function_header (file, current_function_decl, flags);
2326 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2327 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2328 last_basic_block_for_fn (cfun));
2330 brief_dump_cfg (file, flags | TDF_COMMENT);
2331 fprintf (file, "\n");
2334 if (flags & TDF_STATS)
2335 dump_cfg_stats (file);
2337 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels across every function dumped in
     this compiler run (static, so it persists between calls).  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  /* Column layouts shared by the table rows below.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by basic block structures.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Memory consumed by edge structures; count successors only so each
     edge is counted exactly once.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2391 /* Dump CFG statistics on stderr. Keep extern so that it's always
2392 linked in the final executable. */
2394 DEBUG_FUNCTION void
2395 debug_cfg_stats (void)
2397 dump_cfg_stats (stderr);
2400 /*---------------------------------------------------------------------------
2401 Miscellaneous helpers
2402 ---------------------------------------------------------------------------*/
2404 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2405 flow. Transfers of control flow associated with EH are excluded. */
2407 static bool
2408 call_can_make_abnormal_goto (gimple t)
2410 /* If the function has no non-local labels, then a call cannot make an
2411 abnormal transfer of control. */
2412 if (!cfun->has_nonlocal_label
2413 && !cfun->calls_setjmp)
2414 return false;
2416 /* Likewise if the call has no side effects. */
2417 if (!gimple_has_side_effects (t))
2418 return false;
2420 /* Likewise if the called function is leaf. */
2421 if (gimple_call_flags (t) & ECF_LEAF)
2422 return false;
2424 return true;
2428 /* Return true if T can make an abnormal transfer of control flow.
2429 Transfers of control flow associated with EH are excluded. */
2431 bool
2432 stmt_can_make_abnormal_goto (gimple t)
2434 if (computed_goto_p (t))
2435 return true;
2436 if (is_gimple_call (t))
2437 return call_can_make_abnormal_goto (t);
2438 return false;
2442 /* Return true if T represents a stmt that always transfers control. */
2444 bool
2445 is_ctrl_stmt (gimple t)
2447 switch (gimple_code (t))
2449 case GIMPLE_COND:
2450 case GIMPLE_SWITCH:
2451 case GIMPLE_GOTO:
2452 case GIMPLE_RETURN:
2453 case GIMPLE_RESX:
2454 return true;
2455 default:
2456 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with labels ("asm goto") may branch to any of them.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2506 /* Return true if T is a simple local goto. */
2508 bool
2509 simple_goto_p (gimple t)
2511 return (gimple_code (t) == GIMPLE_GOTO
2512 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A preceding nonlocal label still forces a block break.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  /* Two adjacent ordinary labels share one block; record the
	     merge for the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2561 /* Return true if T should end a basic block. */
2563 bool
2564 stmt_ends_bb_p (gimple t)
2566 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Release the label-index-to-block map; it is rebuilt the next time
     the CFG is constructed.  */
  vec_free (label_to_block_map_for_fn (cfun));
}
2577 /* Return the virtual phi in BB. */
2579 gphi *
2580 get_virtual_phi (basic_block bb)
2582 for (gphi_iterator gsi = gsi_start_phis (bb);
2583 !gsi_end_p (gsi);
2584 gsi_next (&gsi))
2586 gphi *phi = gsi.phi ();
2588 if (virtual_operand_p (PHI_RESULT (phi)))
2589 return phi;
2592 return NULL;
2595 /* Return the first statement in basic block BB. */
2597 gimple
2598 first_stmt (basic_block bb)
2600 gimple_stmt_iterator i = gsi_start_bb (bb);
2601 gimple stmt = NULL;
2603 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2605 gsi_next (&i);
2606 stmt = NULL;
2608 return stmt;
2611 /* Return the first non-label statement in basic block BB. */
2613 static gimple
2614 first_non_label_stmt (basic_block bb)
2616 gimple_stmt_iterator i = gsi_start_bb (bb);
2617 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2618 gsi_next (&i);
2619 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2622 /* Return the last statement in basic block BB. */
2624 gimple
2625 last_stmt (basic_block bb)
2627 gimple_stmt_iterator i = gsi_last_bb (bb);
2628 gimple stmt = NULL;
2630 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2632 gsi_prev (&i);
2633 stmt = NULL;
2635 return stmt;
2638 /* Return the last statement of an otherwise empty block. Return NULL
2639 if the block is totally empty, or if it contains more than one
2640 statement. */
2642 gimple
2643 last_and_only_stmt (basic_block bb)
2645 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2646 gimple last, prev;
2648 if (gsi_end_p (i))
2649 return NULL;
2651 last = gsi_stmt (i);
2652 gsi_prev_nondebug (&i);
2653 if (gsi_end_p (i))
2654 return last;
2656 /* Empty statements should no longer appear in the instruction stream.
2657 Everything that might have appeared before should be deleted by
2658 remove_useless_stmts, and the optimizers should just gsi_remove
2659 instead of smashing with build_empty_stmt.
2661 Thus the only thing that should appear here in a block containing
2662 one executable statement is a label. */
2663 prev = gsi_stmt (i);
2664 if (gimple_code (prev) == GIMPLE_LABEL)
2665 return last;
2666 else
2667 return NULL;
2670 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2672 static void
2673 reinstall_phi_args (edge new_edge, edge old_edge)
2675 edge_var_map *vm;
2676 int i;
2677 gphi_iterator phis;
2679 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2680 if (!v)
2681 return;
2683 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2684 v->iterate (i, &vm) && !gsi_end_p (phis);
2685 i++, gsi_next (&phis))
2687 gphi *phi = phis.phi ();
2688 tree result = redirect_edge_var_map_result (vm);
2689 tree arg = redirect_edge_var_map_def (vm);
2691 gcc_assert (result == gimple_phi_result (phi));
2693 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2696 redirect_edge_var_map_clear (old_edge);
2699 /* Returns the basic block after which the new basic block created
2700 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2701 near its "logical" location. This is of most help to humans looking
2702 at debugging dumps. */
2704 basic_block
2705 split_edge_bb_loc (edge edge_in)
2707 basic_block dest = edge_in->dest;
2708 basic_block dest_prev = dest->prev_bb;
2710 if (dest_prev)
2712 edge e = find_edge (dest_prev, dest);
2713 if (e && !(e->flags & EDGE_COMPLEX))
2714 return edge_in->src;
2716 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block intercepts all of EDGE_IN's executions, so it
     inherits the edge's frequency and count.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirecting EDGE_IN to NEW_BB queues DEST's PHI arguments on the
     edge; reinstall them on the new fallthru edge into DEST.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2750 /* Verify properties of the address expression T with base object BASE. */
2752 static tree
2753 verify_address (tree t, tree base)
2755 bool old_constant;
2756 bool old_side_effects;
2757 bool new_constant;
2758 bool new_side_effects;
2760 old_constant = TREE_CONSTANT (t);
2761 old_side_effects = TREE_SIDE_EFFECTS (t);
2763 recompute_tree_invariant_for_addr_expr (t);
2764 new_side_effects = TREE_SIDE_EFFECTS (t);
2765 new_constant = TREE_CONSTANT (t);
2767 if (old_constant != new_constant)
2769 error ("constant not recomputed when ADDR_EXPR changed");
2770 return t;
2772 if (old_side_effects != new_side_effects)
2774 error ("side effects not recomputed when ADDR_EXPR changed");
2775 return t;
2778 if (!(TREE_CODE (base) == VAR_DECL
2779 || TREE_CODE (base) == PARM_DECL
2780 || TREE_CODE (base) == RESULT_DECL))
2781 return NULL_TREE;
2783 if (DECL_GIMPLE_REG_P (base))
2785 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2786 return base;
2789 return NULL_TREE;
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending subtree on error, NULL
   otherwise.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.
     NOTE: evaluates its first argument twice; pass only plain operand
     indices.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF is replaced by MEM_REF during gimplification.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand is an INTEGER_CST of pointer type.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes never survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* T1 is the bit-size, T2 the bit-position of the reference
	     into T0.  */
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type.  */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* CASE_CHAIN is reused for the case-to-block map after CFG
	 construction; it must be empty here.  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3091 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3092 Returns true if there is an error, otherwise false. */
3094 static bool
3095 verify_types_in_gimple_min_lval (tree expr)
3097 tree op;
3099 if (is_gimple_id (expr))
3100 return false;
3102 if (TREE_CODE (expr) != TARGET_MEM_REF
3103 && TREE_CODE (expr) != MEM_REF)
3105 error ("invalid expression for min lvalue");
3106 return true;
3109 /* TARGET_MEM_REFs are strange beasts. */
3110 if (TREE_CODE (expr) == TARGET_MEM_REF)
3111 return false;
3113 op = TREE_OPERAND (expr, 0);
3114 if (!is_gimple_val (op))
3116 error ("invalid operand in indirect reference");
3117 debug_generic_stmt (op);
3118 return true;
3120 /* Memory references now generally can involve a value conversion. */
3122 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the handled-component wrappers from the outside in, checking
     each level against the type of its operand.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (the latter two optional)
	     must all be GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* The innermost object is either a decl or a memory reference.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3259 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3260 list of pointer-to types that is trivially convertible to DEST. */
3262 static bool
3263 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3265 tree src;
3267 if (!TYPE_POINTER_TO (src_obj))
3268 return true;
3270 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3271 if (useless_type_conversion_p (dest, src))
3272 return true;
3274 return false;
3277 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3278 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3280 static bool
3281 valid_fixed_convert_types_p (tree type1, tree type2)
3283 return (FIXED_POINT_TYPE_P (type1)
3284 && (INTEGRAL_TYPE_P (type2)
3285 || SCALAR_FLOAT_TYPE_P (type2)
3286 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no callee expression; ordinary calls must
     have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee expression must be a pointer to a function or method.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (lhs
      && gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-typed arguments must be GIMPLE values; aggregate
	 arguments must at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error, false if the
   comparison is well-formed.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A scalar (boolean) result is not allowed for vector operands;
	 vector comparisons must produce a vector mask.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3503 /* Verify a gimple assignment statement STMT with an unary rhs.
3504 Returns true if anything is wrong. */
3506 static bool
3507 verify_gimple_assign_unary (gassign *stmt)
3509 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3510 tree lhs = gimple_assign_lhs (stmt);
3511 tree lhs_type = TREE_TYPE (lhs);
3512 tree rhs1 = gimple_assign_rhs1 (stmt);
3513 tree rhs1_type = TREE_TYPE (rhs1);
/* A unary operation always produces a register value.  */
3515 if (!is_gimple_reg (lhs))
3517 error ("non-register as LHS of unary operation");
3518 return true;
3521 if (!is_gimple_val (rhs1))
3523 error ("invalid operand in unary operation");
3524 return true;
3527 /* First handle conversions. */
3528 switch (rhs_code)
3530 CASE_CONVERT:
3532 /* Allow conversions from pointer type to integral type only if
3533 there is no sign or zero extension involved.
3534 For targets were the precision of ptrofftype doesn't match that
3535 of pointers we need to allow arbitrary conversions to ptrofftype. */
3536 if ((POINTER_TYPE_P (lhs_type)
3537 && INTEGRAL_TYPE_P (rhs1_type))
3538 || (POINTER_TYPE_P (rhs1_type)
3539 && INTEGRAL_TYPE_P (lhs_type)
3540 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3541 || ptrofftype_p (sizetype))))
3542 return false;
3544 /* Allow conversion from integral to offset type and vice versa. */
3545 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3546 && INTEGRAL_TYPE_P (rhs1_type))
3547 || (INTEGRAL_TYPE_P (lhs_type)
3548 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3549 return false;
3551 /* Otherwise assert we are converting between types of the
3552 same kind. */
3553 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3555 error ("invalid types in nop conversion");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 return true;
3561 return false;
3564 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers and the address spaces must differ,
   otherwise a plain conversion should have been used.  */
3566 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3567 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3568 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3570 error ("invalid types in address space conversion");
3571 debug_generic_expr (lhs_type);
3572 debug_generic_expr (rhs1_type);
3573 return true;
3576 return false;
3579 case FIXED_CONVERT_EXPR:
3581 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3582 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3584 error ("invalid types in fixed-point conversion");
3585 debug_generic_expr (lhs_type);
3586 debug_generic_expr (rhs1_type);
3587 return true;
3590 return false;
3593 case FLOAT_EXPR:
/* Integer-to-float, either scalar-to-scalar or vector-to-vector.  */
3595 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3596 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3597 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3599 error ("invalid types in conversion to floating point");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 return true;
3605 return false;
3608 case FIX_TRUNC_EXPR:
/* Float-to-integer, the mirror image of FLOAT_EXPR above.  */
3610 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3611 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3612 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3614 error ("invalid types in conversion to integer")&#x3B;
3615 debug_generic_expr (lhs_type);
3616 debug_generic_expr (rhs1_type);
3617 return true;
3620 return false;
3622 case REDUC_MAX_EXPR:
3623 case REDUC_MIN_EXPR:
3624 case REDUC_PLUS_EXPR:
/* Whole-vector reductions collapse a vector into one element.  */
3625 if (!VECTOR_TYPE_P (rhs1_type)
3626 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3628 error ("reduction should convert from vector to element type");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3633 return false;
3635 case VEC_UNPACK_HI_EXPR:
3636 case VEC_UNPACK_LO_EXPR:
3637 case VEC_UNPACK_FLOAT_HI_EXPR:
3638 case VEC_UNPACK_FLOAT_LO_EXPR:
3639 /* FIXME. */
3640 return false;
3642 case NEGATE_EXPR:
3643 case ABS_EXPR:
3644 case BIT_NOT_EXPR:
3645 case PAREN_EXPR:
3646 case CONJ_EXPR:
3647 break;
3649 default:
3650 gcc_unreachable ();
3653 /* For the remaining codes assert there is no conversion involved. */
3654 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3656 error ("non-trivial conversion in unary operation");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 return true;
3662 return false;
3665 /* Verify a gimple assignment statement STMT with a binary rhs.
3666 Returns true if anything is wrong. */
3668 static bool
3669 verify_gimple_assign_binary (gassign *stmt)
3671 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3672 tree lhs = gimple_assign_lhs (stmt);
3673 tree lhs_type = TREE_TYPE (lhs);
3674 tree rhs1 = gimple_assign_rhs1 (stmt);
3675 tree rhs1_type = TREE_TYPE (rhs1);
3676 tree rhs2 = gimple_assign_rhs2 (stmt);
3677 tree rhs2_type = TREE_TYPE (rhs2);
/* A binary operation always produces a register value.  */
3679 if (!is_gimple_reg (lhs))
3681 error ("non-register as LHS of binary operation");
3682 return true;
3685 if (!is_gimple_val (rhs1)
3686 || !is_gimple_val (rhs2))
3688 error ("invalid operands in binary operation");
3689 return true;
3692 /* First handle operations that involve different types. */
3693 switch (rhs_code)
3695 case COMPLEX_EXPR:
3697 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3698 || !(INTEGRAL_TYPE_P (rhs1_type)
3699 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3700 || !(INTEGRAL_TYPE_P (rhs2_type)
3701 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3703 error ("type mismatch in complex expression");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 debug_generic_expr (rhs2_type);
3707 return true;
3710 return false;
3713 case LSHIFT_EXPR:
3714 case RSHIFT_EXPR:
3715 case LROTATE_EXPR:
3716 case RROTATE_EXPR:
3718 /* Shifts and rotates are ok on integral types, fixed point
3719 types and integer vector types. */
3720 if ((!INTEGRAL_TYPE_P (rhs1_type)
3721 && !FIXED_POINT_TYPE_P (rhs1_type)
3722 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3723 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3724 || (!INTEGRAL_TYPE_P (rhs2_type)
3725 /* Vector shifts of vectors are also ok. */
3726 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3727 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3728 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3729 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3730 || !useless_type_conversion_p (lhs_type, rhs1_type))
3732 error ("type mismatch in shift expression");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 debug_generic_expr (rhs2_type);
3736 return true;
3739 return false;
3742 case WIDEN_LSHIFT_EXPR:
/* The LHS must be at least twice as wide as the shifted operand and
   the shift count must be constant.  */
3744 if (!INTEGRAL_TYPE_P (lhs_type)
3745 || !INTEGRAL_TYPE_P (rhs1_type)
3746 || TREE_CODE (rhs2) != INTEGER_CST
3747 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3749 error ("type mismatch in widening vector shift expression");
3750 debug_generic_expr (lhs_type);
3751 debug_generic_expr (rhs1_type);
3752 debug_generic_expr (rhs2_type);
3753 return true;
3756 return false;
3759 case VEC_WIDEN_LSHIFT_HI_EXPR:
3760 case VEC_WIDEN_LSHIFT_LO_EXPR:
/* Same widening constraint as above, element-wise on vectors.  */
3762 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3763 || TREE_CODE (lhs_type) != VECTOR_TYPE
3764 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3765 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3766 || TREE_CODE (rhs2) != INTEGER_CST
3767 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3768 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3770 error ("type mismatch in widening vector shift expression");
3771 debug_generic_expr (lhs_type);
3772 debug_generic_expr (rhs1_type);
3773 debug_generic_expr (rhs2_type);
3774 return true;
3777 return false;
3780 case PLUS_EXPR:
3781 case MINUS_EXPR:
/* For vector plus/minus compare the element types; pointer
   arithmetic must use POINTER_PLUS_EXPR instead.  */
3783 tree lhs_etype = lhs_type;
3784 tree rhs1_etype = rhs1_type;
3785 tree rhs2_etype = rhs2_type;
3786 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3788 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3789 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3791 error ("invalid non-vector operands to vector valued plus");
3792 return true;
3794 lhs_etype = TREE_TYPE (lhs_type);
3795 rhs1_etype = TREE_TYPE (rhs1_type);
3796 rhs2_etype = TREE_TYPE (rhs2_type);
3798 if (POINTER_TYPE_P (lhs_etype)
3799 || POINTER_TYPE_P (rhs1_etype)
3800 || POINTER_TYPE_P (rhs2_etype))
3802 error ("invalid (pointer) operands to plus/minus");
3803 return true;
3806 /* Continue with generic binary expression handling. */
3807 break;
3810 case POINTER_PLUS_EXPR:
/* Pointer plus an offset of ptrofftype; result type matches the
   pointer operand.  */
3812 if (!POINTER_TYPE_P (rhs1_type)
3813 || !useless_type_conversion_p (lhs_type, rhs1_type)
3814 || !ptrofftype_p (rhs2_type))
3816 error ("type mismatch in pointer plus expression");
3817 debug_generic_stmt (lhs_type);
3818 debug_generic_stmt (rhs1_type);
3819 debug_generic_stmt (rhs2_type);
3820 return true;
3823 return false;
3826 case TRUTH_ANDIF_EXPR:
3827 case TRUTH_ORIF_EXPR:
3828 case TRUTH_AND_EXPR:
3829 case TRUTH_OR_EXPR:
3830 case TRUTH_XOR_EXPR:
/* TRUTH_* codes never appear in valid GIMPLE assignments, hence
   hitting one here is an internal error.  */
3832 gcc_unreachable ();
3834 case LT_EXPR:
3835 case LE_EXPR:
3836 case GT_EXPR:
3837 case GE_EXPR:
3838 case EQ_EXPR:
3839 case NE_EXPR:
3840 case UNORDERED_EXPR:
3841 case ORDERED_EXPR:
3842 case UNLT_EXPR:
3843 case UNLE_EXPR:
3844 case UNGT_EXPR:
3845 case UNGE_EXPR:
3846 case UNEQ_EXPR:
3847 case LTGT_EXPR:
3848 /* Comparisons are also binary, but the result type is not
3849 connected to the operand types. */
3850 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3852 case WIDEN_MULT_EXPR:
3853 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3854 return true;
/* Result must be at least twice as wide as the (equal-precision)
   operands; note this returns the error flag directly.  */
3855 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3856 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3858 case WIDEN_SUM_EXPR:
3859 case VEC_WIDEN_MULT_HI_EXPR:
3860 case VEC_WIDEN_MULT_LO_EXPR:
3861 case VEC_WIDEN_MULT_EVEN_EXPR:
3862 case VEC_WIDEN_MULT_ODD_EXPR:
3863 case VEC_PACK_TRUNC_EXPR:
3864 case VEC_PACK_SAT_EXPR:
3865 case VEC_PACK_FIX_TRUNC_EXPR:
3866 /* FIXME. */
3867 return false;
3869 case MULT_EXPR:
3870 case MULT_HIGHPART_EXPR:
3871 case TRUNC_DIV_EXPR:
3872 case CEIL_DIV_EXPR:
3873 case FLOOR_DIV_EXPR:
3874 case ROUND_DIV_EXPR:
3875 case TRUNC_MOD_EXPR:
3876 case CEIL_MOD_EXPR:
3877 case FLOOR_MOD_EXPR:
3878 case ROUND_MOD_EXPR:
3879 case RDIV_EXPR:
3880 case EXACT_DIV_EXPR:
3881 case MIN_EXPR:
3882 case MAX_EXPR:
3883 case BIT_IOR_EXPR:
3884 case BIT_XOR_EXPR:
3885 case BIT_AND_EXPR:
3886 /* Continue with generic binary expression handling. */
3887 break;
3889 default:
3890 gcc_unreachable ();
/* Generic case: both operands and the result share one type.  */
3893 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3894 || !useless_type_conversion_p (lhs_type, rhs2_type))
3896 error ("type mismatch in binary expression");
3897 debug_generic_stmt (lhs_type);
3898 debug_generic_stmt (rhs1_type);
3899 debug_generic_stmt (rhs2_type);
3900 return true;
3903 return false;
3906 /* Verify a gimple assignment statement STMT with a ternary rhs.
3907 Returns true if anything is wrong. */
3909 static bool
3910 verify_gimple_assign_ternary (gassign *stmt)
3912 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3913 tree lhs = gimple_assign_lhs (stmt);
3914 tree lhs_type = TREE_TYPE (lhs);
3915 tree rhs1 = gimple_assign_rhs1 (stmt);
3916 tree rhs1_type = TREE_TYPE (rhs1);
3917 tree rhs2 = gimple_assign_rhs2 (stmt);
3918 tree rhs2_type = TREE_TYPE (rhs2);
3919 tree rhs3 = gimple_assign_rhs3 (stmt);
3920 tree rhs3_type = TREE_TYPE (rhs3);
3922 if (!is_gimple_reg (lhs))
3924 error ("non-register as LHS of ternary operation");
3925 return true;
/* (VEC_)COND_EXPR may embed a comparison as its first operand;
   all other ternary codes require plain GIMPLE values.  */
3928 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3929 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3930 || !is_gimple_val (rhs2)
3931 || !is_gimple_val (rhs3))
3933 error ("invalid operands in ternary operation");
3934 return true;
3937 /* First handle operations that involve different types. */
3938 switch (rhs_code)
3940 case WIDEN_MULT_PLUS_EXPR:
3941 case WIDEN_MULT_MINUS_EXPR:
/* rhs1 * rhs2 +/- rhs3: multiplicands share a type, the accumulator
   matches the (at least twice as wide) result.  */
3942 if ((!INTEGRAL_TYPE_P (rhs1_type)
3943 && !FIXED_POINT_TYPE_P (rhs1_type))
3944 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3945 || !useless_type_conversion_p (lhs_type, rhs3_type)
3946 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3947 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3949 error ("type mismatch in widening multiply-accumulate expression");
3950 debug_generic_expr (lhs_type);
3951 debug_generic_expr (rhs1_type);
3952 debug_generic_expr (rhs2_type);
3953 debug_generic_expr (rhs3_type);
3954 return true;
3956 break;
3958 case FMA_EXPR:
/* Fused multiply-add: all three operands and the result share
   one type.  */
3959 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3960 || !useless_type_conversion_p (lhs_type, rhs2_type)
3961 || !useless_type_conversion_p (lhs_type, rhs3_type))
3963 error ("type mismatch in fused multiply-add expression");
3964 debug_generic_expr (lhs_type);
3965 debug_generic_expr (rhs1_type);
3966 debug_generic_expr (rhs2_type);
3967 debug_generic_expr (rhs3_type);
3968 return true;
3970 break;
3972 case VEC_COND_EXPR:
3973 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3974 || TYPE_SIGN (rhs1_type) != SIGNED
3975 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
3976 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3977 != TYPE_VECTOR_SUBPARTS (lhs_type))
3979 error ("the first argument of a VEC_COND_EXPR must be of a signed "
3980 "integral vector type of the same size and number of "
3981 "elements as the result");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 return true;
3986 /* Fallthrough. */
3987 case COND_EXPR:
/* Both selected values must match the result type.  */
3988 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3989 || !useless_type_conversion_p (lhs_type, rhs3_type))
3991 error ("type mismatch in conditional expression");
3992 debug_generic_expr (lhs_type);
3993 debug_generic_expr (rhs2_type);
3994 debug_generic_expr (rhs3_type);
3995 return true;
3997 break;
3999 case VEC_PERM_EXPR:
4000 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4001 || !useless_type_conversion_p (lhs_type, rhs2_type))
4003 error ("type mismatch in vector permute expression");
4004 debug_generic_expr (lhs_type);
4005 debug_generic_expr (rhs1_type);
4006 debug_generic_expr (rhs2_type);
4007 debug_generic_expr (rhs3_type);
4008 return true;
4011 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4012 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4013 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4015 error ("vector types expected in vector permute expression");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs1_type);
4018 debug_generic_expr (rhs2_type);
4019 debug_generic_expr (rhs3_type);
4020 return true;
/* All four vectors involved must have the same element count.  */
4023 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4024 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4025 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4026 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4027 != TYPE_VECTOR_SUBPARTS (lhs_type))
4029 error ("vectors with different element number found "
4030 "in vector permute expression")&#x3B;
4031 debug_generic_expr (lhs_type);
4032 debug_generic_expr (rhs1_type);
4033 debug_generic_expr (rhs2_type);
4034 debug_generic_expr (rhs3_type);
4035 return true;
/* The selector (rhs3) must be an integer vector whose element size
   matches the data vectors' element size.  */
4038 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4039 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4040 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4042 error ("invalid mask type in vector permute expression");
4043 debug_generic_expr (lhs_type);
4044 debug_generic_expr (rhs1_type);
4045 debug_generic_expr (rhs2_type);
4046 debug_generic_expr (rhs3_type);
4047 return true;
4050 return false;
4052 case SAD_EXPR:
/* Sum of absolute differences: rhs1/rhs2 are the compared vectors,
   rhs3 the accumulator whose elements are at least twice as wide.  */
4053 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4054 || !useless_type_conversion_p (lhs_type, rhs3_type)
4055 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4056 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4058 error ("type mismatch in sad expression");
4059 debug_generic_expr (lhs_type);
4060 debug_generic_expr (rhs1_type);
4061 debug_generic_expr (rhs2_type);
4062 debug_generic_expr (rhs3_type);
4063 return true;
4066 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4067 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4068 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4070 error ("vector types expected in sad expression");
4071 debug_generic_expr (lhs_type);
4072 debug_generic_expr (rhs1_type);
4073 debug_generic_expr (rhs2_type);
4074 debug_generic_expr (rhs3_type);
4075 return true;
4078 return false;
4080 case DOT_PROD_EXPR:
4081 case REALIGN_LOAD_EXPR:
4082 /* FIXME. */
4083 return false;
4085 default:
4086 gcc_unreachable ();
4088 return false;
4091 /* Verify a gimple assignment statement STMT with a single rhs.
4092 Returns true if anything is wrong. */
4094 static bool
4095 verify_gimple_assign_single (gassign *stmt)
4097 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4098 tree lhs = gimple_assign_lhs (stmt);
4099 tree lhs_type = TREE_TYPE (lhs);
4100 tree rhs1 = gimple_assign_rhs1 (stmt);
4101 tree rhs1_type = TREE_TYPE (rhs1);
4102 bool res = false;
/* Plain copies must not involve an implicit conversion.  */
4104 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4106 error ("non-trivial conversion at assignment");
4107 debug_generic_expr (lhs_type);
4108 debug_generic_expr (rhs1_type);
4109 return true;
4112 if (gimple_clobber_p (stmt)
4113 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4115 error ("non-decl/MEM_REF LHS in clobber statement");
4116 debug_generic_expr (lhs);
4117 return true;
/* If the LHS is a memory reference, check its internal structure;
   accumulate the result and keep checking the RHS below.  */
4120 if (handled_component_p (lhs)
4121 || TREE_CODE (lhs) == MEM_REF
4122 || TREE_CODE (lhs) == TARGET_MEM_REF)
4123 res |= verify_types_in_gimple_reference (lhs, true);
4125 /* Special codes we cannot handle via their class. */
4126 switch (rhs_code)
4128 case ADDR_EXPR:
4130 tree op = TREE_OPERAND (rhs1, 0);
4131 if (!is_gimple_addressable (op))
4133 error ("invalid operand in unary expression");
4134 return true;
4137 /* Technically there is no longer a need for matching types, but
4138 gimple hygiene asks for this check. In LTO we can end up
4139 combining incompatible units and thus end up with addresses
4140 of globals that change their type to a common one. */
4141 if (!in_lto_p
4142 && !types_compatible_p (TREE_TYPE (op),
4143 TREE_TYPE (TREE_TYPE (rhs1)))
4144 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4145 TREE_TYPE (op)))
4147 error ("type mismatch in address expression");
4148 debug_generic_stmt (TREE_TYPE (rhs1));
4149 debug_generic_stmt (TREE_TYPE (op));
4150 return true;
4153 return verify_types_in_gimple_reference (op, true);
4156 /* tcc_reference */
4157 case INDIRECT_REF:
4158 error ("INDIRECT_REF in gimple IL");
4159 return true;
4161 case COMPONENT_REF:
4162 case BIT_FIELD_REF:
4163 case ARRAY_REF:
4164 case ARRAY_RANGE_REF:
4165 case VIEW_CONVERT_EXPR:
4166 case REALPART_EXPR:
4167 case IMAGPART_EXPR:
4168 case TARGET_MEM_REF:
4169 case MEM_REF:
/* A memory RHS with a register-typed non-register LHS would be a
   load-store in one statement, which GIMPLE does not allow.  */
4170 if (!is_gimple_reg (lhs)
4171 && is_gimple_reg_type (TREE_TYPE (lhs)))
4173 error ("invalid rhs for gimple memory store");
4174 debug_generic_stmt (lhs);
4175 debug_generic_stmt (rhs1);
4176 return true;
4178 return res || verify_types_in_gimple_reference (rhs1, false);
4180 /* tcc_constant */
4181 case SSA_NAME:
4182 case INTEGER_CST:
4183 case REAL_CST:
4184 case FIXED_CST:
4185 case COMPLEX_CST:
4186 case VECTOR_CST:
4187 case STRING_CST:
4188 return res;
4190 /* tcc_declaration */
4191 case CONST_DECL:
4192 return res;
4193 case VAR_DECL:
4194 case PARM_DECL:
4195 if (!is_gimple_reg (lhs)
4196 && !is_gimple_reg (rhs1)
4197 && is_gimple_reg_type (TREE_TYPE (lhs)))
4199 error ("invalid rhs for gimple memory store");
4200 debug_generic_stmt (lhs);
4201 debug_generic_stmt (rhs1);
4202 return true;
4204 return res;
4206 case CONSTRUCTOR:
4207 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4209 unsigned int i;
4210 tree elt_i, elt_v, elt_t = NULL_TREE;
4212 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4213 return res;
4214 /* For vector CONSTRUCTORs we require that either it is empty
4215 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4216 (then the element count must be correct to cover the whole
4217 outer vector and index must be NULL on all elements, or it is
4218 a CONSTRUCTOR of scalar elements, where we as an exception allow
4219 smaller number of elements (assuming zero filling) and
4220 consecutive indexes as compared to NULL indexes (such
4221 CONSTRUCTORs can appear in the IL from FEs). */
4222 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4224 if (elt_t == NULL_TREE)
4226 elt_t = TREE_TYPE (elt_v);
4227 if (TREE_CODE (elt_t) == VECTOR_TYPE)
/* NOTE(review): this inner elt_t shadows the outer one; both are
   TREE_TYPE (elt_v) so the shadowing looks harmless — confirm.  */
4229 tree elt_t = TREE_TYPE (elt_v);
4230 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4231 TREE_TYPE (elt_t)))
4233 error ("incorrect type of vector CONSTRUCTOR"
4234 " elements");
4235 debug_generic_stmt (rhs1);
4236 return true;
4238 else if (CONSTRUCTOR_NELTS (rhs1)
4239 * TYPE_VECTOR_SUBPARTS (elt_t)
4240 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4242 error ("incorrect number of vector CONSTRUCTOR"
4243 " elements");
4244 debug_generic_stmt (rhs1);
4245 return true;
4248 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4249 elt_t))
4251 error ("incorrect type of vector CONSTRUCTOR elements");
4252 debug_generic_stmt (rhs1);
4253 return true;
4255 else if (CONSTRUCTOR_NELTS (rhs1)
4256 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4258 error ("incorrect number of vector CONSTRUCTOR elements");
4259 debug_generic_stmt (rhs1);
4260 return true;
/* Subsequent elements must all match the type of the first one.  */
4263 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4265 error ("incorrect type of vector CONSTRUCTOR elements");
4266 debug_generic_stmt (rhs1);
4267 return true;
4269 if (elt_i != NULL_TREE
4270 && (TREE_CODE (elt_t) == VECTOR_TYPE
4271 || TREE_CODE (elt_i) != INTEGER_CST
4272 || compare_tree_int (elt_i, i) != 0))
4274 error ("vector CONSTRUCTOR with non-NULL element index");
4275 debug_generic_stmt (rhs1);
4276 return true;
4278 if (!is_gimple_val (elt_v))
4280 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4281 debug_generic_stmt (rhs1);
4282 return true;
4286 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4288 error ("non-vector CONSTRUCTOR with elements");
4289 debug_generic_stmt (rhs1);
4290 return true;
4292 return res;
4293 case OBJ_TYPE_REF:
4294 case ASSERT_EXPR:
4295 case WITH_SIZE_EXPR:
4296 /* FIXME. */
4297 return res;
4299 default:;
4302 return res;
4305 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4306 is a problem, otherwise false. */
4308 static bool
4309 verify_gimple_assign (gassign *stmt)
/* Dispatch on the operand class of the RHS; each class has its own
   dedicated verifier.  */
4311 switch (gimple_assign_rhs_class (stmt))
4313 case GIMPLE_SINGLE_RHS:
4314 return verify_gimple_assign_single (stmt);
4316 case GIMPLE_UNARY_RHS:
4317 return verify_gimple_assign_unary (stmt);
4319 case GIMPLE_BINARY_RHS:
4320 return verify_gimple_assign_binary (stmt);
4322 case GIMPLE_TERNARY_RHS:
4323 return verify_gimple_assign_ternary (stmt);
4325 default:
4326 gcc_unreachable ();
4330 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4331 is a problem, otherwise false. */
4333 static bool
4334 verify_gimple_return (greturn *stmt)
4336 tree op = gimple_return_retval (stmt);
4337 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4339 /* We cannot test for present return values as we do not fix up missing
4340 return values from the original source. */
4341 if (op == NULL)
4342 return false;
4344 if (!is_gimple_val (op)
4345 && TREE_CODE (op) != RESULT_DECL)
4347 error ("invalid operand in return statement");
4348 debug_generic_stmt (op);
4349 return true;
/* For a DECL_BY_REFERENCE result OP has pointer type; replace OP by
   its type here so that TREE_TYPE (op) in the check below yields the
   pointed-to type, which is what must match RESTYPE.  */
4352 if ((TREE_CODE (op) == RESULT_DECL
4353 && DECL_BY_REFERENCE (op))
4354 || (TREE_CODE (op) == SSA_NAME
4355 && SSA_NAME_VAR (op)
4356 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4357 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4358 op = TREE_TYPE (op);
4360 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4362 error ("invalid conversion in return statement");
4363 debug_generic_stmt (restype);
4364 debug_generic_stmt (TREE_TYPE (op));
4365 return true;
4368 return false;
4372 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4373 is a problem, otherwise false. */
4375 static bool
4376 verify_gimple_goto (ggoto *stmt)
4378 tree dest = gimple_goto_dest (stmt);
4380 /* ??? We have two canonical forms of direct goto destinations, a
4381 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
/* Computed gotos additionally allow any pointer-typed GIMPLE value.  */
4382 if (TREE_CODE (dest) != LABEL_DECL
4383 && (!is_gimple_val (dest)
4384 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4386 error ("goto destination is neither a label nor a pointer");
4387 return true;
4390 return false;
4393 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4394 is a problem, otherwise false. */
4396 static bool
4397 verify_gimple_switch (gswitch *stmt)
4399 unsigned int i, n;
4400 tree elt, prev_upper_bound = NULL_TREE;
4401 tree index_type, elt_type = NULL_TREE;
4403 if (!is_gimple_val (gimple_switch_index (stmt)))
4405 error ("invalid operand to switch statement");
4406 debug_generic_stmt (gimple_switch_index (stmt));
4407 return true;
4410 index_type = TREE_TYPE (gimple_switch_index (stmt));
4411 if (! INTEGRAL_TYPE_P (index_type))
4413 error ("non-integral type switch statement");
4414 debug_generic_expr (index_type);
4415 return true;
/* Label 0 is the default case; it must carry no CASE_LOW/HIGH.  */
4418 elt = gimple_switch_label (stmt, 0);
4419 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4421 error ("invalid default case label in switch statement");
4422 debug_generic_expr (elt);
4423 return true;
/* Walk the remaining labels checking well-formedness, type agreement
   and ascending sorted order.  */
4426 n = gimple_switch_num_labels (stmt);
4427 for (i = 1; i < n; i++)
4429 elt = gimple_switch_label (stmt, i);
4431 if (! CASE_LOW (elt))
4433 error ("invalid case label in switch statement");
4434 debug_generic_expr (elt);
4435 return true;
4437 if (CASE_HIGH (elt)
4438 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4440 error ("invalid case range in switch statement");
4441 debug_generic_expr (elt);
4442 return true;
4445 if (elt_type)
4447 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4448 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4450 error ("type mismatch for case label in switch statement");
4451 debug_generic_expr (elt);
4452 return true;
4455 else
/* First non-default label fixes the expected case label type.  */
4457 elt_type = TREE_TYPE (CASE_LOW (elt));
4458 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4460 error ("type precision mismatch in switch statement");
4461 return true;
4465 if (prev_upper_bound)
4467 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4469 error ("case labels not sorted in switch statement");
4470 return true;
/* Remember this label's upper bound for the sortedness check;
   a plain case uses its low value.  */
4474 prev_upper_bound = CASE_HIGH (elt);
4475 if (! prev_upper_bound)
4476 prev_upper_bound = CASE_LOW (elt);
4479 return false;
4482 /* Verify a gimple debug statement STMT.
4483 Returns true if anything is wrong. */
4485 static bool
4486 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4488 /* There isn't much that could be wrong in a gimple debug stmt. A
4489 gimple debug bind stmt, for example, maps a tree, that's usually
4490 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4491 component or member of an aggregate type, to another tree, that
4492 can be an arbitrary expression. These stmts expand into debug
4493 insns, and are converted to debug notes by var-tracking.c. */
/* Hence debug statements are unconditionally accepted.  */
4494 return false;
4497 /* Verify a gimple label statement STMT.
4498 Returns true if anything is wrong. */
4500 static bool
4501 verify_gimple_label (glabel *stmt)
4503 tree decl = gimple_label_label (stmt);
4504 int uid;
4505 bool err = false;
4507 if (TREE_CODE (decl) != LABEL_DECL
4508 return true;
/* Ordinary labels must belong to the current function; nonlocal and
   forced labels are exempt from this check.  */
4509 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4510 && DECL_CONTEXT (decl) != current_function_decl
4512 error ("label's context is not the current function decl");
4513 err |= true;
/* The label's UID must map back to the basic block containing it.  */
4516 uid = LABEL_DECL_UID (decl);
4517 if (cfun->cfg
4518 && (uid == -1
4519 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4521 error ("incorrect entry in label_to_block_map");
4522 err |= true;
/* If the label is an EH landing pad, the landing pad with that
   number must point back at this label.  */
4525 uid = EH_LANDING_PAD_NR (decl);
4526 if (uid)
4528 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4529 if (decl != lp->post_landing_pad)
4531 error ("incorrect setting of landing pad number");
4532 err |= true;
4536 return err;
4539 /* Verify a gimple cond statement STMT.
4540 Returns true if anything is wrong. */
4542 static bool
4543 verify_gimple_cond (gcond *stmt)
4545 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4547 error ("invalid comparison code in gimple cond");
4548 return true;
/* Each branch label may be absent, but if present must be a
   LABEL_DECL.  */
4550 if (!(!gimple_cond_true_label (stmt)
4551 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4552 || !(!gimple_cond_false_label (stmt)
4553 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4555 error ("invalid labels in gimple cond");
4556 return true;
/* The comparison itself is checked as if it produced a boolean.  */
4559 return verify_gimple_comparison (boolean_type_node,
4560 gimple_cond_lhs (stmt),
4561 gimple_cond_rhs (stmt));
4564 /* Verify the GIMPLE statement STMT. Returns true if there is an
4565 error, otherwise false. */
4567 static bool
4568 verify_gimple_stmt (gimple stmt)
/* Dispatch to the per-code verifier; codes with no tree operands are
   accepted unconditionally.  */
4570 switch (gimple_code (stmt))
4572 case GIMPLE_ASSIGN:
4573 return verify_gimple_assign (as_a <gassign *> (stmt));
4575 case GIMPLE_LABEL:
4576 return verify_gimple_label (as_a <glabel *> (stmt));
4578 case GIMPLE_CALL:
4579 return verify_gimple_call (as_a <gcall *> (stmt));
4581 case GIMPLE_COND:
4582 return verify_gimple_cond (as_a <gcond *> (stmt));
4584 case GIMPLE_GOTO:
4585 return verify_gimple_goto (as_a <ggoto *> (stmt));
4587 case GIMPLE_SWITCH:
4588 return verify_gimple_switch (as_a <gswitch *> (stmt));
4590 case GIMPLE_RETURN:
4591 return verify_gimple_return (as_a <greturn *> (stmt));
4593 case GIMPLE_ASM:
4594 return false;
4596 case GIMPLE_TRANSACTION:
4597 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4599 /* Tuples that do not have tree operands. */
4600 case GIMPLE_NOP:
4601 case GIMPLE_PREDICT:
4602 case GIMPLE_RESX:
4603 case GIMPLE_EH_DISPATCH:
4604 case GIMPLE_EH_MUST_NOT_THROW:
4605 return false;
4607 CASE_GIMPLE_OMP:
4608 /* OpenMP directives are validated by the FE and never operated
4609 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4610 non-gimple expressions when the main index variable has had
4611 its address taken. This does not affect the loop itself
4612 because the header of an GIMPLE_OMP_FOR is merely used to determine
4613 how to setup the parallel iteration. */
4614 return false;
4616 case GIMPLE_DEBUG:
4617 return verify_gimple_debug (stmt);
4619 default:
4620 gcc_unreachable ();
4624 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4625 and false otherwise. */
4627 static bool
4628 verify_gimple_phi (gimple phi)
4630 bool err = false;
4631 unsigned i;
4632 tree phi_result = gimple_phi_result (phi);
4633 bool virtual_p;
4635 if (!phi_result)
4637 error ("invalid PHI result");
4638 return true;
/* The result must be an SSA name; a virtual PHI result must belong
   to the function's single virtual operand.  */
4641 virtual_p = virtual_operand_p (phi_result);
4642 if (TREE_CODE (phi_result) != SSA_NAME
4643 || (virtual_p
4644 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4646 error ("invalid PHI result");
4647 err = true;
/* Every argument must agree with the result's virtual-ness.  */
4650 for (i = 0; i < gimple_phi_num_args (phi); i++)
4652 tree t = gimple_phi_arg_def (phi, i);
4654 if (!t)
4656 error ("missing PHI def");
4657 err |= true;
4658 continue;
4660 /* Addressable variables do have SSA_NAMEs but they
4661 are not considered gimple values. */
4662 else if ((TREE_CODE (t) == SSA_NAME
4663 && virtual_p != virtual_operand_p (t))
4664 || (virtual_p
4665 && (TREE_CODE (t) != SSA_NAME
4666 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4667 || (!virtual_p
4668 && !is_gimple_val (t)))
4670 error ("invalid PHI argument");
4671 debug_generic_expr (t);
4672 err |= true;
4674 #ifdef ENABLE_TYPES_CHECKING
4675 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4677 error ("incompatible types in PHI argument %u", i);
4678 debug_generic_stmt (TREE_TYPE (phi_result));
4679 debug_generic_stmt (TREE_TYPE (t));
4680 err |= true;
4682 #endif
4685 return err;
4688 /* Verify the GIMPLE statements inside the sequence STMTS. */
4690 static bool
4691 verify_gimple_in_seq_2 (gimple_seq stmts)
4693 gimple_stmt_iterator ittr;
4694 bool err = false;
4696 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4698 gimple stmt = gsi_stmt (ittr);
/* Statements that contain nested sequences are verified by recursing
   into each contained sequence; everything else goes through
   verify_gimple_stmt.  */
4700 switch (gimple_code (stmt))
4702 case GIMPLE_BIND:
4703 err |= verify_gimple_in_seq_2 (
4704 gimple_bind_body (as_a <gbind *> (stmt)));
4705 break;
4707 case GIMPLE_TRY:
4708 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4709 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4710 break;
4712 case GIMPLE_EH_FILTER:
4713 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4714 break;
4716 case GIMPLE_EH_ELSE:
4718 geh_else *eh_else = as_a <geh_else *> (stmt);
4719 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4720 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4722 break;
4724 case GIMPLE_CATCH:
4725 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4726 as_a <gcatch *> (stmt)));
4727 break;
4729 case GIMPLE_TRANSACTION:
4730 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4731 break;
4733 default:
/* Dump the offending statement so the error is actionable.  */
4735 bool err2 = verify_gimple_stmt (stmt);
4736 if (err2)
4737 debug_gimple_stmt (stmt);
4738 err |= err2;
4743 return err;
4746 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4747 is a problem, otherwise false. */
4749 static bool
4750 verify_gimple_transaction (gtransaction *stmt)
/* The abort label, if any, must be a LABEL_DECL; then verify the
   transaction body recursively.  */
4752 tree lab = gimple_transaction_label (stmt);
4753 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4754 return true;
4755 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4759 /* Verify the GIMPLE statements inside the statement list STMTS. */
/* Public entry point: times the verification and ICEs on failure.  */
4761 DEBUG_FUNCTION void
4762 verify_gimple_in_seq (gimple_seq stmts)
4764 timevar_push (TV_TREE_STMT_VERIFY);
4765 if (verify_gimple_in_seq_2 (stmts))
4766 internal_error ("verify_gimple failed");
4767 timevar_pop (TV_TREE_STMT_VERIFY);
4770 /* Return true when the T can be shared. */
4772 static bool
4773 tree_node_can_be_shared (tree t)
4775 if (IS_TYPE_OR_DECL_P (t)
4776 || is_gimple_min_invariant (t)
4777 || TREE_CODE (t) == SSA_NAME
4778 || t == error_mark_node
4779 || TREE_CODE (t) == IDENTIFIER_NODE)
4780 return true;
4782 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4783 return true;
/* NOTE(review): this DECL_P test looks redundant — IS_TYPE_OR_DECL_P
   above should already accept all decls; confirm before removing.  */
4785 if (DECL_P (t))
4786 return true;
4788 return false;
4791 /* Called via walk_tree. Verify tree sharing. */
4793 static tree
4794 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4796 hash_set<void *> *visited = (hash_set<void *> *) data;
/* Shareable nodes are allowed to appear multiple times; do not
   descend into them.  */
4798 if (tree_node_can_be_shared (*tp))
4800 *walk_subtrees = false;
4801 return NULL;
/* hash_set::add returns true if the node was already present —
   i.e. it is illegally shared; return it to abort the walk.  */
4804 if (visited->add (*tp))
4805 return *tp;
4807 return NULL;
4810 /* Called via walk_gimple_stmt. Verify tree sharing. */
/* Thin adapter: unpack the visited set from the walk_stmt_info and
   delegate to verify_node_sharing_1.  */
4812 static tree
4813 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4815 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4816 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
/* Set by verify_eh_throw_stmt_node, read by verify_gimple_in_cfg.  */
4819 static bool eh_error_found;
/* Traversal callback for the EH throw-stmt table: flag any statement
   recorded in the table that was not seen in the CFG (VISITED).  */
4820 bool
4821 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4822 hash_set<gimple> *visited)
4824 if (!visited->contains (stmt))
4826 error ("dead STMT in EH table");
4827 debug_gimple_stmt (stmt);
4828 eh_error_found = true;
/* Returning true keeps the traversal going over all entries.  */
4830 return true;
4833 /* Verify if the location LOCs block is in BLOCKS. */
4835 static bool
4836 verify_location (hash_set<tree> *blocks, location_t loc)
4838 tree block = LOCATION_BLOCK (loc);
4839 if (block != NULL_TREE
4840 && !blocks->contains (block))
4842 error ("location references block not in block tree");
4843 return true;
/* Recurse: the block's own source location must also reference a
   block from the tree (or none).  */
4845 if (block != NULL_TREE)
4846 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4847 return false;
4850 /* Called via walk_tree. Verify that expressions have no blocks. */
4852 static tree
4853 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4855 if (!EXPR_P (*tp))
4857 *walk_subtrees = false;
4858 return NULL;
/* Return the offending expression if its location carries a block.  */
4861 location_t loc = EXPR_LOCATION (*tp);
4862 if (LOCATION_BLOCK (loc) != NULL)
4863 return *tp;
4865 return NULL;
4868 /* Called via walk_tree. Verify locations of expressions. */
4870 static tree
4871 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4873 hash_set<tree> *blocks = (hash_set<tree> *) data;
/* DECL_DEBUG_EXPRs must not reference any blocks at all.  */
4875 if (TREE_CODE (*tp) == VAR_DECL
4876 && DECL_HAS_DEBUG_EXPR_P (*tp))
4878 tree t = DECL_DEBUG_EXPR (*tp);
4879 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4880 if (addr)
4881 return addr;
/* Likewise for DECL_VALUE_EXPRs of vars, parms and results.  */
4883 if ((TREE_CODE (*tp) == VAR_DECL
4884 || TREE_CODE (*tp) == PARM_DECL
4885 || TREE_CODE (*tp) == RESULT_DECL)
4886 && DECL_HAS_VALUE_EXPR_P (*tp))
4888 tree t = DECL_VALUE_EXPR (*tp);
4889 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4890 if (addr)
4891 return addr;
4894 if (!EXPR_P (*tp))
4896 *walk_subtrees = false;
4897 return NULL;
/* An ordinary expression's location must reference a block from the
   function's block tree (checked transitively by verify_location).  */
4900 location_t loc = EXPR_LOCATION (*tp);
4901 if (verify_location (blocks, loc))
4902 return *tp;
4904 return NULL;
4907 /* Called via walk_gimple_op. Verify locations of expressions. */
/* Thin adapter: unpack the blocks set from the walk_stmt_info and
   delegate to verify_expr_location_1.  */
4909 static tree
4910 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4912 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4913 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4916 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4918 static void
4919 collect_subblocks (hash_set<tree> *blocks, tree block)
4921 tree t;
/* Depth-first over the BLOCK_SUBBLOCKS / BLOCK_CHAIN lattice; the
   root BLOCK itself is added by the caller.  */
4922 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4924 blocks->add (t);
4925 collect_subblocks (blocks, t);
4929 /* Verify the GIMPLE statements in the CFG of FN. */
4931 DEBUG_FUNCTION void
4932 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4934 basic_block bb;
4935 bool err = false;
4937 timevar_push (TV_TREE_STMT_VERIFY);
/* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
   records every statement seen, for the EH-table cross-check below.  */
4938 hash_set<void *> visited;
4939 hash_set<gimple> visited_stmts;
4941 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4942 hash_set<tree> blocks;
4943 if (DECL_INITIAL (fn->decl))
4945 blocks.add (DECL_INITIAL (fn->decl));
4946 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4949 FOR_EACH_BB_FN (bb, fn)
4951 gimple_stmt_iterator gsi;
/* First pass over the block: verify every PHI node.  */
4953 for (gphi_iterator gpi = gsi_start_phis (bb);
4954 !gsi_end_p (gpi);
4955 gsi_next (&gpi))
4957 gphi *phi = gpi.phi ();
4958 bool err2 = false;
4959 unsigned i;
4961 visited_stmts.add (phi);
4963 if (gimple_bb (phi) != bb)
4965 error ("gimple_bb (phi) is set to a wrong basic block");
4966 err2 = true;
4969 err2 |= verify_gimple_phi (phi);
4971 /* Only PHI arguments have locations. */
4972 if (gimple_location (phi) != UNKNOWN_LOCATION)
4974 error ("PHI node with location");
4975 err2 = true;
/* Check each PHI argument for tree sharing, argument locations
   (virtual PHIs must have none) and location-block validity.  */
4978 for (i = 0; i < gimple_phi_num_args (phi); i++)
4980 tree arg = gimple_phi_arg_def (phi, i);
4981 tree addr = walk_tree (&arg, verify_node_sharing_1,
4982 &visited, NULL);
4983 if (addr)
4985 error ("incorrect sharing of tree nodes");
4986 debug_generic_expr (addr);
4987 err2 |= true;
4989 location_t loc = gimple_phi_arg_location (phi, i);
4990 if (virtual_operand_p (gimple_phi_result (phi))
4991 && loc != UNKNOWN_LOCATION)
4993 error ("virtual PHI with argument locations");
4994 err2 = true;
4996 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4997 if (addr)
4999 debug_generic_expr (addr);
5000 err2 = true;
5002 err2 |= verify_location (&blocks, loc);
5005 if (err2)
5006 debug_gimple_stmt (phi);
5007 err |= err2;
/* Second pass: verify every ordinary statement in the block.  */
5010 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5012 gimple stmt = gsi_stmt (gsi);
5013 bool err2 = false;
5014 struct walk_stmt_info wi;
5015 tree addr;
5016 int lp_nr;
5018 visited_stmts.add (stmt);
5020 if (gimple_bb (stmt) != bb)
5022 error ("gimple_bb (stmt) is set to a wrong basic block");
5023 err2 = true;
5026 err2 |= verify_gimple_stmt (stmt);
5027 err2 |= verify_location (&blocks, gimple_location (stmt));
/* Tree-sharing check over all operands of the statement.  */
5029 memset (&wi, 0, sizeof (wi));
5030 wi.info = (void *) &visited;
5031 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5032 if (addr)
5034 error ("incorrect sharing of tree nodes");
5035 debug_generic_expr (addr);
5036 err2 |= true;
/* Location/block-tree check over all operands.  */
5039 memset (&wi, 0, sizeof (wi));
5040 wi.info = (void *) &blocks;
5041 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5042 if (addr)
5044 debug_generic_expr (addr);
5045 err2 |= true;
5048 /* ??? Instead of not checking these stmts at all the walker
5049 should know its context via wi. */
5050 if (!is_gimple_debug (stmt)
5051 && !is_gimple_omp (stmt))
5053 memset (&wi, 0, sizeof (wi));
5054 addr = walk_gimple_op (stmt, verify_expr, &wi);
5055 if (addr)
5057 debug_generic_expr (addr);
5058 inform (gimple_location (stmt), "in statement");
5059 err2 |= true;
5063 /* If the statement is marked as part of an EH region, then it is
5064 expected that the statement could throw. Verify that when we
5065 have optimizations that simplify statements such that we prove
5066 that they cannot throw, that we update other data structures
5067 to match. */
5068 lp_nr = lookup_stmt_eh_lp (stmt);
5069 if (lp_nr > 0)
5071 if (!stmt_could_throw_p (stmt))
5073 if (verify_nothrow)
5075 error ("statement marked for throw, but doesn%'t");
5076 err2 |= true;
5079 else if (!gsi_one_before_end_p (gsi))
5081 error ("statement marked for throw in middle of block");
5082 err2 |= true;
5086 if (err2)
5087 debug_gimple_stmt (stmt);
5088 err |= err2;
/* Cross-check: every statement in the EH throw table must have been
   visited above, otherwise it is dead in the table.  */
5092 eh_error_found = false;
5093 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5094 if (eh_table)
5095 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5096 (&visited_stmts);
5098 if (err || eh_error_found)
5099 internal_error ("verify_gimple failed");
5101 verify_histograms ();
5102 timevar_pop (TV_TREE_STMT_VERIFY);
5106 /* Verifies that the flow information is OK. */
5108 static int
5109 gimple_verify_flow_info (void)
5111 int err = 0;
5112 basic_block bb;
5113 gimple_stmt_iterator gsi;
5114 gimple stmt;
5115 edge e;
5116 edge_iterator ei;
/* The artificial entry/exit blocks must carry no IL of their own.  */
5118 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5119 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5121 error ("ENTRY_BLOCK has IL associated with it");
5122 err = 1;
5125 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5126 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5128 error ("EXIT_BLOCK has IL associated with it");
5129 err = 1;
5132 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5133 if (e->flags & EDGE_FALLTHRU)
5135 error ("fallthru to exit from bb %d", e->src->index);
5136 err = 1;
5139 FOR_EACH_BB_FN (bb, cfun)
5141 bool found_ctrl_stmt = false;
5143 stmt = NULL;
5145 /* Skip labels on the start of basic block. */
/* While skipping, verify label invariants: nonlocal and EH landing
   pad labels must come first, each label must map back to this
   block and belong to the current function.  */
5146 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5148 tree label;
5149 gimple prev_stmt = stmt;
5151 stmt = gsi_stmt (gsi);
5153 if (gimple_code (stmt) != GIMPLE_LABEL)
5154 break;
5156 label = gimple_label_label (as_a <glabel *> (stmt));
5157 if (prev_stmt && DECL_NONLOCAL (label))
5159 error ("nonlocal label ");
5160 print_generic_expr (stderr, label, 0);
5161 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5162 bb->index);
5163 err = 1;
5166 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5168 error ("EH landing pad label ");
5169 print_generic_expr (stderr, label, 0);
5170 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5171 bb->index);
5172 err = 1;
5175 if (label_to_block (label) != bb)
5177 error ("label ");
5178 print_generic_expr (stderr, label, 0);
5179 fprintf (stderr, " to block does not match in bb %d",
5180 bb->index);
5181 err = 1;
5184 if (decl_function_context (label) != current_function_decl)
5186 error ("label ");
5187 print_generic_expr (stderr, label, 0);
5188 fprintf (stderr, " has incorrect context in bb %d",
5189 bb->index);
5190 err = 1;
5194 /* Verify that body of basic block BB is free of control flow. */
5195 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5197 gimple stmt = gsi_stmt (gsi);
5199 if (found_ctrl_stmt)
5201 error ("control flow in the middle of basic block %d",
5202 bb->index);
5203 err = 1;
5206 if (stmt_ends_bb_p (stmt))
5207 found_ctrl_stmt = true;
5209 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5211 error ("label ");
5212 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5213 fprintf (stderr, " in the middle of basic block %d", bb->index);
5214 err = 1;
/* Now validate the block's outgoing edges against its last stmt.  */
5218 gsi = gsi_last_bb (bb);
5219 if (gsi_end_p (gsi))
5220 continue;
5222 stmt = gsi_stmt (gsi);
5224 if (gimple_code (stmt) == GIMPLE_LABEL)
5225 continue;
5227 err |= verify_eh_edges (stmt);
5229 if (is_ctrl_stmt (stmt))
5231 FOR_EACH_EDGE (e, ei, bb->succs)
5232 if (e->flags & EDGE_FALLTHRU)
5234 error ("fallthru edge after a control statement in bb %d",
5235 bb->index);
5236 err = 1;
5240 if (gimple_code (stmt) != GIMPLE_COND)
5242 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5243 after anything else but if statement. */
5244 FOR_EACH_EDGE (e, ei, bb->succs)
5245 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5247 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5248 bb->index);
5249 err = 1;
/* Per-statement-kind edge checks.  */
5253 switch (gimple_code (stmt))
5255 case GIMPLE_COND:
5257 edge true_edge;
5258 edge false_edge;
5260 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5262 if (!true_edge
5263 || !false_edge
5264 || !(true_edge->flags & EDGE_TRUE_VALUE)
5265 || !(false_edge->flags & EDGE_FALSE_VALUE)
5266 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5267 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5268 || EDGE_COUNT (bb->succs) >= 3)
5270 error ("wrong outgoing edge flags at end of bb %d",
5271 bb->index);
5272 err = 1;
5275 break;
5277 case GIMPLE_GOTO:
5278 if (simple_goto_p (stmt))
5280 error ("explicit goto at end of bb %d", bb->index);
5281 err = 1;
5283 else
5285 /* FIXME. We should double check that the labels in the
5286 destination blocks have their address taken. */
5287 FOR_EACH_EDGE (e, ei, bb->succs)
5288 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5289 | EDGE_FALSE_VALUE))
5290 || !(e->flags & EDGE_ABNORMAL))
5292 error ("wrong outgoing edge flags at end of bb %d",
5293 bb->index);
5294 err = 1;
5297 break;
5299 case GIMPLE_CALL:
/* Only __builtin_return calls are treated like returns.  */
5300 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5301 break;
5302 /* ... fallthru ... */
5303 case GIMPLE_RETURN:
5304 if (!single_succ_p (bb)
5305 || (single_succ_edge (bb)->flags
5306 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5307 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5309 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5310 err = 1;
5312 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5314 error ("return edge does not point to exit in bb %d",
5315 bb->index);
5316 err = 1;
5318 break;
5320 case GIMPLE_SWITCH:
5322 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5323 tree prev;
5324 edge e;
5325 size_t i, n;
5327 n = gimple_switch_num_labels (switch_stmt);
5329 /* Mark all the destination basic blocks. */
/* bb->aux protocol here: 1 = block is a case target,
   2 = a matching outgoing edge was found, 0 = cleared.  */
5330 for (i = 0; i < n; ++i)
5332 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5333 basic_block label_bb = label_to_block (lab);
5334 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5335 label_bb->aux = (void *)1;
5338 /* Verify that the case labels are sorted. */
5339 prev = gimple_switch_label (switch_stmt, 0);
5340 for (i = 1; i < n; ++i)
5342 tree c = gimple_switch_label (switch_stmt, i);
5343 if (!CASE_LOW (c))
5345 error ("found default case not at the start of "
5346 "case vector");
5347 err = 1;
5348 continue;
5350 if (CASE_LOW (prev)
5351 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5353 error ("case labels not sorted: ");
5354 print_generic_expr (stderr, prev, 0);
5355 fprintf (stderr," is greater than ");
5356 print_generic_expr (stderr, c, 0);
5357 fprintf (stderr," but comes before it.\n");
5358 err = 1;
5360 prev = c;
5362 /* VRP will remove the default case if it can prove it will
5363 never be executed. So do not verify there always exists
5364 a default case here. */
5366 FOR_EACH_EDGE (e, ei, bb->succs)
5368 if (!e->dest->aux)
5370 error ("extra outgoing edge %d->%d",
5371 bb->index, e->dest->index);
5372 err = 1;
5375 e->dest->aux = (void *)2;
5376 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5377 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5379 error ("wrong outgoing edge flags at end of bb %d",
5380 bb->index);
5381 err = 1;
5385 /* Check that we have all of them. */
5386 for (i = 0; i < n; ++i)
5388 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5389 basic_block label_bb = label_to_block (lab);
5391 if (label_bb->aux != (void *)2)
5393 error ("missing edge %i->%i", bb->index, label_bb->index);
5394 err = 1;
/* Reset the aux markers for subsequent users.  */
5398 FOR_EACH_EDGE (e, ei, bb->succs)
5399 e->dest->aux = (void *)0;
5401 break;
5403 case GIMPLE_EH_DISPATCH:
5404 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5405 break;
5407 default:
5408 break;
5412 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5413 verify_dominators (CDI_DOMINATORS);
5415 return err;
5419 /* Updates phi nodes after creating a forwarder block joined
5420 by edge FALLTHRU. */
5422 static void
5423 gimple_make_forwarder_block (edge fallthru)
5425 edge e;
5426 edge_iterator ei;
5427 basic_block dummy, bb;
5428 tree var;
5429 gphi_iterator gsi;
5431 dummy = fallthru->src;
5432 bb = fallthru->dest;
/* Nothing to do when the forwarder is BB's only predecessor.  */
5434 if (single_pred_p (bb))
5435 return;
5437 /* If we redirected a branch we must create new PHI nodes at the
5438 start of BB. */
/* For each PHI in the forwarder, create a PHI in BB whose single
   argument (along FALLTHRU) is the forwarder PHI's (renamed) result;
   the original result name moves to the new PHI in BB.  */
5439 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5441 gphi *phi, *new_phi;
5443 phi = gsi.phi ();
5444 var = gimple_phi_result (phi);
5445 new_phi = create_phi_node (var, bb);
5446 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5447 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5448 UNKNOWN_LOCATION);
5451 /* Add the arguments we have stored on edges. */
5452 FOR_EACH_EDGE (e, ei, bb->preds)
5454 if (e == fallthru)
5455 continue;
5457 flush_pending_stmts (e);
5462 /* Return a non-special label in the head of basic block BLOCK.
5463 Create one if it doesn't exist. */
5465 tree
5466 gimple_block_label (basic_block bb)
5468 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5469 bool first = true;
5470 tree label;
5471 glabel *stmt;
/* Scan the leading labels for one that is not DECL_NONLOCAL; move it
   to the front so callers can branch to it directly.  */
5473 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5475 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5476 if (!stmt)
5477 break;
5478 label = gimple_label_label (stmt);
5479 if (!DECL_NONLOCAL (label))
5481 if (!first)
5482 gsi_move_before (&i, &s);
5483 return label;
/* No usable label found — synthesize one at the block head.  */
5487 label = create_artificial_label (UNKNOWN_LOCATION);
5488 stmt = gimple_build_label (label);
5489 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5490 return label;
5494 /* Attempt to perform edge redirection by replacing a possibly complex
5495 jump instruction by a goto or by removing the jump completely.
5496 This can apply only if all edges now point to the same block. The
5497 parameters and return values are equivalent to
5498 redirect_edge_and_branch. */
5500 static edge
5501 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5503 basic_block src = e->src;
5504 gimple_stmt_iterator i;
5505 gimple stmt;
5507 /* We can replace or remove a complex jump only when we have exactly
5508 two edges. */
5509 if (EDGE_COUNT (src->succs) != 2
5510 /* Verify that all targets will be TARGET. Specifically, the
5511 edge that is not E must also go to TARGET. */
5512 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5513 return NULL;
5515 i = gsi_last_bb (src);
5516 if (gsi_end_p (i))
5517 return NULL;
5519 stmt = gsi_stmt (i);
/* Both successors go to TARGET, so a COND or SWITCH is redundant:
   drop it and fold E into a plain fallthru edge.  */
5521 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5523 gsi_remove (&i, true);
5524 e = ssa_redirect_edge (e, target);
5525 e->flags = EDGE_FALLTHRU;
5526 return e;
5529 return NULL;
5533 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5534 edge representing the redirected branch. */
5536 static edge
5537 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5539 basic_block bb = e->src;
5540 gimple_stmt_iterator gsi;
5541 edge ret;
5542 gimple stmt;
/* Abnormal edges cannot be redirected; EH edges have their own
   redirection machinery.  */
5544 if (e->flags & EDGE_ABNORMAL)
5545 return NULL;
5547 if (e->dest == dest)
5548 return NULL;
5550 if (e->flags & EDGE_EH)
5551 return redirect_eh_edge (e, dest);
5553 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5555 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5556 if (ret)
5557 return ret;
/* Adjust the statement ending BB, if any, to reflect the new
   destination before redirecting the CFG edge itself.  */
5560 gsi = gsi_last_bb (bb);
5561 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5563 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5565 case GIMPLE_COND:
5566 /* For COND_EXPR, we only need to redirect the edge. */
5567 break;
5569 case GIMPLE_GOTO:
5570 /* No non-abnormal edges should lead from a non-simple goto, and
5571 simple ones should be represented implicitly. */
5572 gcc_unreachable ();
5574 case GIMPLE_SWITCH:
5576 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5577 tree label = gimple_block_label (dest);
5578 tree cases = get_cases_for_edge (e, switch_stmt);
5580 /* If we have a list of cases associated with E, then use it
5581 as it's a lot faster than walking the entire case vector. */
5582 if (cases)
5584 edge e2 = find_edge (e->src, dest);
5585 tree last, first;
5587 first = cases;
5588 while (cases)
5590 last = cases;
5591 CASE_LABEL (cases) = label;
5592 cases = CASE_CHAIN (cases);
5595 /* If there was already an edge in the CFG, then we need
5596 to move all the cases associated with E to E2. */
5597 if (e2)
5599 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5601 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5602 CASE_CHAIN (cases2) = first;
5604 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5606 else
/* No cached case list: retarget matching cases the slow way.  */
5608 size_t i, n = gimple_switch_num_labels (switch_stmt);
5610 for (i = 0; i < n; i++)
5612 tree elt = gimple_switch_label (switch_stmt, i);
5613 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5614 CASE_LABEL (elt) = label;
5618 break;
5620 case GIMPLE_ASM:
5622 gasm *asm_stmt = as_a <gasm *> (stmt);
5623 int i, n = gimple_asm_nlabels (asm_stmt);
5624 tree label = NULL;
5626 for (i = 0; i < n; ++i)
5628 tree cons = gimple_asm_label_op (asm_stmt, i);
5629 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5631 if (!label)
5632 label = gimple_block_label (dest);
5633 TREE_VALUE (cons) = label;
5637 /* If we didn't find any label matching the former edge in the
5638 asm labels, we must be redirecting the fallthrough
5639 edge. */
5640 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5642 break;
5644 case GIMPLE_RETURN:
/* Redirecting a return edge: drop the return and fall through.  */
5645 gsi_remove (&gsi, true);
5646 e->flags |= EDGE_FALLTHRU;
5647 break;
5649 case GIMPLE_OMP_RETURN:
5650 case GIMPLE_OMP_CONTINUE:
5651 case GIMPLE_OMP_SECTIONS_SWITCH:
5652 case GIMPLE_OMP_FOR:
5653 /* The edges from OMP constructs can be simply redirected. */
5654 break;
5656 case GIMPLE_EH_DISPATCH:
5657 if (!(e->flags & EDGE_FALLTHRU))
5658 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5659 break;
5661 case GIMPLE_TRANSACTION:
5662 /* The ABORT edge has a stored label associated with it, otherwise
5663 the edges are simply redirectable. */
5664 if (e->flags == 0)
5665 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5666 gimple_block_label (dest));
5667 break;
5669 default:
5670 /* Otherwise it must be a fallthru edge, and we don't need to
5671 do anything besides redirecting it. */
5672 gcc_assert (e->flags & EDGE_FALLTHRU);
5673 break;
5676 /* Update/insert PHI nodes as necessary. */
5678 /* Now update the edges in the CFG. */
5679 e = ssa_redirect_edge (e, dest);
5681 return e;
5684 /* Returns true if it is possible to remove edge E by redirecting
5685 it to the destination of the other edge from E->src. */
5687 static bool
5688 gimple_can_remove_branch_p (const_edge e)
/* Abnormal and EH edges cannot be removed by redirection.  */
5690 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5691 return false;
5693 return true;
5696 /* Simple wrapper, as we can always redirect fallthru edges. */
/* The NULL return tells the CFG hooks that no new forwarder block
   was needed; the redirection itself must succeed (asserted).  */
5698 static basic_block
5699 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5701 e = gimple_redirect_edge_and_branch (e, dest);
5702 gcc_assert (e);
5704 return NULL;
5708 /* Splits basic block BB after statement STMT (but at least after the
5709 labels). If STMT is NULL, BB is split just after the labels. */
5711 static basic_block
5712 gimple_split_block (basic_block bb, void *stmt)
5714 gimple_stmt_iterator gsi;
5715 gimple_stmt_iterator gsi_tgt;
5716 gimple_seq list;
5717 basic_block new_bb;
5718 edge e;
5719 edge_iterator ei;
5721 new_bb = create_empty_bb (bb);
5723 /* Redirect the outgoing edges. */
5724 new_bb->succs = bb->succs;
5725 bb->succs = NULL;
5726 FOR_EACH_EDGE (e, ei, new_bb->succs)
5727 e->src = new_bb;
5729 /* Get a stmt iterator pointing to the first stmt to move. */
5730 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5731 gsi = gsi_after_labels (bb);
5732 else
5734 gsi = gsi_for_stmt ((gimple) stmt);
5735 gsi_next (&gsi);
5738 /* Move everything from GSI to the new basic block. */
5739 if (gsi_end_p (gsi))
5740 return new_bb;
5742 /* Split the statement list - avoid re-creating new containers as this
5743 brings ugly quadratic memory consumption in the inliner.
5744 (We are still quadratic since we need to update stmt BB pointers,
5745 sadly.) */
5746 gsi_split_seq_before (&gsi, &list);
5747 set_bb_seq (new_bb, list);
/* Re-home every moved statement to the new block.  */
5748 for (gsi_tgt = gsi_start (list);
5749 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5750 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5752 return new_bb;
5756 /* Moves basic block BB after block AFTER. */
/* Only the linear chain order changes; edges are untouched.
   Always succeeds (returns true).  */
5758 static bool
5759 gimple_move_block_after (basic_block bb, basic_block after)
5761 if (bb->prev_bb == after)
5762 return true;
5764 unlink_block (bb);
5765 link_block (bb, after);
5767 return true;
5771 /* Return TRUE if block BB has no executable statements, otherwise return
5772 FALSE. */
5774 static bool
5775 gimple_empty_block_p (basic_block bb)
5777 /* BB must have no executable statements. */
5778 gimple_stmt_iterator gsi = gsi_after_labels (bb);
/* PHI nodes count as executable content here.  */
5779 if (phi_nodes (bb))
5780 return false;
5781 if (gsi_end_p (gsi))
5782 return true;
/* A single run of debug stmts after the labels still counts as empty.  */
5783 if (is_gimple_debug (gsi_stmt (gsi)))
5784 gsi_next_nondebug (&gsi);
5785 return gsi_end_p (gsi);
5789 /* Split a basic block if it ends with a conditional branch and if the
5790 other part of the block is not empty. */
5792 static basic_block
5793 gimple_split_block_before_cond_jump (basic_block bb)
5795 gimple last, split_point;
5796 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5797 if (gsi_end_p (gsi))
5798 return NULL;
5799 last = gsi_stmt (gsi);
/* Only split before a COND or SWITCH terminator.  */
5800 if (gimple_code (last) != GIMPLE_COND
5801 && gimple_code (last) != GIMPLE_SWITCH)
5802 return NULL;
/* Split after the last non-debug stmt preceding the branch, so the
   branch ends up alone in the new (returned) block.  */
5803 gsi_prev_nondebug (&gsi);
5804 split_point = gsi_stmt (gsi);
5805 return split_block (bb, split_point)->dest;
5809 /* Return true if basic_block can be duplicated. */
/* On GIMPLE any block may be duplicated, so this hook is trivially
   true.  */
5811 static bool
5812 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5814 return true;
5817 /* Create a duplicate of the basic block BB. NOTE: This does not
5818 preserve SSA form. */
5820 static basic_block
5821 gimple_duplicate_bb (basic_block bb)
5823 basic_block new_bb;
5824 gimple_stmt_iterator gsi_tgt;
5826 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5828 /* Copy the PHI nodes. We ignore PHI node arguments here because
5829 the incoming edges have not been setup yet. */
5830 for (gphi_iterator gpi = gsi_start_phis (bb);
5831 !gsi_end_p (gpi);
5832 gsi_next (&gpi))
5834 gphi *phi, *copy;
5835 phi = gpi.phi ();
5836 copy = create_phi_node (NULL_TREE, new_bb);
5837 create_new_def_for (gimple_phi_result (phi), copy,
5838 gimple_phi_result_ptr (copy));
5839 gimple_set_uid (copy, gimple_uid (phi));
/* Copy the statements, skipping labels and label debug binds.  */
5842 gsi_tgt = gsi_start_bb (new_bb);
5843 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5844 !gsi_end_p (gsi);
5845 gsi_next (&gsi))
5847 def_operand_p def_p;
5848 ssa_op_iter op_iter;
5849 tree lhs;
5850 gimple stmt, copy;
5852 stmt = gsi_stmt (gsi);
5853 if (gimple_code (stmt) == GIMPLE_LABEL)
5854 continue;
5856 /* Don't duplicate label debug stmts. */
5857 if (gimple_debug_bind_p (stmt)
5858 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5859 == LABEL_DECL)
5860 continue;
5862 /* Create a new copy of STMT and duplicate STMT's virtual
5863 operands. */
5864 copy = gimple_copy (stmt);
5865 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5867 maybe_duplicate_eh_stmt (copy, stmt);
5868 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5870 /* When copying around a stmt writing into a local non-user
5871 aggregate, make sure it won't share stack slot with other
5872 vars. */
5873 lhs = gimple_get_lhs (stmt);
5874 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5876 tree base = get_base_address (lhs);
5877 if (base
5878 && (TREE_CODE (base) == VAR_DECL
5879 || TREE_CODE (base) == RESULT_DECL)
5880 && DECL_IGNORED_P (base)
5881 && !TREE_STATIC (base)
5882 && !DECL_EXTERNAL (base)
5883 && (TREE_CODE (base) != VAR_DECL
5884 || !DECL_HAS_VALUE_EXPR_P (base)))
5885 DECL_NONSHAREABLE (base) = 1;
5888 /* Create new names for all the definitions created by COPY and
5889 add replacement mappings for each new name. */
5890 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5891 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5894 return new_bb;
5897 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5899 static void
5900 add_phi_args_after_copy_edge (edge e_copy)
5902 basic_block bb, bb_copy = e_copy->src, dest;
5903 edge e;
5904 edge_iterator ei;
5905 gphi *phi, *phi_copy;
5906 tree def;
5907 gphi_iterator psi, psi_copy;
5909 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5910 return;
/* Map the copied source/destination blocks back to their originals
   (blocks created by duplication carry BB_DUPLICATED).  */
5912 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5914 if (e_copy->dest->flags & BB_DUPLICATED)
5915 dest = get_bb_original (e_copy->dest);
5916 else
5917 dest = e_copy->dest;
5919 e = find_edge (bb, dest);
5920 if (!e)
5922 /* During loop unrolling the target of the latch edge is copied.
5923 In this case we are not looking for edge to dest, but to
5924 duplicated block whose original was dest. */
5925 FOR_EACH_EDGE (e, ei, bb->succs)
5927 if ((e->dest->flags & BB_DUPLICATED)
5928 && get_bb_original (e->dest) == dest)
5929 break;
5932 gcc_assert (e != NULL);
/* Walk the original and copied PHI chains in lock step, copying the
   argument (and its location) that flowed along the original edge.  */
5935 for (psi = gsi_start_phis (e->dest),
5936 psi_copy = gsi_start_phis (e_copy->dest);
5937 !gsi_end_p (psi);
5938 gsi_next (&psi), gsi_next (&psi_copy))
5940 phi = psi.phi ();
5941 phi_copy = psi_copy.phi ();
5942 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5943 add_phi_arg (phi_copy, def, e_copy,
5944 gimple_phi_arg_location_from_edge (phi, e));
5949 /* Basic block BB_COPY was created by code duplication. Add phi node
5950 arguments for edges going out of BB_COPY. The blocks that were
5951 duplicated have BB_DUPLICATED set. */
5953 void
5954 add_phi_args_after_copy_bb (basic_block bb_copy)
5956 edge e_copy;
5957 edge_iterator ei;
/* Delegate per-edge PHI-argument fixup to add_phi_args_after_copy_edge.  */
5959 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5961 add_phi_args_after_copy_edge (e_copy);
5965 /* Blocks in REGION_COPY array of length N_REGION were created by
5966 duplication of basic blocks. Add phi node arguments for edges
5967 going from these blocks. If E_COPY is not NULL, also add
5968 phi node arguments for its destination.*/
5970 void
5971 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5972 edge e_copy)
5974 unsigned i;
/* Temporarily mark the copies so the per-edge helper can tell copied
   blocks from originals; clear the marks when done.  */
5976 for (i = 0; i < n_region; i++)
5977 region_copy[i]->flags |= BB_DUPLICATED;
5979 for (i = 0; i < n_region; i++)
5980 add_phi_args_after_copy_bb (region_copy[i]);
5981 if (e_copy)
5982 add_phi_args_after_copy_edge (e_copy);
5984 for (i = 0; i < n_region; i++)
5985 region_copy[i]->flags &= ~BB_DUPLICATED;
5988 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5989 important exit edge EXIT. By important we mean that no SSA name defined
5990 inside region is live over the other exit edges of the region. All entry
5991 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5992 to the duplicate of the region. Dominance and loop information is
5993 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5994 UPDATE_DOMINANCE is false then we assume that the caller will update the
5995 dominance information after calling this function. The new basic
5996 blocks are stored to REGION_COPY in the same order as they had in REGION,
5997 provided that REGION_COPY is not NULL.
5998 The function returns false if it is unable to copy the region,
5999 true otherwise. */
6001 bool
6002 gimple_duplicate_sese_region (edge entry, edge exit,
6003 basic_block *region, unsigned n_region,
6004 basic_block *region_copy,
6005 bool update_dominance)
6007 unsigned i;
6008 bool free_region_copy = false, copying_header = false;
6009 struct loop *loop = entry->dest->loop_father;
6010 edge exit_copy;
6011 vec<basic_block> doms;
6012 edge redirected;
6013 int total_freq = 0, entry_freq = 0;
6014 gcov_type total_count = 0, entry_count = 0;
/* Refuse regions whose blocks cannot be duplicated at all.  */
6016 if (!can_copy_bbs_p (region, n_region))
6017 return false;
6019 /* Some sanity checking. Note that we do not check for all possible
6020 misuses of the functions. I.e. if you ask to copy something weird,
6021 it will work, but the state of structures probably will not be
6022 correct. */
6023 for (i = 0; i < n_region; i++)
6025 /* We do not handle subloops, i.e. all the blocks must belong to the
6026 same loop. */
6027 if (region[i]->loop_father != loop)
6028 return false;
6030 if (region[i] != entry->dest
6031 && region[i] == loop->header)
6032 return false;
6035 /* In case the function is used for loop header copying (which is the primary
6036 use), ensure that EXIT and its copy will be new latch and entry edges. */
6037 if (loop->header == entry->dest)
6039 copying_header = true;
6041 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6042 return false;
6044 for (i = 0; i < n_region; i++)
6045 if (region[i] != exit->src
6046 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6047 return false;
6050 initialize_original_copy_tables ();
/* When copying the header, the copy belongs to the outer loop; otherwise
   the copy stays within LOOP itself.  */
6052 if (copying_header)
6053 set_loop_copy (loop, loop_outer (loop));
6054 else
6055 set_loop_copy (loop, loop);
6057 if (!region_copy)
6059 region_copy = XNEWVEC (basic_block, n_region);
6060 free_region_copy = true;
6063 /* Record blocks outside the region that are dominated by something
6064 inside. */
6065 if (update_dominance)
6067 doms.create (0);
6068 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Scale the profile: use real profile counts when the entry block has a
   nonzero count, otherwise fall back to estimated frequencies.  */
6071 if (entry->dest->count)
6073 total_count = entry->dest->count;
6074 entry_count = entry->count;
6075 /* Fix up corner cases, to avoid division by zero or creation of negative
6076 frequencies. */
6077 if (entry_count > total_count)
6078 entry_count = total_count;
6080 else
6082 total_freq = entry->dest->frequency;
6083 entry_freq = EDGE_FREQUENCY (entry);
6084 /* Fix up corner cases, to avoid division by zero or creation of negative
6085 frequencies. */
6086 if (total_freq == 0)
6087 total_freq = 1;
6088 else if (entry_freq > total_freq)
6089 entry_freq = total_freq;
6092 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6093 split_edge_bb_loc (entry), update_dominance);
6094 if (total_count)
6096 scale_bbs_frequencies_gcov_type (region, n_region,
6097 total_count - entry_count,
6098 total_count);
6099 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6100 total_count);
6102 else
6104 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6105 total_freq);
6106 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* After header copying, EXIT's destination becomes the loop header and
   its source the latch.  */
6109 if (copying_header)
6111 loop->header = exit->dest;
6112 loop->latch = exit->src;
6115 /* Redirect the entry and add the phi node arguments. */
6116 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6117 gcc_assert (redirected != NULL);
6118 flush_pending_stmts (entry);
6120 /* Concerning updating of dominators: We must recount dominators
6121 for entry block and its copy. Anything that is outside of the
6122 region, but was dominated by something inside needs recounting as
6123 well. */
6124 if (update_dominance)
6126 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6127 doms.safe_push (get_bb_original (entry->dest));
6128 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6129 doms.release ();
6132 /* Add the other PHI node arguments. */
6133 add_phi_args_after_copy (region_copy, n_region, NULL);
6135 if (free_region_copy)
6136 free (region_copy);
6138 free_original_copy_tables ();
6139 return true;
6142 /* Checks if BB is part of the region defined by N_REGION BBS. */
6143 static bool
6144 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6146 unsigned int n;
6148 for (n = 0; n < n_region; n++)
6150 if (bb == bbs[n])
6151 return true;
6153 return false;
6156 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6157 are stored to REGION_COPY in the same order in that they appear
6158 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6159 the region, EXIT an exit from it. The condition guarding EXIT
6160 is moved to ENTRY. Returns true if duplication succeeds, false
6161 otherwise.
6163 For example,
6165 some_code;
6166 if (cond)
6168 else
6171 is transformed to
6173 if (cond)
6175 some_code;
6178 else
6180 some_code;
6185 bool
6186 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6187 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6188 basic_block *region_copy ATTRIBUTE_UNUSED)
6190 unsigned i;
6191 bool free_region_copy = false;
6192 struct loop *loop = exit->dest->loop_father;
6193 struct loop *orig_loop = entry->dest->loop_father;
6194 basic_block switch_bb, entry_bb, nentry_bb;
6195 vec<basic_block> doms;
6196 int total_freq = 0, exit_freq = 0;
6197 gcov_type total_count = 0, exit_count = 0;
6198 edge exits[2], nexits[2], e;
6199 gimple_stmt_iterator gsi;
6200 gimple cond_stmt;
6201 edge sorig, snew;
6202 basic_block exit_bb;
6203 gphi_iterator psi;
6204 gphi *phi;
6205 tree def;
6206 struct loop *target, *aloop, *cloop;
6208 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6209 exits[0] = exit;
6210 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6212 if (!can_copy_bbs_p (region, n_region))
6213 return false;
6215 initialize_original_copy_tables ();
6216 set_loop_copy (orig_loop, loop);
6218 target= loop;
6219 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6221 if (bb_part_of_region_p (aloop->header, region, n_region))
6223 cloop = duplicate_loop (aloop, target);
6224 duplicate_subloops (aloop, cloop);
6228 if (!region_copy)
6230 region_copy = XNEWVEC (basic_block, n_region);
6231 free_region_copy = true;
6234 gcc_assert (!need_ssa_update_p (cfun));
6236 /* Record blocks outside the region that are dominated by something
6237 inside. */
6238 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6240 if (exit->src->count)
6242 total_count = exit->src->count;
6243 exit_count = exit->count;
6244 /* Fix up corner cases, to avoid division by zero or creation of negative
6245 frequencies. */
6246 if (exit_count > total_count)
6247 exit_count = total_count;
6249 else
6251 total_freq = exit->src->frequency;
6252 exit_freq = EDGE_FREQUENCY (exit);
6253 /* Fix up corner cases, to avoid division by zero or creation of negative
6254 frequencies. */
6255 if (total_freq == 0)
6256 total_freq = 1;
6257 if (exit_freq > total_freq)
6258 exit_freq = total_freq;
6261 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6262 split_edge_bb_loc (exit), true);
6263 if (total_count)
6265 scale_bbs_frequencies_gcov_type (region, n_region,
6266 total_count - exit_count,
6267 total_count);
6268 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6269 total_count);
6271 else
6273 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6274 total_freq);
6275 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6278 /* Create the switch block, and put the exit condition to it. */
6279 entry_bb = entry->dest;
6280 nentry_bb = get_bb_copy (entry_bb);
6281 if (!last_stmt (entry->src)
6282 || !stmt_ends_bb_p (last_stmt (entry->src)))
6283 switch_bb = entry->src;
6284 else
6285 switch_bb = split_edge (entry);
6286 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6288 gsi = gsi_last_bb (switch_bb);
6289 cond_stmt = last_stmt (exit->src);
6290 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6291 cond_stmt = gimple_copy (cond_stmt);
6293 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6295 sorig = single_succ_edge (switch_bb);
6296 sorig->flags = exits[1]->flags;
6297 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6299 /* Register the new edge from SWITCH_BB in loop exit lists. */
6300 rescan_loop_exit (snew, true, false);
6302 /* Add the PHI node arguments. */
6303 add_phi_args_after_copy (region_copy, n_region, snew);
6305 /* Get rid of now superfluous conditions and associated edges (and phi node
6306 arguments). */
6307 exit_bb = exit->dest;
6309 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6310 PENDING_STMT (e) = NULL;
6312 /* The latch of ORIG_LOOP was copied, and so was the backedge
6313 to the original header. We redirect this backedge to EXIT_BB. */
6314 for (i = 0; i < n_region; i++)
6315 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6317 gcc_assert (single_succ_edge (region_copy[i]));
6318 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6319 PENDING_STMT (e) = NULL;
6320 for (psi = gsi_start_phis (exit_bb);
6321 !gsi_end_p (psi);
6322 gsi_next (&psi))
6324 phi = psi.phi ();
6325 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6326 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6329 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6330 PENDING_STMT (e) = NULL;
6332 /* Anything that is outside of the region, but was dominated by something
6333 inside needs to update dominance info. */
6334 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6335 doms.release ();
6336 /* Update the SSA web. */
6337 update_ssa (TODO_update_ssa);
6339 if (free_region_copy)
6340 free (region_copy);
6342 free_original_copy_tables ();
6343 return true;
6346 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6347 adding blocks when the dominator traversal reaches EXIT. This
6348 function silently assumes that ENTRY strictly dominates EXIT. */
6350 void
6351 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6352 vec<basic_block> *bbs_p)
6354 basic_block son;
6356 for (son = first_dom_son (CDI_DOMINATORS, entry);
6357 son;
6358 son = next_dom_son (CDI_DOMINATORS, son))
6360 bbs_p->safe_push (son);
6361 if (son != exit)
6362 gather_blocks_in_sese_region (son, exit, bbs_p);
6366 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6367 The duplicates are recorded in VARS_MAP. */
6369 static void
6370 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6371 tree to_context)
6373 tree t = *tp, new_t;
6374 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6376 if (DECL_CONTEXT (t) == to_context)
6377 return;
6379 bool existed;
6380 tree &loc = vars_map->get_or_insert (t, &existed);
6382 if (!existed)
6384 if (SSA_VAR_P (t))
6386 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6387 add_local_decl (f, new_t);
6389 else
6391 gcc_assert (TREE_CODE (t) == CONST_DECL);
6392 new_t = copy_node (t);
6394 DECL_CONTEXT (new_t) = to_context;
6396 loc = new_t;
6398 else
6399 new_t = loc;
6401 *tp = new_t;
6405 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6406 VARS_MAP maps old ssa names and var_decls to the new ones. */
6408 static tree
6409 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6410 tree to_context)
6412 tree new_name;
/* Virtual operands are not moved; callers must strip them first.  */
6414 gcc_assert (!virtual_operand_p (name));
/* Reuse an existing replacement if NAME was seen before.  */
6416 tree *loc = vars_map->get (name);
6418 if (!loc)
6420 tree decl = SSA_NAME_VAR (name);
6421 if (decl)
/* The underlying decl must be duplicated into TO_CONTEXT first, then a
   fresh SSA name is created there on top of the duplicate.  */
6423 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6424 replace_by_duplicate_decl (&decl, vars_map, to_context);
6425 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6426 decl, SSA_NAME_DEF_STMT (name));
6428 else
6429 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6430 name, SSA_NAME_DEF_STMT (name));
6432 /* Now that we've used the def stmt to define new_name, make sure it
6433 doesn't define name anymore. */
6434 SSA_NAME_DEF_STMT (name) = NULL;
6436 vars_map->put (name, new_name);
6438 else
6439 new_name = *loc;
6441 return new_name;
/* State shared by the move_stmt_r / move_stmt_op walkers while moving
   statements between functions.  */
6444 struct move_stmt_d
6446 tree orig_block;  /* BLOCK being replaced; NULL_TREE means "any".  */
6447 tree new_block;  /* BLOCK to install on moved statements/exprs.  */
6448 tree from_context;  /* FUNCTION_DECL statements come from.  */
6449 tree to_context;  /* FUNCTION_DECL statements move to.  */
6450 hash_map<tree, tree> *vars_map;  /* Old decl/SSA name -> duplicate.  */
6451 htab_t new_label_map;  /* tree_map table: old label -> new label.  */
6452 hash_map<void *, void *> *eh_map;  /* Old EH region -> duplicated region.  */
6453 bool remap_decls_p;  /* Whether decls should be replaced by duplicates
                           (cleared inside OMP directives).  */
6456 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6457 contained in *TP if it has been ORIG_BLOCK previously and change the
6458 DECL_CONTEXT of every local variable referenced in *TP. */
6460 static tree
6461 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6463 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6464 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6465 tree t = *tp;
/* Re-home the expression's BLOCK when it belonged to ORIG_BLOCK (or to
   any block when ORIG_BLOCK is NULL).  */
6467 if (EXPR_P (t))
6469 tree block = TREE_BLOCK (t);
6470 if (block == p->orig_block
6471 || (p->orig_block == NULL_TREE
6472 && block != NULL_TREE))
6473 TREE_SET_BLOCK (t, p->new_block);
6474 #ifdef ENABLE_CHECKING
/* With checking enabled, verify any other BLOCK is nested below
   ORIG_BLOCK.  */
6475 else if (block != NULL_TREE)
6477 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6478 block = BLOCK_SUPERCONTEXT (block);
6479 gcc_assert (block == p->orig_block);
6481 #endif
/* Decls and SSA names referenced by the statement get their context
   moved (or are replaced by duplicates) as appropriate.  */
6483 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6485 if (TREE_CODE (t) == SSA_NAME)
6486 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6487 else if (TREE_CODE (t) == PARM_DECL
6488 && gimple_in_ssa_p (cfun))
6489 *tp = *(p->vars_map->get (t));
6490 else if (TREE_CODE (t) == LABEL_DECL
6492 if (p->new_label_map)
6494 struct tree_map in, *out;
6495 in.base.from = t;
6496 out = (struct tree_map *)
6497 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6498 if (out)
6499 *tp = t = out->to;
6502 DECL_CONTEXT (t) = p->to_context;
6504 else if (p->remap_decls_p)
6506 /* Replace T with its duplicate. T should no longer appear in the
6507 parent function, so this looks wasteful; however, it may appear
6508 in referenced_vars, and more importantly, as virtual operands of
6509 statements, and in alias lists of other variables. It would be
6510 quite difficult to expunge it from all those places. ??? It might
6511 suffice to do this for addressable variables. */
6512 if ((TREE_CODE (t) == VAR_DECL
6513 && !is_global_var (t))
6514 || TREE_CODE (t) == CONST_DECL)
6515 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6517 *walk_subtrees = 0;
6519 else if (TYPE_P (t))
6520 *walk_subtrees = 0;
6522 return NULL_TREE;
6525 /* Helper for move_stmt_r. Given an EH region number for the source
6526 function, map that to the duplicate EH region number in the dest. */
6528 static int
6529 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6531 eh_region old_r, new_r;
6533 old_r = get_eh_region_from_number (old_nr);
6534 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6536 return new_r->index;
6539 /* Similar, but operate on INTEGER_CSTs. */
6541 static tree
6542 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6544 int old_nr, new_nr;
6546 old_nr = tree_to_shwi (old_t_nr);
6547 new_nr = move_stmt_eh_region_nr (old_nr, p);
6549 return build_int_cst (integer_type_node, new_nr);
6552 /* Like move_stmt_op, but for gimple statements.
6554 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6555 contained in the current statement in *GSI_P and change the
6556 DECL_CONTEXT of every local variable referenced in the current
6557 statement. */
6559 static tree
6560 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6561 struct walk_stmt_info *wi)
6563 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6564 gimple stmt = gsi_stmt (*gsi_p);
6565 tree block = gimple_block (stmt);
/* Re-home the statement's BLOCK first, then patch up EH region numbers
   embedded in specific statement kinds below.  */
6567 if (block == p->orig_block
6568 || (p->orig_block == NULL_TREE
6569 && block != NULL_TREE))
6570 gimple_set_block (stmt, p->new_block);
6572 switch (gimple_code (stmt))
6574 case GIMPLE_CALL:
6575 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6577 tree r, fndecl = gimple_call_fndecl (stmt);
6578 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6579 switch (DECL_FUNCTION_CODE (fndecl))
6581 case BUILT_IN_EH_COPY_VALUES:
/* BUILT_IN_EH_COPY_VALUES carries two region arguments; remap the
   second here and fall through to remap the first.  */
6582 r = gimple_call_arg (stmt, 1);
6583 r = move_stmt_eh_region_tree_nr (r, p);
6584 gimple_call_set_arg (stmt, 1, r);
6585 /* FALLTHRU */
6587 case BUILT_IN_EH_POINTER:
6588 case BUILT_IN_EH_FILTER:
6589 r = gimple_call_arg (stmt, 0);
6590 r = move_stmt_eh_region_tree_nr (r, p);
6591 gimple_call_set_arg (stmt, 0, r);
6592 break;
6594 default:
6595 break;
6598 break;
6600 case GIMPLE_RESX:
6602 gresx *resx_stmt = as_a <gresx *> (stmt);
6603 int r = gimple_resx_region (resx_stmt);
6604 r = move_stmt_eh_region_nr (r, p);
6605 gimple_resx_set_region (resx_stmt, r);
6607 break;
6609 case GIMPLE_EH_DISPATCH:
6611 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6612 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6613 r = move_stmt_eh_region_nr (r, p);
6614 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6616 break;
6618 case GIMPLE_OMP_RETURN:
6619 case GIMPLE_OMP_CONTINUE:
6620 break;
6621 default:
6622 if (is_gimple_omp (stmt))
6624 /* Do not remap variables inside OMP directives. Variables
6625 referenced in clauses and directive header belong to the
6626 parent function and should not be moved into the child
6627 function. */
6628 bool save_remap_decls_p = p->remap_decls_p;
6629 p->remap_decls_p = false;
6630 *handled_ops_p = true;
6632 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6633 move_stmt_op, wi);
6635 p->remap_decls_p = save_remap_decls_p;
6637 break;
6640 return NULL_TREE;
6643 /* Move basic block BB from function CFUN to function DEST_FN. The
6644 block is moved out of the original linked list and placed after
6645 block AFTER in the new list. Also, the block is removed from the
6646 original array of blocks and placed in DEST_FN's array of blocks.
6647 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6648 updated to reflect the moved edges.
6650 The local variables are remapped to new instances, VARS_MAP is used
6651 to record the mapping. */
6653 static void
6654 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6655 basic_block after, bool update_edge_count_p,
6656 struct move_stmt_d *d)
6658 struct control_flow_graph *cfg;
6659 edge_iterator ei;
6660 edge e;
6661 gimple_stmt_iterator si;
6662 unsigned old_len, new_len;
6664 /* Remove BB from dominance structures. */
6665 delete_from_dominance_info (CDI_DOMINATORS, bb);
6667 /* Move BB from its current loop to the copy in the new function. */
6668 if (current_loops)
6670 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6671 if (new_loop)
6672 bb->loop_father = new_loop;
6675 /* Link BB to the new linked list. */
6676 move_block_after (bb, after);
6678 /* Update the edge count in the corresponding flowgraphs. */
6679 if (update_edge_count_p)
6680 FOR_EACH_EDGE (e, ei, bb->succs)
6682 cfun->cfg->x_n_edges--;
6683 dest_cfun->cfg->x_n_edges++;
6686 /* Remove BB from the original basic block array. */
6687 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6688 cfun->cfg->x_n_basic_blocks--;
6690 /* Grow DEST_CFUN's basic block array if needed. */
6691 cfg = dest_cfun->cfg;
6692 cfg->x_n_basic_blocks++;
6693 if (bb->index >= cfg->x_last_basic_block)
6694 cfg->x_last_basic_block = bb->index + 1;
6696 old_len = vec_safe_length (cfg->x_basic_block_info);
6697 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow by roughly 5/4 to amortize repeated insertions.  */
6699 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6700 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6703 (*cfg->x_basic_block_info)[bb->index] = bb;
6705 /* Remap the variables in phi nodes. */
6706 for (gphi_iterator psi = gsi_start_phis (bb);
6707 !gsi_end_p (psi); )
6709 gphi *phi = psi.phi ();
6710 use_operand_p use;
6711 tree op = PHI_RESULT (phi);
6712 ssa_op_iter oi;
6713 unsigned i;
6715 if (virtual_operand_p (op))
6717 /* Remove the phi nodes for virtual operands (alias analysis will be
6718 run for the new function, anyway). */
6719 remove_phi_node (&psi, true);
6720 continue;
6723 SET_PHI_RESULT (phi,
6724 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6725 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6727 op = USE_FROM_PTR (use);
6728 if (TREE_CODE (op) == SSA_NAME)
6729 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Re-home the BLOCK component of each PHI argument location.  */
6732 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6734 location_t locus = gimple_phi_arg_location (phi, i);
6735 tree block = LOCATION_BLOCK (locus);
6737 if (locus == UNKNOWN_LOCATION)
6738 continue;
6739 if (d->orig_block == NULL_TREE || block == d->orig_block)
6741 if (d->new_block == NULL_TREE)
6742 locus = LOCATION_LOCUS (locus);
6743 else
6744 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6745 gimple_phi_arg_set_location (phi, i, locus);
6749 gsi_next (&psi);
/* Remap operands/labels and transfer EH and histogram information for
   every statement in BB.  */
6752 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6754 gimple stmt = gsi_stmt (si);
6755 struct walk_stmt_info wi;
6757 memset (&wi, 0, sizeof (wi));
6758 wi.info = d;
6759 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6761 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6763 tree label = gimple_label_label (label_stmt);
6764 int uid = LABEL_DECL_UID (label);
6766 gcc_assert (uid > -1);
6768 old_len = vec_safe_length (cfg->x_label_to_block_map);
6769 if (old_len <= (unsigned) uid)
6771 new_len = 3 * uid / 2 + 1;
6772 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6775 (*cfg->x_label_to_block_map)[uid] = bb;
6776 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6778 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6780 if (uid >= dest_cfun->cfg->last_label_uid)
6781 dest_cfun->cfg->last_label_uid = uid + 1;
6784 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6785 remove_stmt_from_eh_lp_fn (cfun, stmt);
6787 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6788 gimple_remove_stmt_histograms (cfun, stmt);
6790 /* We cannot leave any operands allocated from the operand caches of
6791 the current function. */
6792 free_stmt_operands (cfun, stmt);
6793 push_cfun (dest_cfun);
6794 update_stmt (stmt);
6795 pop_cfun ();
/* Re-home the BLOCK component of goto locations on outgoing edges.  */
6798 FOR_EACH_EDGE (e, ei, bb->succs)
6799 if (e->goto_locus != UNKNOWN_LOCATION)
6801 tree block = LOCATION_BLOCK (e->goto_locus);
6802 if (d->orig_block == NULL_TREE
6803 || block == d->orig_block)
6804 e->goto_locus = d->new_block ?
6805 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6806 LOCATION_LOCUS (e->goto_locus);
6810 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6811 the outermost EH region. Use REGION as the incoming base EH region. */
6813 static eh_region
6814 find_outermost_region_in_block (struct function *src_cfun,
6815 basic_block bb, eh_region region)
6817 gimple_stmt_iterator si;
6819 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6821 gimple stmt = gsi_stmt (si);
6822 eh_region stmt_region;
6823 int lp_nr;
6825 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6826 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6827 if (stmt_region)
6829 if (region == NULL)
6830 region = stmt_region;
6831 else if (stmt_region != region)
6833 region = eh_region_outermost (src_cfun, stmt_region, region);
6834 gcc_assert (region != NULL);
6839 return region;
6842 static tree
6843 new_label_mapper (tree decl, void *data)
6845 htab_t hash = (htab_t) data;
6846 struct tree_map *m;
6847 void **slot;
6849 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6851 m = XNEW (struct tree_map);
6852 m->hash = DECL_UID (decl);
6853 m->base.from = decl;
6854 m->to = create_artificial_label (UNKNOWN_LOCATION);
6855 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6856 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6857 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6859 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6860 gcc_assert (*slot == NULL);
6862 *slot = m;
6864 return m->to;
6867 /* Tree walker to replace the decls used inside value expressions by
6868 duplicates. */
6870 static tree
6871 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6873 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6875 switch (TREE_CODE (*tp))
6877 case VAR_DECL:
6878 case PARM_DECL:
6879 case RESULT_DECL:
6880 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6881 break;
6882 default:
6883 break;
6886 if (IS_TYPE_OR_DECL_P (*tp))
6887 *walk_subtrees = false;
6889 return NULL;
6892 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6893 subblocks. */
6895 static void
6896 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6897 tree to_context)
6899 tree *tp, t;
6901 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6903 t = *tp;
6904 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6905 continue;
6906 replace_by_duplicate_decl (&t, vars_map, to_context);
6907 if (t != *tp)
6909 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6911 tree x = DECL_VALUE_EXPR (*tp);
6912 struct replace_decls_d rd = { vars_map, to_context };
6913 unshare_expr (x);
6914 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6915 SET_DECL_VALUE_EXPR (t, x);
6916 DECL_HAS_VALUE_EXPR_P (t) = 1;
6918 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6919 *tp = t;
6923 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6924 replace_block_vars_by_duplicates (block, vars_map, to_context);
6927 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6928 from FN1 to FN2. */
6930 static void
6931 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6932 struct loop *loop)
6934 /* Discard it from the old loop array. */
6935 (*get_loops (fn1))[loop->num] = NULL;
6937 /* Place it in the new loop array, assigning it a new number. */
6938 loop->num = number_of_loops (fn2);
6939 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6941 /* Recurse to children. */
6942 for (loop = loop->inner; loop; loop = loop->next)
6943 fixup_loop_arrays_after_move (fn1, fn2, loop);
6946 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6947 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6949 DEBUG_FUNCTION void
6950 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6952 basic_block bb;
6953 edge_iterator ei;
6954 edge e;
6955 bitmap bbs = BITMAP_ALLOC (NULL);
6956 int i;
6958 gcc_assert (entry != NULL);
6959 gcc_assert (entry != exit);
6960 gcc_assert (bbs_p != NULL);
6962 gcc_assert (bbs_p->length () > 0);
/* Collect the region's block indices into a bitmap for O(1) membership
   tests below.  */
6964 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6965 bitmap_set_bit (bbs, bb->index);
6967 gcc_assert (bitmap_bit_p (bbs, entry->index));
6968 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
/* Every block's predecessors must lie inside the region, except ENTRY
   whose single predecessor is outside; symmetrically for successors and
   EXIT.  */
6970 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6972 if (bb == entry)
6974 gcc_assert (single_pred_p (entry));
6975 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6977 else
6978 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6980 e = ei_edge (ei);
6981 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6984 if (bb == exit)
6986 gcc_assert (single_succ_p (exit));
6987 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6989 else
6990 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6992 e = ei_edge (ei);
6993 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6997 BITMAP_FREE (bbs);
7000 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7002 bool
7003 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7005 bitmap release_names = (bitmap)data;
7007 if (TREE_CODE (from) != SSA_NAME)
7008 return true;
7010 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7011 return true;
7014 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7015 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7016 single basic block in the original CFG and the new basic block is
7017 returned. DEST_CFUN must not have a CFG yet.
7019 Note that the region need not be a pure SESE region. Blocks inside
7020 the region may contain calls to abort/exit. The only restriction
7021 is that ENTRY_BB should be the only entry point and it must
7022 dominate EXIT_BB.
7024 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7025 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7026 to the new function.
7028 All local variables referenced in the region are assumed to be in
7029 the corresponding BLOCK_VARS and unexpanded variable lists
7030 associated with DEST_CFUN.
7032 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7033 reimplement move_sese_region_to_fn by duplicating the region rather than
7034 moving it. */
7036 basic_block
7037 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7038 basic_block exit_bb, tree orig_block)
7040 vec<basic_block> bbs, dom_bbs;
7041 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7042 basic_block after, bb, *entry_pred, *exit_succ, abb;
7043 struct function *saved_cfun = cfun;
7044 int *entry_flag, *exit_flag;
7045 unsigned *entry_prob, *exit_prob;
7046 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7047 edge e;
7048 edge_iterator ei;
7049 htab_t new_label_map;
7050 hash_map<void *, void *> *eh_map;
7051 struct loop *loop = entry_bb->loop_father;
7052 struct loop *loop0 = get_loop (saved_cfun, 0);
7053 struct move_stmt_d d;
7055 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7056 region. */
7057 gcc_assert (entry_bb != exit_bb
7058 && (!exit_bb
7059 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7061 /* Collect all the blocks in the region. Manually add ENTRY_BB
7062 because it won't be added by dfs_enumerate_from. */
7063 bbs.create (0);
7064 bbs.safe_push (entry_bb);
7065 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7066 #ifdef ENABLE_CHECKING
7067 verify_sese (entry_bb, exit_bb, &bbs);
7068 #endif
7070 /* The blocks that used to be dominated by something in BBS will now be
7071 dominated by the new block. */
7072 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7073 bbs.address (),
7074 bbs.length ());
7076 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7077 the predecessor edges to ENTRY_BB and the successor edges to
7078 EXIT_BB so that we can re-attach them to the new basic block that
7079 will replace the region. */
7080 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7081 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7082 entry_flag = XNEWVEC (int, num_entry_edges);
7083 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7084 i = 0;
7085 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7087 entry_prob[i] = e->probability;
7088 entry_flag[i] = e->flags;
7089 entry_pred[i++] = e->src;
7090 remove_edge (e);
7093 if (exit_bb)
7095 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7096 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7097 exit_flag = XNEWVEC (int, num_exit_edges);
7098 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7099 i = 0;
7100 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7102 exit_prob[i] = e->probability;
7103 exit_flag[i] = e->flags;
7104 exit_succ[i++] = e->dest;
7105 remove_edge (e);
7108 else
7110 num_exit_edges = 0;
7111 exit_succ = NULL;
7112 exit_flag = NULL;
7113 exit_prob = NULL;
7116 /* Switch context to the child function to initialize DEST_FN's CFG. */
7117 gcc_assert (dest_cfun->cfg == NULL);
7118 push_cfun (dest_cfun);
7120 init_empty_tree_cfg ();
7122 /* Initialize EH information for the new function. */
7123 eh_map = NULL;
7124 new_label_map = NULL;
7125 if (saved_cfun->eh)
7127 eh_region region = NULL;
7129 FOR_EACH_VEC_ELT (bbs, i, bb)
7130 region = find_outermost_region_in_block (saved_cfun, bb, region);
7132 init_eh_for_function ();
7133 if (region != NULL)
7135 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7136 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7137 new_label_mapper, new_label_map);
7141 /* Initialize an empty loop tree. */
7142 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7143 init_loops_structure (dest_cfun, loops, 1);
7144 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7145 set_loops_for_fn (dest_cfun, loops);
7147 /* Move the outlined loop tree part. */
7148 num_nodes = bbs.length ();
7149 FOR_EACH_VEC_ELT (bbs, i, bb)
7151 if (bb->loop_father->header == bb)
7153 struct loop *this_loop = bb->loop_father;
7154 struct loop *outer = loop_outer (this_loop);
7155 if (outer == loop
7156 /* If the SESE region contains some bbs ending with
7157 a noreturn call, those are considered to belong
7158 to the outermost loop in saved_cfun, rather than
7159 the entry_bb's loop_father. */
7160 || outer == loop0)
7162 if (outer != loop)
7163 num_nodes -= this_loop->num_nodes;
7164 flow_loop_tree_node_remove (bb->loop_father);
7165 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7166 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7169 else if (bb->loop_father == loop0 && loop0 != loop)
7170 num_nodes--;
7172 /* Remove loop exits from the outlined region. */
7173 if (loops_for_fn (saved_cfun)->exits)
7174 FOR_EACH_EDGE (e, ei, bb->succs)
7176 struct loops *l = loops_for_fn (saved_cfun);
7177 loop_exit **slot
7178 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7179 NO_INSERT);
7180 if (slot)
7181 l->exits->clear_slot (slot);
7186 /* Adjust the number of blocks in the tree root of the outlined part. */
7187 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7189 /* Setup a mapping to be used by move_block_to_fn. */
7190 loop->aux = current_loops->tree_root;
7191 loop0->aux = current_loops->tree_root;
7193 pop_cfun ();
7195 /* Move blocks from BBS into DEST_CFUN. */
7196 gcc_assert (bbs.length () >= 2);
7197 after = dest_cfun->cfg->x_entry_block_ptr;
7198 hash_map<tree, tree> vars_map;
7200 memset (&d, 0, sizeof (d));
7201 d.orig_block = orig_block;
7202 d.new_block = DECL_INITIAL (dest_cfun->decl);
7203 d.from_context = cfun->decl;
7204 d.to_context = dest_cfun->decl;
7205 d.vars_map = &vars_map;
7206 d.new_label_map = new_label_map;
7207 d.eh_map = eh_map;
7208 d.remap_decls_p = true;
7210 if (gimple_in_ssa_p (cfun))
7211 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7213 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7214 set_ssa_default_def (dest_cfun, arg, narg);
7215 vars_map.put (arg, narg);
7218 FOR_EACH_VEC_ELT (bbs, i, bb)
7220 /* No need to update edge counts on the last block. It has
7221 already been updated earlier when we detached the region from
7222 the original CFG. */
7223 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7224 after = bb;
7227 loop->aux = NULL;
7228 loop0->aux = NULL;
7229 /* Loop sizes are no longer correct, fix them up. */
7230 loop->num_nodes -= num_nodes;
7231 for (struct loop *outer = loop_outer (loop);
7232 outer; outer = loop_outer (outer))
7233 outer->num_nodes -= num_nodes;
7234 loop0->num_nodes -= bbs.length () - num_nodes;
7236 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7238 struct loop *aloop;
7239 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7240 if (aloop != NULL)
7242 if (aloop->simduid)
7244 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7245 d.to_context);
7246 dest_cfun->has_simduid_loops = true;
7248 if (aloop->force_vectorize)
7249 dest_cfun->has_force_vectorize_loops = true;
7253 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7254 if (orig_block)
7256 tree block;
7257 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7258 == NULL_TREE);
7259 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7260 = BLOCK_SUBBLOCKS (orig_block);
7261 for (block = BLOCK_SUBBLOCKS (orig_block);
7262 block; block = BLOCK_CHAIN (block))
7263 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7264 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7267 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7268 &vars_map, dest_cfun->decl);
7270 if (new_label_map)
7271 htab_delete (new_label_map);
7272 if (eh_map)
7273 delete eh_map;
7275 if (gimple_in_ssa_p (cfun))
7277 /* We need to release ssa-names in a defined order, so first find them,
7278 and then iterate in ascending version order. */
7279 bitmap release_names = BITMAP_ALLOC (NULL);
7280 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7281 bitmap_iterator bi;
7282 unsigned i;
7283 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7284 release_ssa_name (ssa_name (i));
7285 BITMAP_FREE (release_names);
7288 /* Rewire the entry and exit blocks. The successor to the entry
7289 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7290 the child function. Similarly, the predecessor of DEST_FN's
7291 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7292 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7293 various CFG manipulation function get to the right CFG.
7295 FIXME, this is silly. The CFG ought to become a parameter to
7296 these helpers. */
7297 push_cfun (dest_cfun);
7298 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7299 if (exit_bb)
7300 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7301 pop_cfun ();
7303 /* Back in the original function, the SESE region has disappeared,
7304 create a new basic block in its place. */
7305 bb = create_empty_bb (entry_pred[0]);
7306 if (current_loops)
7307 add_bb_to_loop (bb, loop);
7308 for (i = 0; i < num_entry_edges; i++)
7310 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7311 e->probability = entry_prob[i];
7314 for (i = 0; i < num_exit_edges; i++)
7316 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7317 e->probability = exit_prob[i];
7320 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7321 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7322 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7323 dom_bbs.release ();
7325 if (exit_bb)
7327 free (exit_prob);
7328 free (exit_flag);
7329 free (exit_succ);
7331 free (entry_prob);
7332 free (entry_flag);
7333 free (entry_pred);
7334 bbs.release ();
7336 return bb;
7340 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7343 void
7344 dump_function_to_file (tree fndecl, FILE *file, int flags)
7346 tree arg, var, old_current_fndecl = current_function_decl;
7347 struct function *dsf;
7348 bool ignore_topmost_bind = false, any_var = false;
7349 basic_block bb;
7350 tree chain;
/* TM clones get a "[tm-clone]" marker in the header line below.  */
7351 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7352 && decl_is_tm_clone (fndecl));
7353 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
/* Switch decl context so the printing routines see FNDECL; restored
   before every return below.  */
7355 current_function_decl = fndecl;
7356 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
/* Print the parameter list, comma separated.  */
7358 arg = DECL_ARGUMENTS (fndecl);
7359 while (arg)
7361 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7362 fprintf (file, " ");
7363 print_generic_expr (file, arg, dump_flags);
7364 if (flags & TDF_VERBOSE)
7365 print_node (file, "", arg, 4);
7366 if (DECL_CHAIN (arg))
7367 fprintf (file, ", ");
7368 arg = DECL_CHAIN (arg);
7370 fprintf (file, ")\n");
7372 if (flags & TDF_VERBOSE)
7373 print_node (file, "", fndecl, 2);
7375 dsf = DECL_STRUCT_FUNCTION (fndecl);
7376 if (dsf && (flags & TDF_EH))
7377 dump_eh_tree (file, dsf);
/* A raw dump of a function without a GIMPLE body is just a node dump.  */
7379 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7381 dump_node (fndecl, TDF_SLIM | flags, file);
7382 current_function_decl = old_current_fndecl;
7383 return;
7386 /* When GIMPLE is lowered, the variables are no longer available in
7387 BIND_EXPRs, so display them separately. */
7388 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7390 unsigned ix;
7391 ignore_topmost_bind = true;
7393 fprintf (file, "{\n")
7394 if (!vec_safe_is_empty (fun->local_decls))
7395 FOR_EACH_LOCAL_DECL (fun, ix, var)
7397 print_generic_decl (file, var, flags);
7398 if (flags & TDF_VERBOSE)
7399 print_node (file, "", var, 4);
7400 fprintf (file, "\n");
7402 any_var = true;
/* Also declare SSA names that have no underlying variable.  */
7404 if (gimple_in_ssa_p (cfun))
7405 for (ix = 1; ix < num_ssa_names; ++ix)
7407 tree name = ssa_name (ix);
7408 if (name && !SSA_NAME_VAR (name))
7410 fprintf (file, " ");
7411 print_generic_expr (file, TREE_TYPE (name), flags);
7412 fprintf (file, " ");
7413 print_generic_expr (file, name, flags);
7414 fprintf (file, ";\n");
7416 any_var = true;
7421 if (fun && fun->decl == fndecl
7422 && fun->cfg
7423 && basic_block_info_for_fn (fun))
7425 /* If the CFG has been built, emit a CFG-based dump. */
7426 if (!ignore_topmost_bind)
7427 fprintf (file, "{\n");
7429 if (any_var && n_basic_blocks_for_fn (fun))
7430 fprintf (file, "\n");
7432 FOR_EACH_BB_FN (bb, fun)
7433 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7435 fprintf (file, "}\n");
7437 else if (DECL_SAVED_TREE (fndecl) == NULL)
7439 /* The function is now in GIMPLE form but the CFG has not been
7440 built yet. Emit the single sequence of GIMPLE statements
7441 that make up its body. */
7442 gimple_seq body = gimple_body (fndecl);
/* A body that is exactly one GIMPLE_BIND prints without extra braces.  */
7444 if (gimple_seq_first_stmt (body)
7445 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7446 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7447 print_gimple_seq (file, body, 0, flags);
7448 else
7450 if (!ignore_topmost_bind)
7451 fprintf (file, "{\n");
7453 if (any_var)
7454 fprintf (file, "\n");
7456 print_gimple_seq (file, body, 2, flags);
7457 fprintf (file, "}\n");
7460 else
7462 int indent;
7464 /* Make a tree based dump. */
7465 chain = DECL_SAVED_TREE (fndecl);
7466 if (chain && TREE_CODE (chain) == BIND_EXPR)
7468 if (ignore_topmost_bind)
7470 chain = BIND_EXPR_BODY (chain);
7471 indent = 2;
7473 else
7474 indent = 0;
7476 else
7478 if (!ignore_topmost_bind)
7480 fprintf (file, "{\n");
7481 /* No topmost bind, pretend it's ignored for later. */
7482 ignore_topmost_bind = true;
7484 indent = 2;
7487 if (any_var)
7488 fprintf (file, "\n");
7490 print_generic_stmt_indented (file, chain, flags, indent);
7491 if (ignore_topmost_bind)
7492 fprintf (file, "}\n");
7495 if (flags & TDF_ENUMERATE_LOCALS)
7496 dump_enumerated_decls (file, flags);
7497 fprintf (file, "\n\n");
/* Restore the caller's decl context.  */
7499 current_function_decl = old_current_fndecl;
7502 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7504 DEBUG_FUNCTION void
7505 debug_function (tree fn, int flags)
7507 dump_function_to_file (fn, stderr, flags);
7511 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7513 static void
7514 print_pred_bbs (FILE *file, basic_block bb)
7516 edge e;
7517 edge_iterator ei;
7519 FOR_EACH_EDGE (e, ei, bb->preds)
7520 fprintf (file, "bb_%d ", e->src->index);
7524 /* Print on FILE the indexes for the successors of basic_block BB. */
7526 static void
7527 print_succ_bbs (FILE *file, basic_block bb)
7529 edge e;
7530 edge_iterator ei;
7532 FOR_EACH_EDGE (e, ei, bb->succs)
7533 fprintf (file, "bb_%d ", e->dest->index);
7536 /* Print to FILE the basic block BB following the VERBOSITY level. */
7538 void
7539 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7541 char *s_indent = (char *) alloca ((size_t) indent + 1);
7542 memset ((void *) s_indent, ' ', (size_t) indent);
7543 s_indent[indent] = '\0';
7545 /* Print basic_block's header. */
7546 if (verbosity >= 2)
7548 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7549 print_pred_bbs (file, bb);
7550 fprintf (file, "}, succs = {");
7551 print_succ_bbs (file, bb);
7552 fprintf (file, "})\n");
7555 /* Print basic_block's body. */
7556 if (verbosity >= 3)
7558 fprintf (file, "%s {\n", s_indent);
7559 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7560 fprintf (file, "%s }\n", s_indent);
7564 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7566 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7567 VERBOSITY level this outputs the contents of the loop, or just its
7568 structure. */
7570 static void
7571 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7573 char *s_indent;
7574 basic_block bb;
/* Nothing to print for a null loop.  */
7576 if (loop == NULL)
7577 return;
/* Build the indentation prefix.  */
7579 s_indent = (char *) alloca ((size_t) indent + 1);
7580 memset ((void *) s_indent, ' ', (size_t) indent);
7581 s_indent[indent] = '\0';
7583 /* Print loop's header. */
7584 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7585 if (loop->header)
7586 fprintf (file, "header = %d", loop->header->index);
7587 else
/* A loop without a header block has been deleted; nothing more to
   report about it.  */
7589 fprintf (file, "deleted)\n");
7590 return;
7592 if (loop->latch)
7593 fprintf (file, ", latch = %d", loop->latch->index);
7594 else
7595 fprintf (file, ", multiple latches");
7596 fprintf (file, ", niter = ");
7597 print_generic_expr (file, loop->nb_iterations, 0);
/* Upper bound and estimate are only printed when recorded.  */
7599 if (loop->any_upper_bound)
7601 fprintf (file, ", upper_bound = ");
7602 print_decu (loop->nb_iterations_upper_bound, file);
7605 if (loop->any_estimate)
7607 fprintf (file, ", estimate = ");
7608 print_decu (loop->nb_iterations_estimate, file);
7610 fprintf (file, ")\n");
7612 /* Print loop's body. */
7613 if (verbosity >= 1)
7615 fprintf (file, "%s{\n", s_indent);
/* Print the blocks belonging directly to this loop, then recurse
   into the inner loops with increased indentation.  */
7616 FOR_EACH_BB_FN (bb, cfun)
7617 if (bb->loop_father == loop)
7618 print_loops_bb (file, bb, indent, verbosity);
7620 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7621 fprintf (file, "%s}\n", s_indent);
7625 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7626 spaces. Following VERBOSITY level this outputs the contents of the
7627 loop, or just its structure. */
7629 static void
7630 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7631 int verbosity)
7633 if (loop == NULL)
7634 return;
7636 print_loop (file, loop, indent, verbosity);
7637 print_loop_and_siblings (file, loop->next, indent, verbosity);
7640 /* Follow a CFG edge from the entry point of the program, and on entry
7641 of a loop, pretty print the loop structure on FILE. */
7643 void
7644 print_loops (FILE *file, int verbosity)
7646 basic_block bb;
7648 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7649 if (bb && bb->loop_father)
7650 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7653 /* Dump a loop. */
7655 DEBUG_FUNCTION void
7656 debug (struct loop &ref)
7658 print_loop (stderr, &ref, 0, /*verbosity*/0);
7661 DEBUG_FUNCTION void
7662 debug (struct loop *ptr)
7664 if (ptr)
7665 debug (*ptr);
7666 else
7667 fprintf (stderr, "<nil>\n");
7670 /* Dump a loop verbosely. */
7672 DEBUG_FUNCTION void
7673 debug_verbose (struct loop &ref)
7675 print_loop (stderr, &ref, 0, /*verbosity*/3);
7678 DEBUG_FUNCTION void
7679 debug_verbose (struct loop *ptr)
7681 if (ptr)
7682 debug (*ptr);
7683 else
7684 fprintf (stderr, "<nil>\n");
7688 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7690 DEBUG_FUNCTION void
7691 debug_loops (int verbosity)
7693 print_loops (stderr, verbosity);
7696 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7698 DEBUG_FUNCTION void
7699 debug_loop (struct loop *loop, int verbosity)
7701 print_loop (stderr, loop, 0, verbosity);
7704 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7705 level. */
7707 DEBUG_FUNCTION void
7708 debug_loop_num (unsigned num, int verbosity)
7710 debug_loop (get_loop (cfun, num), verbosity);
7713 /* Return true if BB ends with a call, possibly followed by some
7714 instructions that must stay with the call. Return false,
7715 otherwise. */
7717 static bool
7718 gimple_block_ends_with_call_p (basic_block bb)
7720 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7721 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7725 /* Return true if BB ends with a conditional branch. Return false,
7726 otherwise. */
7728 static bool
7729 gimple_block_ends_with_condjump_p (const_basic_block bb)
7731 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7732 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7736 /* Return true if we need to add fake edge to exit at statement T.
7737 Helper function for gimple_flow_call_edges_add. */
7739 static bool
7740 need_fake_edge_p (gimple t)
7742 tree fndecl = NULL_TREE;
7743 int call_flags = 0;
7745 /* NORETURN and LONGJMP calls already have an edge to exit.
7746 CONST and PURE calls do not need one.
7747 We don't currently check for CONST and PURE here, although
7748 it would be a good idea, because those attributes are
7749 figured out from the RTL in mark_constant_function, and
7750 the counter incrementation code from -fprofile-arcs
7751 leads to different results from -fbranch-probabilities. */
7752 if (is_gimple_call (t))
7754 fndecl = gimple_call_fndecl (t);
7755 call_flags = gimple_call_flags (t);
/* A known built-in that cannot throw and does not return twice is
   assumed not to leave the block abnormally -- with the fork
   exception explained below.  */
7758 if (is_gimple_call (t)
7759 && fndecl
7760 && DECL_BUILT_IN (fndecl)
7761 && (call_flags & ECF_NOTHROW)
7762 && !(call_flags & ECF_RETURNS_TWICE)
7763 /* fork() doesn't really return twice, but the effect of
7764 wrapping it in __gcov_fork() which calls __gcov_flush()
7765 and clears the counters before forking has the same
7766 effect as returning twice. Force a fake edge. */
7767 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7768 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7769 return false;
7771 if (is_gimple_call (t))
7773 edge_iterator ei;
7774 edge e;
7775 basic_block bb;
/* A call that may return normally always needs a fake edge.  */
7777 if (!(call_flags & ECF_NORETURN))
7778 return true;
/* A noreturn call needs one only if its block already has a
   non-fake (e.g. EH or abnormal) successor edge.  */
7780 bb = gimple_bb (t);
7781 FOR_EACH_EDGE (e, ei, bb->succs)
7782 if ((e->flags & EDGE_FAKE) == 0)
7783 return true;
/* Volatile asms and old-style input-only asms also get a fake edge.  */
7786 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7787 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7788 return true;
7790 return false;
7794 /* Add fake edges to the function exit for any non constant and non
7795 noreturn calls (or noreturn calls with EH/abnormal edges),
7796 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7797 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7798 that were split.
7800 The goal is to expose cases in which entering a basic block does
7801 not imply that all subsequent instructions must be executed. */
7803 static int
7804 gimple_flow_call_edges_add (sbitmap blocks)
7806 int i;
7807 int blocks_split = 0;
7808 int last_bb = last_basic_block_for_fn (cfun);
7809 bool check_last_block = false;
/* A function with only the fixed entry/exit blocks needs nothing.  */
7811 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7812 return 0;
7814 if (! blocks)
7815 check_last_block = true;
7816 else
7817 check_last_block = bitmap_bit_p (blocks,
7818 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7820 /* In the last basic block, before epilogue generation, there will be
7821 a fallthru edge to EXIT. Special care is required if the last insn
7822 of the last basic block is a call because make_edge folds duplicate
7823 edges, which would result in the fallthru edge also being marked
7824 fake, which would result in the fallthru edge being removed by
7825 remove_fake_edges, which would result in an invalid CFG.
7827 Moreover, we can't elide the outgoing fake edge, since the block
7828 profiler needs to take this into account in order to solve the minimal
7829 spanning tree in the case that the call doesn't return.
7831 Handle this by adding a dummy instruction in a new last basic block. */
7832 if (check_last_block)
7834 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7835 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7836 gimple t = NULL;
7838 if (!gsi_end_p (gsi))
7839 t = gsi_stmt (gsi);
7841 if (t && need_fake_edge_p (t))
7843 edge e;
7845 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7846 if (e)
/* Insert a nop on the fallthru edge; committing the insert splits
   the edge so the call is no longer the last statement before EXIT.  */
7848 gsi_insert_on_edge (e, gimple_build_nop ());
7849 gsi_commit_edge_inserts ();
7854 /* Now add fake edges to the function exit for any non constant
7855 calls since there is no way that we can determine if they will
7856 return or not... */
7857 for (i = 0; i < last_bb; i++)
7859 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7860 gimple_stmt_iterator gsi;
7861 gimple stmt, last_stmt;
7863 if (!bb)
7864 continue;
7866 if (blocks && !bitmap_bit_p (blocks, i))
7867 continue;
/* Walk the block's non-debug statements from the last backwards,
   splitting after each statement that needs a fake edge.  */
7869 gsi = gsi_last_nondebug_bb (bb);
7870 if (!gsi_end_p (gsi))
7872 last_stmt = gsi_stmt (gsi);
7875 stmt = gsi_stmt (gsi);
7876 if (need_fake_edge_p (stmt))
7878 edge e;
7880 /* The handling above of the final block before the
7881 epilogue should be enough to verify that there is
7882 no edge to the exit block in CFG already.
7883 Calling make_edge in such case would cause us to
7884 mark that edge as fake and remove it later. */
7885 #ifdef ENABLE_CHECKING
7886 if (stmt == last_stmt)
7888 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7889 gcc_assert (e == NULL);
7891 #endif
7893 /* Note that the following may create a new basic block
7894 and renumber the existing basic blocks. */
7895 if (stmt != last_stmt)
7897 e = split_block (bb, stmt);
7898 if (e)
7899 blocks_split++;
7901 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7903 gsi_prev (&gsi);
7905 while (!gsi_end_p (gsi));
/* If any block was split, the CFG changed shape; re-verify it.  */
7909 if (blocks_split)
7910 verify_flow_info ();
7912 return blocks_split;
7915 /* Removes edge E and all the blocks dominated by it, and updates dominance
7916 information. The IL in E->src needs to be updated separately.
7917 If dominance info is not available, only the edge E is removed.*/
7919 void
7920 remove_edge_and_dominated_blocks (edge e)
7922 vec<basic_block> bbs_to_remove = vNULL;
7923 vec<basic_block> bbs_to_fix_dom = vNULL;
7924 bitmap df, df_idom;
7925 edge f;
7926 edge_iterator ei;
7927 bool none_removed = false;
7928 unsigned i;
7929 basic_block bb, dbb;
7930 bitmap_iterator bi;
7932 /* If we are removing a path inside a non-root loop that may change
7933 loop ownership of blocks or remove loops. Mark loops for fixup. */
7934 if (current_loops
7935 && loop_outer (e->src->loop_father) != NULL
7936 && e->src->loop_father == e->dest->loop_father)
7937 loops_state_set (LOOPS_NEED_FIXUP);
/* Without dominator information only the edge itself is removed.  */
7939 if (!dom_info_available_p (CDI_DOMINATORS))
7941 remove_edge (e);
7942 return;
7945 /* No updating is needed for edges to exit. */
7946 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7948 if (cfgcleanup_altered_bbs)
7949 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7950 remove_edge (e);
7951 return;
7954 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7955 that is not dominated by E->dest, then this set is empty. Otherwise,
7956 all the basic blocks dominated by E->dest are removed.
7958 Also, to DF_IDOM we store the immediate dominators of the blocks in
7959 the dominance frontier of E (i.e., of the successors of the
7960 removed blocks, if there are any, and of E->dest otherwise). */
7961 FOR_EACH_EDGE (f, ei, e->dest->preds)
7963 if (f == e)
7964 continue;
7966 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7968 none_removed = true;
7969 break;
7973 df = BITMAP_ALLOC (NULL);
7974 df_idom = BITMAP_ALLOC (NULL);
7976 if (none_removed)
7977 bitmap_set_bit (df_idom,
7978 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7979 else
7981 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* Collect in DF the successors of the removed blocks...  */
7982 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7984 FOR_EACH_EDGE (f, ei, bb->succs)
7986 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7987 bitmap_set_bit (df, f->dest->index);
/* ...minus the removed blocks themselves, leaving the dominance
   frontier of the removed region.  */
7990 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7991 bitmap_clear_bit (df, bb->index);
7993 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7995 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7996 bitmap_set_bit (df_idom,
7997 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8001 if (cfgcleanup_altered_bbs)
8003 /* Record the set of the altered basic blocks. */
8004 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8005 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8008 /* Remove E and the cancelled blocks. */
8009 if (none_removed)
8010 remove_edge (e);
8011 else
8013 /* Walk backwards so as to get a chance to substitute all
8014 released DEFs into debug stmts. See
8015 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8016 details. */
8017 for (i = bbs_to_remove.length (); i-- > 0; )
8018 delete_basic_block (bbs_to_remove[i]);
8021 /* Update the dominance information. The immediate dominator may change only
8022 for blocks whose immediate dominator belongs to DF_IDOM:
8024 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8025 removal. Let Z the arbitrary block such that idom(Z) = Y and
8026 Z dominates X after the removal. Before removal, there exists a path P
8027 from Y to X that avoids Z. Let F be the last edge on P that is
8028 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8029 dominates W, and because of P, Z does not dominate W), and W belongs to
8030 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8031 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8033 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8034 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8035 dbb;
8036 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8037 bbs_to_fix_dom.safe_push (dbb);
8040 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
/* Release the scratch bitmaps and vectors.  */
8042 BITMAP_FREE (df);
8043 BITMAP_FREE (df_idom);
8044 bbs_to_remove.release ();
8045 bbs_to_fix_dom.release ();
8048 /* Purge dead EH edges from basic block BB. */
8050 bool
8051 gimple_purge_dead_eh_edges (basic_block bb)
8053 bool changed = false;
8054 edge e;
8055 edge_iterator ei;
8056 gimple stmt = last_stmt (bb);
/* If the last statement can still throw internally, its EH edges are
   live; nothing to purge.  */
8058 if (stmt && stmt_can_throw_internal (stmt))
8059 return false;
/* Remove every EH successor edge.  The iterator is only advanced when
   no edge is removed, since removal shifts the next edge into the
   current slot.  */
8061 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8063 if (e->flags & EDGE_EH)
8065 remove_edge_and_dominated_blocks (e);
8066 changed = true;
8068 else
8069 ei_next (&ei);
8072 return changed;
8075 /* Purge dead EH edges from basic block listed in BLOCKS. */
8077 bool
8078 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8080 bool changed = false;
8081 unsigned i;
8082 bitmap_iterator bi;
8084 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8086 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8088 /* Earlier gimple_purge_dead_eh_edges could have removed
8089 this basic block already. */
8090 gcc_assert (bb || changed);
8091 if (bb != NULL)
8092 changed |= gimple_purge_dead_eh_edges (bb);
8095 return changed;
8098 /* Purge dead abnormal call edges from basic block BB. */
8100 bool
8101 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8103 bool changed = false;
8104 edge e;
8105 edge_iterator ei;
8106 gimple stmt = last_stmt (bb);
/* Only functions with nonlocal labels or setjmp calls can carry
   abnormal call edges at all.  */
8108 if (!cfun->has_nonlocal_label
8109 && !cfun->calls_setjmp)
8110 return false;
/* If the last statement can still make an abnormal goto, its edges
   are live; nothing to purge.  */
8112 if (stmt && stmt_can_make_abnormal_goto (stmt))
8113 return false;
/* Walk the successors; advance the iterator only when no edge is
   removed, since removal shifts the next edge into this slot.  */
8115 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8117 if (e->flags & EDGE_ABNORMAL)
/* An abnormal fallthru edge is downgraded to a plain fallthru;
   other abnormal edges are removed outright.  */
8119 if (e->flags & EDGE_FALLTHRU)
8120 e->flags &= ~EDGE_ABNORMAL;
8121 else
8122 remove_edge_and_dominated_blocks (e);
8123 changed = true;
8125 else
8126 ei_next (&ei);
8129 return changed;
8132 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8134 bool
8135 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8137 bool changed = false;
8138 unsigned i;
8139 bitmap_iterator bi;
8141 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8143 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8145 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8146 this basic block already. */
8147 gcc_assert (bb || changed);
8148 if (bb != NULL)
8149 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8152 return changed;
8155 /* This function is called whenever a new edge is created or
8156 redirected. */
8158 static void
8159 gimple_execute_on_growing_pred (edge e)
8161 basic_block bb = e->dest;
8163 if (!gimple_seq_empty_p (phi_nodes (bb)))
8164 reserve_phi_args_for_new_edge (bb);
8167 /* This function is called immediately before edge E is removed from
8168 the edge vector E->dest->preds. */
8170 static void
8171 gimple_execute_on_shrinking_pred (edge e)
8173 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8174 remove_phi_args (e);
8177 /*---------------------------------------------------------------------------
8178 Helper functions for Loop versioning
8179 ---------------------------------------------------------------------------*/
8181 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8182 of 'first'. Both of them are dominated by 'new_head' basic block. When
8183 'new_head' was created by 'second's incoming edge it received phi arguments
8184 on the edge by split_edge(). Later, additional edge 'e' was created to
8185 connect 'new_head' and 'first'. Now this routine adds phi args on this
8186 additional edge 'e' that new_head to second edge received as part of edge
8187 splitting. */
8189 static void
8190 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8191 basic_block new_head, edge e)
8193 gphi *phi1, *phi2;
8194 gphi_iterator psi1, psi2;
8195 tree def;
8196 edge e2 = find_edge (new_head, second);
8198 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8199 edge, we should always have an edge from NEW_HEAD to SECOND. */
8200 gcc_assert (e2 != NULL);
8202 /* Browse all 'second' basic block phi nodes and add phi args to
8203 edge 'e' for 'first' head. PHI args are always in correct order. */
8205 for (psi2 = gsi_start_phis (second),
8206 psi1 = gsi_start_phis (first);
8207 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8208 gsi_next (&psi2), gsi_next (&psi1))
8210 phi1 = psi1.phi ();
8211 phi2 = psi2.phi ();
/* Copy the value SECOND's PHI receives from NEW_HEAD onto edge E of
   FIRST's corresponding PHI, preserving its source location.  */
8212 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8213 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8218 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8219 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8220 the destination of the ELSE part. */
8222 static void
8223 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8224 basic_block second_head ATTRIBUTE_UNUSED,
8225 basic_block cond_bb, void *cond_e)
8227 gimple_stmt_iterator gsi;
8228 gimple new_cond_expr;
/* COND_E arrives through the generic cfg-hook interface as void *.  */
8229 tree cond_expr = (tree) cond_e;
8230 edge e0;
8232 /* Build new conditional expr */
8233 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8234 NULL_TREE, NULL_TREE);
8236 /* Add new cond in cond_bb. */
8237 gsi = gsi_last_bb (cond_bb);
8238 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8240 /* Adjust edges appropriately to connect new head with first head
8241 as well as second head. */
8242 e0 = single_succ_edge (cond_bb);
/* COND_BB's lone successor edge stops being a fallthru and becomes the
   FALSE (else) edge of the new condition.  */
8243 e0->flags &= ~EDGE_FALLTHRU;
8244 e0->flags |= EDGE_FALSE_VALUE;
8248 /* Do book-keeping of basic block BB for the profile consistency checker.
8249 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8250 then do post-pass accounting. Store the counting in RECORD. */
8251 static void
8252 gimple_account_profile_record (basic_block bb, int after_pass,
8253 struct profile_record *record)
8255 gimple_stmt_iterator i;
8256 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
/* Size is accumulated for every statement unconditionally.  */
8258 record->size[after_pass]
8259 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
/* Time is weighted by the block's profile count when a real profile
   was read, or by the guessed block frequency otherwise.  */
8260 if (profile_status_for_fn (cfun) == PROFILE_READ)
8261 record->time[after_pass]
8262 += estimate_num_insns (gsi_stmt (i),
8263 &eni_time_weights) * bb->count;
8264 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8265 record->time[after_pass]
8266 += estimate_num_insns (gsi_stmt (i),
8267 &eni_time_weights) * bb->frequency;
/* The CFG hooks table for functions in GIMPLE form; see cfghooks.h for
   the contract of each entry.  */
8271 struct cfg_hooks gimple_cfg_hooks = {
8272 "gimple",
8273 gimple_verify_flow_info, /* verify_flow_info */
8274 gimple_dump_bb, /* dump_bb */
8275 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8276 create_bb, /* create_basic_block */
8277 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8278 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8279 gimple_can_remove_branch_p, /* can_remove_branch_p */
8280 remove_bb, /* delete_basic_block */
8281 gimple_split_block, /* split_block */
8282 gimple_move_block_after, /* move_block_after */
8283 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8284 gimple_merge_blocks, /* merge_blocks */
8285 gimple_predict_edge, /* predict_edge */
8286 gimple_predicted_by_p, /* predicted_by_p */
8287 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8288 gimple_duplicate_bb, /* duplicate_block */
8289 gimple_split_edge, /* split_edge */
8290 gimple_make_forwarder_block, /* make_forward_block */
8291 NULL, /* tidy_fallthru_edge */
8292 NULL, /* force_nonfallthru */
8293 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8294 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8295 gimple_flow_call_edges_add, /* flow_call_edges_add */
8296 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8297 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8298 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8299 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8300 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8301 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8302 flush_pending_stmts, /* flush_pending_stmts */
8303 gimple_empty_block_p, /* block_empty_p */
8304 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8305 gimple_account_profile_record, /* account_profile_record */
/* Split all critical edges.  Returns 0 (no TODO flags needed).
   Abnormal edges are never split — they cannot carry inserted code.  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split when the source block ends in a control
		 statement other than a plain return/__builtin_return,
		 since insertion cannot go after such statements.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge splitting pass ("crited").
   Provides PROP_no_crit_edges for passes that require a CFG without
   critical edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Cloneable so the pass can appear multiple times in the pipeline.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace
/* Factory for the critical-edge splitting pass; called by the pass
   manager when building the pass pipeline.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   The new block is taken on the true edge; the fall-through
   continuation becomes the false edge.  Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: the old fall-through is now the false arm.  */
  fall->flags = EDGE_FALSE_VALUE;

  /* Update dominance info: BB now dominates both the new block and the
     join point.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
8434 /* Build a ternary operation and gimplify it. Emit code before GSI.
8435 Return the gimple_val holding the result. */
8437 tree
8438 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8439 tree type, tree a, tree b, tree c)
8441 tree ret;
8442 location_t loc = gimple_location (gsi_stmt (*gsi));
8444 ret = fold_build3_loc (loc, code, type, a, b, c);
8445 STRIP_NOPS (ret);
8447 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8448 GSI_SAME_STMT);
8451 /* Build a binary operation and gimplify it. Emit code before GSI.
8452 Return the gimple_val holding the result. */
8454 tree
8455 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8456 tree type, tree a, tree b)
8458 tree ret;
8460 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8461 STRIP_NOPS (ret);
8463 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8464 GSI_SAME_STMT);
8467 /* Build a unary operation and gimplify it. Emit code before GSI.
8468 Return the gimple_val holding the result. */
8470 tree
8471 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8472 tree a)
8474 tree ret;
8476 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8477 STRIP_NOPS (ret);
8479 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8480 GSI_SAME_STMT);
8485 /* Given a basic block B which ends with a conditional and has
8486 precisely two successors, determine which of the edges is taken if
8487 the conditional is true and which is taken if the conditional is
8488 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8490 void
8491 extract_true_false_edges_from_block (basic_block b,
8492 edge *true_edge,
8493 edge *false_edge)
8495 edge e = EDGE_SUCC (b, 0);
8497 if (e->flags & EDGE_TRUE_VALUE)
8499 *true_edge = e;
8500 *false_edge = EDGE_SUCC (b, 1);
8502 else
8504 *false_edge = e;
8505 *true_edge = EDGE_SUCC (b, 1);
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

/* Warn about a noreturn function that can return, and about control
   reaching the end of a non-void function without a value.  */
unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* The target may suppress these warnings (e.g. for naked functions).  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE on
     the FUNCTION_DECL here corresponds to the noreturn attribute, per the
     warning text below.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Prefer the location of an explicit return statement, if any.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace
/* Factory for the return-warnings pass.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into the
   bodies of container statements (bind/try/catch/eh-filter).  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Containers: recurse into every nested sequence.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call whose value is used, or an internal function,
	     never warrants the warning.  */
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  /* The attribute lives on the function TYPE, so indirect calls
	     are covered too; FDECL (if known) only improves the message.  */
	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

/* Pass descriptor for the -Wunused-result warning pass.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when -Wunused-result is in effect.  */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace
/* Factory for the -Wunused-result warning pass.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a mask of TODO_* flags for the pass manager.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale all profile counts so that the function entry matches the
     count recorded in the cgraph node (which IPA may have changed).  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      /* Note: gsi_next is at the bottom of the loop body because
	 statement removal below must not advance the iterator.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* Calls newly discovered const/pure cannot take abnormal
		 edges; purge them and refresh SSA operands.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* Calls newly discovered noreturn end their block.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  /* gsi already points at the next statement.  */
		  continue;
		}
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  /* Counts changed; the hot/cold classification may have too.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

/* Pass descriptor for the CFG fixup pass run after IPA transforms.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the fixup can be scheduled at several pipeline points.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace
/* Factory for the CFG fixup pass.  */
gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* GC-mark an insn pointer by marking the underlying rtx, if non-null.  */
static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}
/* GC marking for an edge: mark its source and destination blocks, its
   pending statement insertions (gimple or RTL depending on the current
   IR type), and the BLOCK of its goto locus.  */
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* PCH-note an insn pointer by noting the underlying rtx, if non-null.  */
static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}
/* PCH walking for an edge: note its source and destination blocks, its
   pending statement insertions (gimple or RTL depending on the current
   IR type), and the BLOCK of its goto locus.  */
void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* PCH pointer-relocation for an edge: apply OP (with COOKIE) to every
   GC-managed pointer the edge holds, mirroring the fields walked by
   gt_pch_nx (edge_def *) above.  */
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}