gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
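
/* For illustration (an editorial sketch of the mechanism above, see
   get_cases_for_edge below for the authoritative code): given

     switch (x) { case 1: case 2: goto L1; case 3: goto L2; }

   both `case 1' and `case 2' reach the same edge E to L1's block, so
   both CASE_LABEL_EXPRs end up linked on one CASE_CHAIN list keyed by E.
   Redirecting E then updates every affected case in a single lookup.  */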
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
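
/* For illustration (an editorial example, not compiled code): in

     void *targets[] = { &&L1, &&L2 };
     goto *targets[i];     destination is a dereferenced pointer,
                           not a LABEL_DECL, so computed_goto_p is true
     goto L1;              destination is a LABEL_DECL, so it is false

   only the first goto is "computed" and needs abnormal-edge handling.  */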
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
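
/* For illustration (an editorial example of the LHS rewrite above): a
   returns-twice call such as

     x = setjmp (env);

   is split into

     tmp = setjmp (env);
     x = tmp;

   so the old value of X stays available on the abnormal edge out of the
   call, avoiding overlapping life-ranges for abnormal SSA names.  */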
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
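
/* Worked example (editorial note): the growth policy above enlarges the
   array by roughly 25%; e.g. with last_basic_block_for_fn == 20 the new
   size is 20 + (20 + 3) / 4 = 25.  */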
/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (stmt);
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (cond_stmt),
                                  gimple_cond_rhs (cond_stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (cond_stmt);
          else if (onep)
            gimple_cond_make_true (cond_stmt);
        }
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
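
/* For illustration (an editorial sketch of the factoring performed
   above):

     bb1: goto *p1;        bb1: gotovar = p1; goto <factored>;
     bb2: goto *p2;   =>   bb2: gotovar = p2; goto <factored>;
                           <factored>: goto *gotovar;

   Only the single factored block then needs abnormal edges to every
   potential target label, instead of every computed-goto block needing
   edges to every label, keeping the edge count linear rather than
   quadratic.  */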
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        tree abort_label
          = gimple_transaction_label (as_a <gtransaction *> (last));
        if (abort_label)
          make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
        fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
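
/* For illustration (an editorial example): a one-line statement such as

     if (p) q = 0; else r = 0;

   puts the condition and both arms on the same line, so they share one
   locus.  Assigning the blocks distinct discriminators lets sample-based
   profilers attribute execution counts to each arm of the line
   separately.  */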
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
            tree label = gimple_transaction_label (trans_stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (trans_stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
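
/* For illustration (an editorial example of the merge above):

     switch (x) {               switch (x) {
     case 1: goto A;            case 1 ... 3: goto A;
     case 2: goto A;     =>     default: goto D;
     case 3: goto A;            }
     default: goto D;
     }

   Three consecutive cases sharing a destination collapse into a single
   range label, shrinking the switch's label vector and edge count.  */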
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends with a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
          || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
          && stmt_ends_bb_p (stmt))
        bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
                                       PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL
                  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
        {
          tree label = gimple_label_label (label_stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels keep around in a form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2048 /* Return the one of BB's two successors that is not reachable via a
2049 complex edge, if there is one. Otherwise, return BB. We use
2050 this in optimizations that use post-dominators for their heuristics,
2051 to catch the cases in C++ where function calls are involved. */
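/* E.g. (illustrative): a block ending in a call that may throw has one
   normal successor and one EH (complex) edge; this returns the block
   reached through the non-complex edge.  */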
2053 basic_block
2054 single_noncomplex_succ (basic_block bb)
2056 edge e0, e1;
2057 if (EDGE_COUNT (bb->succs) != 2)
2058 return bb;
2060 e0 = EDGE_SUCC (bb, 0);
2061 e1 = EDGE_SUCC (bb, 1);
2062 if (e0->flags & EDGE_COMPLEX)
2063 return e1->dest;
2064 if (e1->flags & EDGE_COMPLEX)
2065 return e0->dest;
2067 return bb;
2070 /* CALL is a GIMPLE_CALL statement. Set the current_function_calls_* flags. */
2072 void
2073 notice_special_calls (gcall *call)
2075 int flags = gimple_call_flags (call);
2077 if (flags & ECF_MAY_BE_ALLOCA)
2078 cfun->calls_alloca = true;
2079 if (flags & ECF_RETURNS_TWICE)
2080 cfun->calls_setjmp = true;
2084 /* Clear flags set by notice_special_calls. Used by dead code removal
2085 to update the flags. */
2087 void
2088 clear_special_calls (void)
2090 cfun->calls_alloca = false;
2091 cfun->calls_setjmp = false;
2094 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2096 static void
2097 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2099 /* Since this block is no longer reachable, we can just delete all
2100 of its PHI nodes. */
2101 remove_phi_nodes (bb);
2103 /* Remove edges to BB's successors. */
2104 while (EDGE_COUNT (bb->succs) > 0)
2105 remove_edge (EDGE_SUCC (bb, 0));
2109 /* Remove statements of basic block BB. */
2111 static void
2112 remove_bb (basic_block bb)
2114 gimple_stmt_iterator i;
2116 if (dump_file)
2118 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2119 if (dump_flags & TDF_DETAILS)
2121 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2122 fprintf (dump_file, "\n");
2126 if (current_loops)
2128 struct loop *loop = bb->loop_father;
2130 /* If a loop gets removed, clean up the information associated
2131 with it. */
2132 if (loop->latch == bb
2133 || loop->header == bb)
2134 free_numbers_of_iterations_estimates_loop (loop);
2137 /* Remove all the instructions in the block. */
2138 if (bb_seq (bb) != NULL)
2140 /* Walk backwards so as to get a chance to substitute all
2141 released DEFs into debug stmts. See
2142 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2143 details. */
2144 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2146 gimple stmt = gsi_stmt (i);
2147 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2148 if (label_stmt
2149 && (FORCED_LABEL (gimple_label_label (label_stmt))
2150 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2152 basic_block new_bb;
2153 gimple_stmt_iterator new_gsi;
2155 /* A non-reachable non-local label may still be referenced.
2156 But it no longer needs to carry the extra semantics of
2157 non-locality. */
2158 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2160 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2161 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2164 new_bb = bb->prev_bb;
2165 new_gsi = gsi_start_bb (new_bb);
2166 gsi_remove (&i, false);
2167 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2169 else
2171 /* Release SSA definitions if we are in SSA. Note that we
2172 may be called when not in SSA. For example,
2173 final_cleanup calls this function via
2174 cleanup_tree_cfg. */
2175 if (gimple_in_ssa_p (cfun))
2176 release_defs (stmt);
2178 gsi_remove (&i, true);
2181 if (gsi_end_p (i))
2182 i = gsi_last_bb (bb);
2183 else
2184 gsi_prev (&i);
2188 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2189 bb->il.gimple.seq = NULL;
2190 bb->il.gimple.phi_nodes = NULL;
2194 /* Given a basic block BB ending in a GIMPLE_COND, GIMPLE_SWITCH or
2195 computed GOTO, and a predicate value VAL, return the edge that will be
2196 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
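/* As an illustrative example (hypothetical GIMPLE): for a block ending in
   if (x_1 != 0), VAL == 0 selects the false edge and any nonzero constant
   the true edge; a VAL that is not a gimple invariant yields NULL.  */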
2198 edge
2199 find_taken_edge (basic_block bb, tree val)
2201 gimple stmt;
2203 stmt = last_stmt (bb);
2205 gcc_assert (stmt);
2206 gcc_assert (is_ctrl_stmt (stmt));
2208 if (val == NULL)
2209 return NULL;
2211 if (!is_gimple_min_invariant (val))
2212 return NULL;
2214 if (gimple_code (stmt) == GIMPLE_COND)
2215 return find_taken_edge_cond_expr (bb, val);
2217 if (gimple_code (stmt) == GIMPLE_SWITCH)
2218 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2220 if (computed_goto_p (stmt))
2222 /* Only optimize if the argument is a label; if the argument is
2223 not a label then we cannot construct a proper CFG.
2225 It may be the case that we only need to allow the LABEL_REF to
2226 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2227 appear inside a LABEL_EXPR just to be safe. */
2228 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2229 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2230 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2231 return NULL;
2234 gcc_unreachable ();
2237 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2238 statement, determine which of the outgoing edges will be taken out of the
2239 block. Return NULL if any edge may be taken. */
2241 static edge
2242 find_taken_edge_computed_goto (basic_block bb, tree val)
2244 basic_block dest;
2245 edge e = NULL;
2247 dest = label_to_block (val);
2248 if (dest)
2250 e = find_edge (bb, dest);
2251 gcc_assert (e != NULL);
2254 return e;
2257 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2258 statement, determine which of the two edges will be taken out of the
2259 block. Return NULL if either edge may be taken. */
2261 static edge
2262 find_taken_edge_cond_expr (basic_block bb, tree val)
2264 edge true_edge, false_edge;
2266 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2268 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2269 return (integer_zerop (val) ? false_edge : true_edge);
2272 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2273 statement, determine which edge will be taken out of the block. Return
2274 NULL if any edge may be taken. */
2276 static edge
2277 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2278 tree val)
2280 basic_block dest_bb;
2281 edge e;
2282 tree taken_case;
2284 taken_case = find_case_label_for_value (switch_stmt, val);
2285 dest_bb = label_to_block (CASE_LABEL (taken_case));
2287 e = find_edge (bb, dest_bb);
2288 gcc_assert (e);
2289 return e;
2293 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2294 We can make optimal use here of the fact that the case labels are
2295 sorted: We can do a binary search for a case matching VAL. */
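/* A sketch of the search invariant below (assuming the usual GIMPLE_SWITCH
   layout, with the default label at index 0): the loop keeps LOW at the
   last label whose CASE_LOW is not greater than VAL, narrowing the range
   [LOW, HIGH) and returning as soon as a label covering VAL is found.  */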
2297 static tree
2298 find_case_label_for_value (gswitch *switch_stmt, tree val)
2300 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2301 tree default_case = gimple_switch_default_label (switch_stmt);
2303 for (low = 0, high = n; high - low > 1; )
2305 size_t i = (high + low) / 2;
2306 tree t = gimple_switch_label (switch_stmt, i);
2307 int cmp;
2309 /* Cache the result of comparing CASE_LOW and val. */
2310 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2312 if (cmp > 0)
2313 high = i;
2314 else
2315 low = i;
2317 if (CASE_HIGH (t) == NULL)
2319 /* A single-valued case label. */
2320 if (cmp == 0)
2321 return t;
2323 else
2325 /* A case range. We can only handle integer ranges. */
2326 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2327 return t;
2331 return default_case;
2335 /* Dump a basic block on stderr. */
2337 void
2338 gimple_debug_bb (basic_block bb)
2340 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2344 /* Dump basic block with index N on stderr. */
2346 basic_block
2347 gimple_debug_bb_n (int n)
2349 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2350 return BASIC_BLOCK_FOR_FN (cfun, n);
2354 /* Dump the CFG on stderr.
2356 FLAGS are the same as those used by the tree dumping functions
2357 (see TDF_* in dumpfile.h). */
2359 void
2360 gimple_debug_cfg (int flags)
2362 gimple_dump_cfg (stderr, flags);
2366 /* Dump the program showing basic block boundaries on the given FILE.
2368 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2369 tree.h). */
2371 void
2372 gimple_dump_cfg (FILE *file, int flags)
2374 if (flags & TDF_DETAILS)
2376 dump_function_header (file, current_function_decl, flags);
2377 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2378 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2379 last_basic_block_for_fn (cfun));
2381 brief_dump_cfg (file, flags | TDF_COMMENT);
2382 fprintf (file, "\n");
2385 if (flags & TDF_STATS)
2386 dump_cfg_stats (file);
2388 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2392 /* Dump CFG statistics on FILE. */
2394 void
2395 dump_cfg_stats (FILE *file)
2397 static long max_num_merged_labels = 0;
2398 unsigned long size, total = 0;
2399 long num_edges;
2400 basic_block bb;
2401 const char * const fmt_str = "%-30s%-13s%12s\n";
2402 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2403 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2404 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2405 const char *funcname = current_function_name ();
2407 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2409 fprintf (file, "---------------------------------------------------------\n");
2410 fprintf (file, fmt_str, "", " Number of ", "Memory");
2411 fprintf (file, fmt_str, "", " instances ", "used ");
2412 fprintf (file, "---------------------------------------------------------\n");
2414 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2415 total += size;
2416 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2417 SCALE (size), LABEL (size));
2419 num_edges = 0;
2420 FOR_EACH_BB_FN (bb, cfun)
2421 num_edges += EDGE_COUNT (bb->succs);
2422 size = num_edges * sizeof (struct edge_def);
2423 total += size;
2424 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2426 fprintf (file, "---------------------------------------------------------\n");
2427 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2428 LABEL (total));
2429 fprintf (file, "---------------------------------------------------------\n");
2430 fprintf (file, "\n");
2432 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2433 max_num_merged_labels = cfg_stats.num_merged_labels;
2435 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2436 cfg_stats.num_merged_labels, max_num_merged_labels);
2438 fprintf (file, "\n");
2442 /* Dump CFG statistics on stderr. Keep extern so that it's always
2443 linked in the final executable. */
2445 DEBUG_FUNCTION void
2446 debug_cfg_stats (void)
2448 dump_cfg_stats (stderr);
2451 /*---------------------------------------------------------------------------
2452 Miscellaneous helpers
2453 ---------------------------------------------------------------------------*/
2455 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2456 flow. Transfers of control flow associated with EH are excluded. */
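/* E.g. (an illustrative scenario): a side-effecting call to a non-leaf
   function can return twice via longjmp, or jump to a non-local label if
   the current function defines one; such calls need abnormal edges.  */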
2458 static bool
2459 call_can_make_abnormal_goto (gimple t)
2461 /* If the function has no non-local labels, then a call cannot make an
2462 abnormal transfer of control. */
2463 if (!cfun->has_nonlocal_label
2464 && !cfun->calls_setjmp)
2465 return false;
2467 /* Likewise if the call has no side effects. */
2468 if (!gimple_has_side_effects (t))
2469 return false;
2471 /* Likewise if the called function is leaf. */
2472 if (gimple_call_flags (t) & ECF_LEAF)
2473 return false;
2475 return true;
2479 /* Return true if T can make an abnormal transfer of control flow.
2480 Transfers of control flow associated with EH are excluded. */
2482 bool
2483 stmt_can_make_abnormal_goto (gimple t)
2485 if (computed_goto_p (t))
2486 return true;
2487 if (is_gimple_call (t))
2488 return call_can_make_abnormal_goto (t);
2489 return false;
2493 /* Return true if T represents a stmt that always transfers control. */
2495 bool
2496 is_ctrl_stmt (gimple t)
2498 switch (gimple_code (t))
2500 case GIMPLE_COND:
2501 case GIMPLE_SWITCH:
2502 case GIMPLE_GOTO:
2503 case GIMPLE_RETURN:
2504 case GIMPLE_RESX:
2505 return true;
2506 default:
2507 return false;
2512 /* Return true if T is a statement that may alter the flow of control
2513 (e.g., a call to a non-returning function). */
2515 bool
2516 is_ctrl_altering_stmt (gimple t)
2518 gcc_assert (t);
2520 switch (gimple_code (t))
2522 case GIMPLE_CALL:
2523 /* The per-stmt call flag indicates whether the call could alter
2524 control flow. */
2525 if (gimple_call_ctrl_altering_p (t))
2526 return true;
2527 break;
2529 case GIMPLE_EH_DISPATCH:
2530 /* EH_DISPATCH branches to the individual catch handlers at
2531 this level of a try or allowed-exceptions region. It can
2532 fallthru to the next statement as well. */
2533 return true;
2535 case GIMPLE_ASM:
2536 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2537 return true;
2538 break;
2540 CASE_GIMPLE_OMP:
2541 /* OpenMP directives alter control flow. */
2542 return true;
2544 case GIMPLE_TRANSACTION:
2545 /* A transaction start alters control flow. */
2546 return true;
2548 default:
2549 break;
2552 /* If a statement can throw, it alters control flow. */
2553 return stmt_can_throw_internal (t);
2557 /* Return true if T is a simple local goto. */
2559 bool
2560 simple_goto_p (gimple t)
2562 return (gimple_code (t) == GIMPLE_GOTO
2563 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2567 /* Return true if STMT should start a new basic block. PREV_STMT is
2568 the statement preceding STMT. It is used when STMT is a label or a
2569 case label. Labels should only start a new basic block if their
2570 previous statement wasn't a label. Otherwise, a sequence of labels
2571 would generate unnecessary basic blocks that only contain a single
2572 label. */
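/* For instance (hypothetical input): in the sequence
     L1: L2: x_1 = 1;
   only L1 opens a new basic block; L2 is merged into the same block and
   counted in cfg_stats.num_merged_labels.  */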
2574 static inline bool
2575 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2577 if (stmt == NULL)
2578 return false;
2580 /* Labels start a new basic block only if the preceding statement
2581 wasn't a label of the same type. This prevents the creation of
2582 consecutive blocks that have nothing but a single label. */
2583 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2585 /* Nonlocal and computed GOTO targets always start a new block. */
2586 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2587 || FORCED_LABEL (gimple_label_label (label_stmt)))
2588 return true;
2590 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2592 if (DECL_NONLOCAL (gimple_label_label (
2593 as_a <glabel *> (prev_stmt))))
2594 return true;
2596 cfg_stats.num_merged_labels++;
2597 return false;
2599 else
2600 return true;
2602 else if (gimple_code (stmt) == GIMPLE_CALL
2603 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2604 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2605 start a new block. */
2606 return true;
2608 return false;
2612 /* Return true if T should end a basic block. */
2614 bool
2615 stmt_ends_bb_p (gimple t)
2617 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2620 /* Remove block annotations and other data structures. */
2622 void
2623 delete_tree_cfg_annotations (void)
2625 vec_free (label_to_block_map_for_fn (cfun));
2629 /* Return the first statement in basic block BB. */
2631 gimple
2632 first_stmt (basic_block bb)
2634 gimple_stmt_iterator i = gsi_start_bb (bb);
2635 gimple stmt = NULL;
2637 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2639 gsi_next (&i);
2640 stmt = NULL;
2642 return stmt;
2645 /* Return the first non-label statement in basic block BB. */
2647 static gimple
2648 first_non_label_stmt (basic_block bb)
2650 gimple_stmt_iterator i = gsi_start_bb (bb);
2651 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2652 gsi_next (&i);
2653 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2656 /* Return the last statement in basic block BB. */
2658 gimple
2659 last_stmt (basic_block bb)
2661 gimple_stmt_iterator i = gsi_last_bb (bb);
2662 gimple stmt = NULL;
2664 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2666 gsi_prev (&i);
2667 stmt = NULL;
2669 return stmt;
2672 /* Return the last statement of an otherwise empty block. Return NULL
2673 if the block is totally empty, or if it contains more than one
2674 statement. */
2676 gimple
2677 last_and_only_stmt (basic_block bb)
2679 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2680 gimple last, prev;
2682 if (gsi_end_p (i))
2683 return NULL;
2685 last = gsi_stmt (i);
2686 gsi_prev_nondebug (&i);
2687 if (gsi_end_p (i))
2688 return last;
2690 /* Empty statements should no longer appear in the instruction stream.
2691 Everything that might have appeared before should be deleted by
2692 remove_useless_stmts, and the optimizers should just gsi_remove
2693 instead of smashing with build_empty_stmt.
2695 Thus the only thing that should appear here in a block containing
2696 one executable statement is a label. */
2697 prev = gsi_stmt (i);
2698 if (gimple_code (prev) == GIMPLE_LABEL)
2699 return last;
2700 else
2701 return NULL;
2704 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2706 static void
2707 reinstall_phi_args (edge new_edge, edge old_edge)
2709 edge_var_map *vm;
2710 int i;
2711 gphi_iterator phis;
2713 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2714 if (!v)
2715 return;
2717 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2718 v->iterate (i, &vm) && !gsi_end_p (phis);
2719 i++, gsi_next (&phis))
2721 gphi *phi = phis.phi ();
2722 tree result = redirect_edge_var_map_result (vm);
2723 tree arg = redirect_edge_var_map_def (vm);
2725 gcc_assert (result == gimple_phi_result (phi));
2727 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2730 redirect_edge_var_map_clear (old_edge);
2733 /* Returns the basic block after which the new basic block created
2734 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2735 near its "logical" location. This is of most help to humans looking
2736 at debugging dumps. */
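/* E.g. (illustrative): when splitting SRC -> DEST and DEST's layout
   predecessor already has a non-complex edge to DEST, the new block is
   placed right after SRC; otherwise it goes just before DEST.  */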
2738 basic_block
2739 split_edge_bb_loc (edge edge_in)
2741 basic_block dest = edge_in->dest;
2742 basic_block dest_prev = dest->prev_bb;
2744 if (dest_prev)
2746 edge e = find_edge (dest_prev, dest);
2747 if (e && !(e->flags & EDGE_COMPLEX))
2748 return edge_in->src;
2750 return dest_prev;
2753 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2754 Abort on abnormal edges. */
2756 static basic_block
2757 gimple_split_edge (edge edge_in)
2759 basic_block new_bb, after_bb, dest;
2760 edge new_edge, e;
2762 /* Abnormal edges cannot be split. */
2763 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2765 dest = edge_in->dest;
2767 after_bb = split_edge_bb_loc (edge_in);
2769 new_bb = create_empty_bb (after_bb);
2770 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2771 new_bb->count = edge_in->count;
2772 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2773 new_edge->probability = REG_BR_PROB_BASE;
2774 new_edge->count = edge_in->count;
2776 e = redirect_edge_and_branch (edge_in, new_bb);
2777 gcc_assert (e == edge_in);
2778 reinstall_phi_args (new_edge, e);
2780 return new_bb;
2784 /* Verify properties of the address expression T with base object BASE. */
2786 static tree
2787 verify_address (tree t, tree base)
2789 bool old_constant;
2790 bool old_side_effects;
2791 bool new_constant;
2792 bool new_side_effects;
2794 old_constant = TREE_CONSTANT (t);
2795 old_side_effects = TREE_SIDE_EFFECTS (t);
2797 recompute_tree_invariant_for_addr_expr (t);
2798 new_side_effects = TREE_SIDE_EFFECTS (t);
2799 new_constant = TREE_CONSTANT (t);
2801 if (old_constant != new_constant)
2803 error ("constant not recomputed when ADDR_EXPR changed");
2804 return t;
2806 if (old_side_effects != new_side_effects)
2808 error ("side effects not recomputed when ADDR_EXPR changed");
2809 return t;
2812 if (!(TREE_CODE (base) == VAR_DECL
2813 || TREE_CODE (base) == PARM_DECL
2814 || TREE_CODE (base) == RESULT_DECL))
2815 return NULL_TREE;
2817 if (DECL_GIMPLE_REG_P (base))
2819 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2820 return base;
2823 return NULL_TREE;
2826 /* Callback for walk_tree; check that all elements with address taken are
2827 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2828 inside a PHI node. */
2830 static tree
2831 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2833 tree t = *tp, x;
2835 if (TYPE_P (t))
2836 *walk_subtrees = 0;
2838 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2839 #define CHECK_OP(N, MSG) \
2840 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2841 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2843 switch (TREE_CODE (t))
2845 case SSA_NAME:
2846 if (SSA_NAME_IN_FREE_LIST (t))
2848 error ("SSA name in freelist but still referenced");
2849 return *tp;
2851 break;
2853 case INDIRECT_REF:
2854 error ("INDIRECT_REF in gimple IL");
2855 return t;
2857 case MEM_REF:
2858 x = TREE_OPERAND (t, 0);
2859 if (!POINTER_TYPE_P (TREE_TYPE (x))
2860 || !is_gimple_mem_ref_addr (x))
2862 error ("invalid first operand of MEM_REF");
2863 return x;
2865 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2866 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2868 error ("invalid offset operand of MEM_REF");
2869 return TREE_OPERAND (t, 1);
2871 if (TREE_CODE (x) == ADDR_EXPR
2872 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2873 return x;
2874 *walk_subtrees = 0;
2875 break;
2877 case ASSERT_EXPR:
2878 x = fold (ASSERT_EXPR_COND (t));
2879 if (x == boolean_false_node)
2881 error ("ASSERT_EXPR with an always-false condition");
2882 return *tp;
2884 break;
2886 case MODIFY_EXPR:
2887 error ("MODIFY_EXPR not expected while having tuples");
2888 return *tp;
2890 case ADDR_EXPR:
2892 tree tem;
2894 gcc_assert (is_gimple_address (t));
2896 /* Skip any references (they will be checked when we recurse down the
2897 tree) and ensure that any variable used as a prefix is marked
2898 addressable. */
2899 for (x = TREE_OPERAND (t, 0);
2900 handled_component_p (x);
2901 x = TREE_OPERAND (x, 0))
2904 if ((tem = verify_address (t, x)))
2905 return tem;
2907 if (!(TREE_CODE (x) == VAR_DECL
2908 || TREE_CODE (x) == PARM_DECL
2909 || TREE_CODE (x) == RESULT_DECL))
2910 return NULL;
2912 if (!TREE_ADDRESSABLE (x))
2914 error ("address taken, but ADDRESSABLE bit not set");
2915 return x;
2918 break;
2921 case COND_EXPR:
2922 x = COND_EXPR_COND (t);
2923 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2925 error ("non-integral used in condition");
2926 return x;
2928 if (!is_gimple_condexpr (x))
2930 error ("invalid conditional operand");
2931 return x;
2933 break;
2935 case NON_LVALUE_EXPR:
2936 case TRUTH_NOT_EXPR:
2937 gcc_unreachable ();
2939 CASE_CONVERT:
2940 case FIX_TRUNC_EXPR:
2941 case FLOAT_EXPR:
2942 case NEGATE_EXPR:
2943 case ABS_EXPR:
2944 case BIT_NOT_EXPR:
2945 CHECK_OP (0, "invalid operand to unary operator");
2946 break;
2948 case REALPART_EXPR:
2949 case IMAGPART_EXPR:
2950 case BIT_FIELD_REF:
2951 if (!is_gimple_reg_type (TREE_TYPE (t)))
2953 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2954 return t;
2957 if (TREE_CODE (t) == BIT_FIELD_REF)
2959 tree t0 = TREE_OPERAND (t, 0);
2960 tree t1 = TREE_OPERAND (t, 1);
2961 tree t2 = TREE_OPERAND (t, 2);
2962 if (!tree_fits_uhwi_p (t1)
2963 || !tree_fits_uhwi_p (t2))
2965 error ("invalid position or size operand to BIT_FIELD_REF");
2966 return t;
2968 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2969 && (TYPE_PRECISION (TREE_TYPE (t))
2970 != tree_to_uhwi (t1)))
2972 error ("integral result type precision does not match "
2973 "field size of BIT_FIELD_REF");
2974 return t;
2976 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2977 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2978 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2979 != tree_to_uhwi (t1)))
2981 error ("mode precision of non-integral result does not "
2982 "match field size of BIT_FIELD_REF");
2983 return t;
2985 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2986 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2987 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2989 error ("position plus size exceeds size of referenced object in "
2990 "BIT_FIELD_REF");
2991 return t;
2994 t = TREE_OPERAND (t, 0);
2996 /* Fall-through. */
2997 case COMPONENT_REF:
2998 case ARRAY_REF:
2999 case ARRAY_RANGE_REF:
3000 case VIEW_CONVERT_EXPR:
3001 /* We have a nest of references. Verify that each of the operands
3002 that determine where to reference is either a constant or a variable,
3003 verify that the base is valid, and then show we've already checked
3004 the subtrees. */
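/* E.g. (illustrative): for a reference like  x.a[i_1].b  each index
   such as i_1 must be a GIMPLE value, and the innermost prefix x must
   be a valid lvalue or a minimal invariant.  */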
3005 while (handled_component_p (t))
3007 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3008 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3009 else if (TREE_CODE (t) == ARRAY_REF
3010 || TREE_CODE (t) == ARRAY_RANGE_REF)
3012 CHECK_OP (1, "invalid array index");
3013 if (TREE_OPERAND (t, 2))
3014 CHECK_OP (2, "invalid array lower bound");
3015 if (TREE_OPERAND (t, 3))
3016 CHECK_OP (3, "invalid array stride");
3018 else if (TREE_CODE (t) == BIT_FIELD_REF
3019 || TREE_CODE (t) == REALPART_EXPR
3020 || TREE_CODE (t) == IMAGPART_EXPR)
3022 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3023 "REALPART_EXPR");
3024 return t;
3027 t = TREE_OPERAND (t, 0);
3030 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3032 error ("invalid reference prefix");
3033 return t;
3035 *walk_subtrees = 0;
3036 break;
3037 case PLUS_EXPR:
3038 case MINUS_EXPR:
3039 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3040 should be done using POINTER_PLUS_EXPR. */
3041 if (POINTER_TYPE_P (TREE_TYPE (t)))
3043 error ("invalid operand to plus/minus, type is a pointer");
3044 return t;
3046 CHECK_OP (0, "invalid operand to binary operator");
3047 CHECK_OP (1, "invalid operand to binary operator");
3048 break;
3050 case POINTER_PLUS_EXPR:
3051 /* Check to make sure the first operand is a pointer or reference type. */
3052 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3054 error ("invalid operand to pointer plus, first operand is not a pointer");
3055 return t;
3057 /* Check to make sure the second operand is a ptrofftype. */
3058 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3060 error ("invalid operand to pointer plus, second operand is not an "
3061 "integer type of appropriate width");
3062 return t;
3064 /* FALLTHROUGH */
3065 case LT_EXPR:
3066 case LE_EXPR:
3067 case GT_EXPR:
3068 case GE_EXPR:
3069 case EQ_EXPR:
3070 case NE_EXPR:
3071 case UNORDERED_EXPR:
3072 case ORDERED_EXPR:
3073 case UNLT_EXPR:
3074 case UNLE_EXPR:
3075 case UNGT_EXPR:
3076 case UNGE_EXPR:
3077 case UNEQ_EXPR:
3078 case LTGT_EXPR:
3079 case MULT_EXPR:
3080 case TRUNC_DIV_EXPR:
3081 case CEIL_DIV_EXPR:
3082 case FLOOR_DIV_EXPR:
3083 case ROUND_DIV_EXPR:
3084 case TRUNC_MOD_EXPR:
3085 case CEIL_MOD_EXPR:
3086 case FLOOR_MOD_EXPR:
3087 case ROUND_MOD_EXPR:
3088 case RDIV_EXPR:
3089 case EXACT_DIV_EXPR:
3090 case MIN_EXPR:
3091 case MAX_EXPR:
3092 case LSHIFT_EXPR:
3093 case RSHIFT_EXPR:
3094 case LROTATE_EXPR:
3095 case RROTATE_EXPR:
3096 case BIT_IOR_EXPR:
3097 case BIT_XOR_EXPR:
3098 case BIT_AND_EXPR:
3099 CHECK_OP (0, "invalid operand to binary operator");
3100 CHECK_OP (1, "invalid operand to binary operator");
3101 break;
3103 case CONSTRUCTOR:
3104 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3105 *walk_subtrees = 0;
3106 break;
3108 case CASE_LABEL_EXPR:
3109 if (CASE_CHAIN (t))
3111 error ("invalid CASE_CHAIN");
3112 return t;
3114 break;
3116 default:
3117 break;
3119 return NULL;
3121 #undef CHECK_OP
3125 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3126 Returns true if there is an error, otherwise false. */
3128 static bool
3129 verify_types_in_gimple_min_lval (tree expr)
3131 tree op;
3133 if (is_gimple_id (expr))
3134 return false;
3136 if (TREE_CODE (expr) != TARGET_MEM_REF
3137 && TREE_CODE (expr) != MEM_REF)
3139 error ("invalid expression for min lvalue");
3140 return true;
3143 /* TARGET_MEM_REFs are strange beasts. */
3144 if (TREE_CODE (expr) == TARGET_MEM_REF)
3145 return false;
3147 op = TREE_OPERAND (expr, 0);
3148 if (!is_gimple_val (op))
3150 error ("invalid operand in indirect reference");
3151 debug_generic_stmt (op);
3152 return true;
3154 /* Memory references now generally can involve a value conversion. */
3156 return false;
3159 /* Verify if EXPR is a valid GIMPLE reference expression. If
3160 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3161 if there is an error, otherwise false. */
3163 static bool
3164 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3166 while (handled_component_p (expr))
3168 tree op = TREE_OPERAND (expr, 0);
3170 if (TREE_CODE (expr) == ARRAY_REF
3171 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3173 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3174 || (TREE_OPERAND (expr, 2)
3175 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3176 || (TREE_OPERAND (expr, 3)
3177 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3179 error ("invalid operands to array reference");
3180 debug_generic_stmt (expr);
3181 return true;
3185 /* Verify if the reference array element types are compatible. */
3186 if (TREE_CODE (expr) == ARRAY_REF
3187 && !useless_type_conversion_p (TREE_TYPE (expr),
3188 TREE_TYPE (TREE_TYPE (op))))
3190 error ("type mismatch in array reference");
3191 debug_generic_stmt (TREE_TYPE (expr));
3192 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3193 return true;
3195 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3196 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3197 TREE_TYPE (TREE_TYPE (op))))
3199 error ("type mismatch in array range reference");
3200 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3201 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3202 return true;
3205 if ((TREE_CODE (expr) == REALPART_EXPR
3206 || TREE_CODE (expr) == IMAGPART_EXPR)
3207 && !useless_type_conversion_p (TREE_TYPE (expr),
3208 TREE_TYPE (TREE_TYPE (op))))
3210 error ("type mismatch in real/imagpart reference");
3211 debug_generic_stmt (TREE_TYPE (expr));
3212 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3213 return true;
3216 if (TREE_CODE (expr) == COMPONENT_REF
3217 && !useless_type_conversion_p (TREE_TYPE (expr),
3218 TREE_TYPE (TREE_OPERAND (expr, 1))))
3220 error ("type mismatch in component reference");
3221 debug_generic_stmt (TREE_TYPE (expr));
3222 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3223 return true;
3226 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3228 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3229 that their operand is not an SSA name or an invariant when
3230 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3231 bug). Otherwise there is nothing to verify; gross mismatches at
3232 most invoke undefined behavior. */
3233 if (require_lvalue
3234 && (TREE_CODE (op) == SSA_NAME
3235 || is_gimple_min_invariant (op)))
3237 error ("conversion of an SSA_NAME on the left hand side");
3238 debug_generic_stmt (expr);
3239 return true;
3241 else if (TREE_CODE (op) == SSA_NAME
3242 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3244 error ("conversion of register to a different size");
3245 debug_generic_stmt (expr);
3246 return true;
3248 else if (!handled_component_p (op))
3249 return false;
3252 expr = op;
3255 if (TREE_CODE (expr) == MEM_REF)
3257 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3259 error ("invalid address operand in MEM_REF");
3260 debug_generic_stmt (expr);
3261 return true;
3263 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3264 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3266 error ("invalid offset operand in MEM_REF");
3267 debug_generic_stmt (expr);
3268 return true;
3271 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3273 if (!TMR_BASE (expr)
3274 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3276 error ("invalid address operand in TARGET_MEM_REF");
3277 return true;
3279 if (!TMR_OFFSET (expr)
3280 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3281 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3283 error ("invalid offset operand in TARGET_MEM_REF");
3284 debug_generic_stmt (expr);
3285 return true;
3289 return ((require_lvalue || !is_gimple_min_invariant (expr))
3290 && verify_types_in_gimple_min_lval (expr));
3293 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3294 list of pointer-to types that is trivially convertible to DEST. */
3296 static bool
3297 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3299 tree src;
3301 if (!TYPE_POINTER_TO (src_obj))
3302 return true;
3304 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3305 if (useless_type_conversion_p (dest, src))
3306 return true;
3308 return false;
3311 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3312 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3314 static bool
3315 valid_fixed_convert_types_p (tree type1, tree type2)
3317 return (FIXED_POINT_TYPE_P (type1)
3318 && (INTEGRAL_TYPE_P (type2)
3319 || SCALAR_FLOAT_TYPE_P (type2)
3320 || FIXED_POINT_TYPE_P (type2)));
3323 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3324 is a problem, otherwise false. */
3326 static bool
3327 verify_gimple_call (gcall *stmt)
3329 tree fn = gimple_call_fn (stmt);
3330 tree fntype, fndecl;
3331 unsigned i;
3333 if (gimple_call_internal_p (stmt))
3335 if (fn)
3337 error ("gimple call has two targets");
3338 debug_generic_stmt (fn);
3339 return true;
3342 else
3344 if (!fn)
3346 error ("gimple call has no target");
3347 return true;
3351 if (fn && !is_gimple_call_addr (fn))
3353 error ("invalid function in gimple call");
3354 debug_generic_stmt (fn);
3355 return true;
3358 if (fn
3359 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3360 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3361 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3363 error ("non-function in gimple call");
3364 return true;
3367 fndecl = gimple_call_fndecl (stmt);
3368 if (fndecl
3369 && TREE_CODE (fndecl) == FUNCTION_DECL
3370 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3371 && !DECL_PURE_P (fndecl)
3372 && !TREE_READONLY (fndecl))
3374 error ("invalid pure const state for function");
3375 return true;
3378 tree lhs = gimple_call_lhs (stmt);
3379 if (lhs
3380 && (!is_gimple_lvalue (lhs)
3381 || verify_types_in_gimple_reference (lhs, true)))
3383 error ("invalid LHS in gimple call");
3384 return true;
3387 if (lhs
3388 && gimple_call_ctrl_altering_p (stmt)
3389 && gimple_call_noreturn_p (stmt)
3390 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3392 error ("LHS in noreturn call");
3393 return true;
3396 fntype = gimple_call_fntype (stmt);
3397 if (fntype
3398 && lhs
3399 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3400 /* ??? At least C++ misses conversions at assignments from
3401 void * call results.
3402 ??? Java is completely off. Especially with functions
3403 returning java.lang.Object.
3404 For now simply allow arbitrary pointer type conversions. */
3405 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3406 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3408 error ("invalid conversion in gimple call");
3409 debug_generic_stmt (TREE_TYPE (lhs));
3410 debug_generic_stmt (TREE_TYPE (fntype));
3411 return true;
3414 if (gimple_call_chain (stmt)
3415 && !is_gimple_val (gimple_call_chain (stmt)))
3417 error ("invalid static chain in gimple call");
3418 debug_generic_stmt (gimple_call_chain (stmt));
3419 return true;
3422 /* If there is a static chain argument, the call should either be
3423 indirect, or the decl should have DECL_STATIC_CHAIN set. */
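/* E.g. (illustrative): a GNU C nested function that reads its parent's
   locals is called with a static chain and has DECL_STATIC_CHAIN set;
   a chain passed to a decl that does not use one is an IL bug caught
   here.  */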
3424 if (gimple_call_chain (stmt)
3425 && fndecl
3426 && !DECL_STATIC_CHAIN (fndecl))
3428 error ("static chain with function that doesn%'t use one");
3429 return true;
3432 /* ??? The C frontend passes unpromoted arguments in case it
3433 didn't see a function declaration before the call. So for now
3434 leave the call arguments mostly unverified. Once we gimplify
3435 unit-at-a-time we have a chance to fix this. */
3437 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3439 tree arg = gimple_call_arg (stmt, i);
3440 if ((is_gimple_reg_type (TREE_TYPE (arg))
3441 && !is_gimple_val (arg))
3442 || (!is_gimple_reg_type (TREE_TYPE (arg))
3443 && !is_gimple_lvalue (arg)))
3445 error ("invalid argument to gimple call");
3446 debug_generic_expr (arg);
3447 return true;
3451 return false;
3454 /* Verifies the gimple comparison with the result type TYPE and
3455 the operands OP0 and OP1. */
3457 static bool
3458 verify_gimple_comparison (tree type, tree op0, tree op1)
3460 tree op0_type = TREE_TYPE (op0);
3461 tree op1_type = TREE_TYPE (op1);
3463 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3465 error ("invalid operands in gimple comparison");
3466 return true;
3469 /* For comparisons we do not have the operation's type as the
3470 effective type the comparison is carried out in. Instead
3471 we require that either the first operand is trivially
3472 convertible into the second, or the other way around.
3473 Because we special-case pointers to void we allow
3474 comparisons of pointers with the same mode as well. */
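/* Illustrative example (not from a dump): comparing a char * against a
   void * of the same mode is accepted even though neither pointer type
   is trivially convertible to the other.  */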
3475 if (!useless_type_conversion_p (op0_type, op1_type)
3476 && !useless_type_conversion_p (op1_type, op0_type)
3477 && (!POINTER_TYPE_P (op0_type)
3478 || !POINTER_TYPE_P (op1_type)
3479 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3481 error ("mismatching comparison operand types");
3482 debug_generic_expr (op0_type);
3483 debug_generic_expr (op1_type);
3484 return true;
3487 /* The resulting type of a comparison may be an effective boolean type. */
3488 if (INTEGRAL_TYPE_P (type)
3489 && (TREE_CODE (type) == BOOLEAN_TYPE
3490 || TYPE_PRECISION (type) == 1))
3492 if (TREE_CODE (op0_type) == VECTOR_TYPE
3493 || TREE_CODE (op1_type) == VECTOR_TYPE)
3495 error ("vector comparison returning a boolean");
3496 debug_generic_expr (op0_type);
3497 debug_generic_expr (op1_type);
3498 return true;
3501 /* Or an integer vector type with the same size and element count
3502 as the comparison operand types. */
3503 else if (TREE_CODE (type) == VECTOR_TYPE
3504 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3506 if (TREE_CODE (op0_type) != VECTOR_TYPE
3507 || TREE_CODE (op1_type) != VECTOR_TYPE)
3509 error ("non-vector operands in vector comparison");
3510 debug_generic_expr (op0_type);
3511 debug_generic_expr (op1_type);
3512 return true;
3515 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3516 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3517 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3518 /* The result of a vector comparison is of signed
3519 integral type. */
3520 || TYPE_UNSIGNED (TREE_TYPE (type)))
3522 error ("invalid vector comparison resulting type");
3523 debug_generic_expr (type);
3524 return true;
3527 else
3529 error ("bogus comparison result type");
3530 debug_generic_expr (type);
3531 return true;
3534 return false;
3537 /* Verify a gimple assignment statement STMT with an unary rhs.
3538 Returns true if anything is wrong. */
3540 static bool
3541 verify_gimple_assign_unary (gassign *stmt)
3543 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3544 tree lhs = gimple_assign_lhs (stmt);
3545 tree lhs_type = TREE_TYPE (lhs);
3546 tree rhs1 = gimple_assign_rhs1 (stmt);
3547 tree rhs1_type = TREE_TYPE (rhs1);
3549 if (!is_gimple_reg (lhs))
3551 error ("non-register as LHS of unary operation");
3552 return true;
3555 if (!is_gimple_val (rhs1))
3557 error ("invalid operand in unary operation");
3558 return true;
3561 /* First handle conversions. */
3562 switch (rhs_code)
3564 CASE_CONVERT:
3566 /* Allow conversions from pointer type to integral type only if
3567 there is no sign or zero extension involved.
3568 For targets where the precision of ptrofftype doesn't match that
3569 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3570 if ((POINTER_TYPE_P (lhs_type)
3571 && INTEGRAL_TYPE_P (rhs1_type))
3572 || (POINTER_TYPE_P (rhs1_type)
3573 && INTEGRAL_TYPE_P (lhs_type)
3574 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3575 || ptrofftype_p (sizetype))))
3576 return false;
3578 /* Allow conversion from integral to offset type and vice versa. */
3579 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3580 && INTEGRAL_TYPE_P (rhs1_type))
3581 || (INTEGRAL_TYPE_P (lhs_type)
3582 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3583 return false;
3585 /* Otherwise assert we are converting between types of the
3586 same kind. */
3587 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3589 error ("invalid types in nop conversion");
3590 debug_generic_expr (lhs_type);
3591 debug_generic_expr (rhs1_type);
3592 return true;
3595 return false;
3598 case ADDR_SPACE_CONVERT_EXPR:
3600 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3601 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3602 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3604 error ("invalid types in address space conversion");
3605 debug_generic_expr (lhs_type);
3606 debug_generic_expr (rhs1_type);
3607 return true;
3610 return false;
3613 case FIXED_CONVERT_EXPR:
3615 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3616 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3618 error ("invalid types in fixed-point conversion");
3619 debug_generic_expr (lhs_type);
3620 debug_generic_expr (rhs1_type);
3621 return true;
3624 return false;
3627 case FLOAT_EXPR:
3629 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3630 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3631 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3633 error ("invalid types in conversion to floating point");
3634 debug_generic_expr (lhs_type);
3635 debug_generic_expr (rhs1_type);
3636 return true;
3639 return false;
3642 case FIX_TRUNC_EXPR:
3644 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3645 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3646 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3648 error ("invalid types in conversion to integer");
3649 debug_generic_expr (lhs_type);
3650 debug_generic_expr (rhs1_type);
3651 return true;
3654 return false;
3656 case REDUC_MAX_EXPR:
3657 case REDUC_MIN_EXPR:
3658 case REDUC_PLUS_EXPR:
3659 if (!VECTOR_TYPE_P (rhs1_type)
3660 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3662 error ("reduction should convert from vector to element type");
3663 debug_generic_expr (lhs_type);
3664 debug_generic_expr (rhs1_type);
3665 return true;
3667 return false;
3669 case VEC_UNPACK_HI_EXPR:
3670 case VEC_UNPACK_LO_EXPR:
3671 case VEC_UNPACK_FLOAT_HI_EXPR:
3672 case VEC_UNPACK_FLOAT_LO_EXPR:
3673 /* FIXME. */
3674 return false;
3676 case NEGATE_EXPR:
3677 case ABS_EXPR:
3678 case BIT_NOT_EXPR:
3679 case PAREN_EXPR:
3680 case CONJ_EXPR:
3681 break;
3683 default:
3684 gcc_unreachable ();
3687 /* For the remaining codes assert there is no conversion involved. */
3688 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3690 error ("non-trivial conversion in unary operation");
3691 debug_generic_expr (lhs_type);
3692 debug_generic_expr (rhs1_type);
3693 return true;
3696 return false;
3699 /* Verify a gimple assignment statement STMT with a binary rhs.
3700 Returns true if anything is wrong. */
3702 static bool
3703 verify_gimple_assign_binary (gassign *stmt)
3705 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3706 tree lhs = gimple_assign_lhs (stmt);
3707 tree lhs_type = TREE_TYPE (lhs);
3708 tree rhs1 = gimple_assign_rhs1 (stmt);
3709 tree rhs1_type = TREE_TYPE (rhs1);
3710 tree rhs2 = gimple_assign_rhs2 (stmt);
3711 tree rhs2_type = TREE_TYPE (rhs2);
3713 if (!is_gimple_reg (lhs))
3715 error ("non-register as LHS of binary operation");
3716 return true;
3719 if (!is_gimple_val (rhs1)
3720 || !is_gimple_val (rhs2))
3722 error ("invalid operands in binary operation");
3723 return true;
3726 /* First handle operations that involve different types. */
3727 switch (rhs_code)
3729 case COMPLEX_EXPR:
3731 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3732 || !(INTEGRAL_TYPE_P (rhs1_type)
3733 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3734 || !(INTEGRAL_TYPE_P (rhs2_type)
3735 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3737 error ("type mismatch in complex expression");
3738 debug_generic_expr (lhs_type);
3739 debug_generic_expr (rhs1_type);
3740 debug_generic_expr (rhs2_type);
3741 return true;
3744 return false;
3747 case LSHIFT_EXPR:
3748 case RSHIFT_EXPR:
3749 case LROTATE_EXPR:
3750 case RROTATE_EXPR:
3752 /* Shifts and rotates are ok on integral types, fixed point
3753 types and integer vector types. */
3754 if ((!INTEGRAL_TYPE_P (rhs1_type)
3755 && !FIXED_POINT_TYPE_P (rhs1_type)
3756 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3757 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3758 || (!INTEGRAL_TYPE_P (rhs2_type)
3759 /* Vector shifts of vectors are also ok. */
3760 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3762 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3763 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3764 || !useless_type_conversion_p (lhs_type, rhs1_type))
3766 error ("type mismatch in shift expression");
3767 debug_generic_expr (lhs_type);
3768 debug_generic_expr (rhs1_type);
3769 debug_generic_expr (rhs2_type);
3770 return true;
3773 return false;
3776 case WIDEN_LSHIFT_EXPR:
3778 if (!INTEGRAL_TYPE_P (lhs_type)
3779 || !INTEGRAL_TYPE_P (rhs1_type)
3780 || TREE_CODE (rhs2) != INTEGER_CST
3781 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3783 error ("type mismatch in widening vector shift expression");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 debug_generic_expr (rhs2_type);
3787 return true;
3790 return false;
3793 case VEC_WIDEN_LSHIFT_HI_EXPR:
3794 case VEC_WIDEN_LSHIFT_LO_EXPR:
3796 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3797 || TREE_CODE (lhs_type) != VECTOR_TYPE
3798 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3799 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3800 || TREE_CODE (rhs2) != INTEGER_CST
3801 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3802 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3804 error ("type mismatch in widening vector shift expression");
3805 debug_generic_expr (lhs_type);
3806 debug_generic_expr (rhs1_type);
3807 debug_generic_expr (rhs2_type);
3808 return true;
3811 return false;
3814 case PLUS_EXPR:
3815 case MINUS_EXPR:
3817 tree lhs_etype = lhs_type;
3818 tree rhs1_etype = rhs1_type;
3819 tree rhs2_etype = rhs2_type;
3820 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3822 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3823 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3825 error ("invalid non-vector operands to vector valued plus");
3826 return true;
3828 lhs_etype = TREE_TYPE (lhs_type);
3829 rhs1_etype = TREE_TYPE (rhs1_type);
3830 rhs2_etype = TREE_TYPE (rhs2_type);
3832 if (POINTER_TYPE_P (lhs_etype)
3833 || POINTER_TYPE_P (rhs1_etype)
3834 || POINTER_TYPE_P (rhs2_etype))
3836 error ("invalid (pointer) operands to plus/minus");
3837 return true;
3840 /* Continue with generic binary expression handling. */
3841 break;
3844 case POINTER_PLUS_EXPR:
3846 if (!POINTER_TYPE_P (rhs1_type)
3847 || !useless_type_conversion_p (lhs_type, rhs1_type)
3848 || !ptrofftype_p (rhs2_type))
3850 error ("type mismatch in pointer plus expression");
3851 debug_generic_stmt (lhs_type);
3852 debug_generic_stmt (rhs1_type);
3853 debug_generic_stmt (rhs2_type);
3854 return true;
3857 return false;
3860 case TRUTH_ANDIF_EXPR:
3861 case TRUTH_ORIF_EXPR:
3862 case TRUTH_AND_EXPR:
3863 case TRUTH_OR_EXPR:
3864 case TRUTH_XOR_EXPR:
3866 gcc_unreachable ();
3868 case LT_EXPR:
3869 case LE_EXPR:
3870 case GT_EXPR:
3871 case GE_EXPR:
3872 case EQ_EXPR:
3873 case NE_EXPR:
3874 case UNORDERED_EXPR:
3875 case ORDERED_EXPR:
3876 case UNLT_EXPR:
3877 case UNLE_EXPR:
3878 case UNGT_EXPR:
3879 case UNGE_EXPR:
3880 case UNEQ_EXPR:
3881 case LTGT_EXPR:
3882 /* Comparisons are also binary, but the result type is not
3883 connected to the operand types. */
3884 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3886 case WIDEN_MULT_EXPR:
3887 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3888 return true;
3889 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3890 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3892 case WIDEN_SUM_EXPR:
3893 case VEC_WIDEN_MULT_HI_EXPR:
3894 case VEC_WIDEN_MULT_LO_EXPR:
3895 case VEC_WIDEN_MULT_EVEN_EXPR:
3896 case VEC_WIDEN_MULT_ODD_EXPR:
3897 case VEC_PACK_TRUNC_EXPR:
3898 case VEC_PACK_SAT_EXPR:
3899 case VEC_PACK_FIX_TRUNC_EXPR:
3900 /* FIXME. */
3901 return false;
3903 case MULT_EXPR:
3904 case MULT_HIGHPART_EXPR:
3905 case TRUNC_DIV_EXPR:
3906 case CEIL_DIV_EXPR:
3907 case FLOOR_DIV_EXPR:
3908 case ROUND_DIV_EXPR:
3909 case TRUNC_MOD_EXPR:
3910 case CEIL_MOD_EXPR:
3911 case FLOOR_MOD_EXPR:
3912 case ROUND_MOD_EXPR:
3913 case RDIV_EXPR:
3914 case EXACT_DIV_EXPR:
3915 case MIN_EXPR:
3916 case MAX_EXPR:
3917 case BIT_IOR_EXPR:
3918 case BIT_XOR_EXPR:
3919 case BIT_AND_EXPR:
3920 /* Continue with generic binary expression handling. */
3921 break;
3923 default:
3924 gcc_unreachable ();
3927 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3928 || !useless_type_conversion_p (lhs_type, rhs2_type))
3930 error ("type mismatch in binary expression");
3931 debug_generic_stmt (lhs_type);
3932 debug_generic_stmt (rhs1_type);
3933 debug_generic_stmt (rhs2_type);
3934 return true;
3937 return false;
3940 /* Verify a gimple assignment statement STMT with a ternary rhs.
3941 Returns true if anything is wrong. */
3943 static bool
3944 verify_gimple_assign_ternary (gassign *stmt)
3946 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3947 tree lhs = gimple_assign_lhs (stmt);
3948 tree lhs_type = TREE_TYPE (lhs);
3949 tree rhs1 = gimple_assign_rhs1 (stmt);
3950 tree rhs1_type = TREE_TYPE (rhs1);
3951 tree rhs2 = gimple_assign_rhs2 (stmt);
3952 tree rhs2_type = TREE_TYPE (rhs2);
3953 tree rhs3 = gimple_assign_rhs3 (stmt);
3954 tree rhs3_type = TREE_TYPE (rhs3);
3956 if (!is_gimple_reg (lhs))
3958 error ("non-register as LHS of ternary operation");
3959 return true;
3962 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3963 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3964 || !is_gimple_val (rhs2)
3965 || !is_gimple_val (rhs3))
3967 error ("invalid operands in ternary operation");
3968 return true;
3971 /* First handle operations that involve different types. */
3972 switch (rhs_code)
3974 case WIDEN_MULT_PLUS_EXPR:
3975 case WIDEN_MULT_MINUS_EXPR:
3976 if ((!INTEGRAL_TYPE_P (rhs1_type)
3977 && !FIXED_POINT_TYPE_P (rhs1_type))
3978 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3979 || !useless_type_conversion_p (lhs_type, rhs3_type)
3980 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3981 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3983 error ("type mismatch in widening multiply-accumulate expression");
3984 debug_generic_expr (lhs_type);
3985 debug_generic_expr (rhs1_type);
3986 debug_generic_expr (rhs2_type);
3987 debug_generic_expr (rhs3_type);
3988 return true;
3990 break;
3992 case FMA_EXPR:
3993 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3994 || !useless_type_conversion_p (lhs_type, rhs2_type)
3995 || !useless_type_conversion_p (lhs_type, rhs3_type))
3997 error ("type mismatch in fused multiply-add expression");
3998 debug_generic_expr (lhs_type);
3999 debug_generic_expr (rhs1_type);
4000 debug_generic_expr (rhs2_type);
4001 debug_generic_expr (rhs3_type);
4002 return true;
4004 break;
4006 case COND_EXPR:
4007 case VEC_COND_EXPR:
4008 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4009 || !useless_type_conversion_p (lhs_type, rhs3_type))
4011 error ("type mismatch in conditional expression");
4012 debug_generic_expr (lhs_type);
4013 debug_generic_expr (rhs2_type);
4014 debug_generic_expr (rhs3_type);
4015 return true;
4017 break;
4019 case VEC_PERM_EXPR:
4020 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4021 || !useless_type_conversion_p (lhs_type, rhs2_type))
4023 error ("type mismatch in vector permute expression");
4024 debug_generic_expr (lhs_type);
4025 debug_generic_expr (rhs1_type);
4026 debug_generic_expr (rhs2_type);
4027 debug_generic_expr (rhs3_type);
4028 return true;
4031 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4032 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4033 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4035 error ("vector types expected in vector permute expression");
4036 debug_generic_expr (lhs_type);
4037 debug_generic_expr (rhs1_type);
4038 debug_generic_expr (rhs2_type);
4039 debug_generic_expr (rhs3_type);
4040 return true;
4043 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4044 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4045 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4046 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4047 != TYPE_VECTOR_SUBPARTS (lhs_type))
4049 error ("vectors with different element number found "
4050 "in vector permute expression");
4051 debug_generic_expr (lhs_type);
4052 debug_generic_expr (rhs1_type);
4053 debug_generic_expr (rhs2_type);
4054 debug_generic_expr (rhs3_type);
4055 return true;
4058 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4059 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4060 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4062 error ("invalid mask type in vector permute expression");
4063 debug_generic_expr (lhs_type);
4064 debug_generic_expr (rhs1_type);
4065 debug_generic_expr (rhs2_type);
4066 debug_generic_expr (rhs3_type);
4067 return true;
4070 return false;
4072 case SAD_EXPR:
4073 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4074 || !useless_type_conversion_p (lhs_type, rhs3_type)
4075 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4076 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4077 > GET_MODE_BITSIZE (GET_MODE_INNER
4078 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4080 error ("type mismatch in sad expression");
4081 debug_generic_expr (lhs_type);
4082 debug_generic_expr (rhs1_type);
4083 debug_generic_expr (rhs2_type);
4084 debug_generic_expr (rhs3_type);
4085 return true;
4088 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4089 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4090 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4092 error ("vector types expected in sad expression");
4093 debug_generic_expr (lhs_type);
4094 debug_generic_expr (rhs1_type);
4095 debug_generic_expr (rhs2_type);
4096 debug_generic_expr (rhs3_type);
4097 return true;
4100 return false;
4102 case DOT_PROD_EXPR:
4103 case REALIGN_LOAD_EXPR:
4104 /* FIXME. */
4105 return false;
4107 default:
4108 gcc_unreachable ();
4110 return false;
4113 /* Verify a gimple assignment statement STMT with a single rhs.
4114 Returns true if anything is wrong. */
4116 static bool
4117 verify_gimple_assign_single (gassign *stmt)
4119 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4120 tree lhs = gimple_assign_lhs (stmt);
4121 tree lhs_type = TREE_TYPE (lhs);
4122 tree rhs1 = gimple_assign_rhs1 (stmt);
4123 tree rhs1_type = TREE_TYPE (rhs1);
4124 bool res = false;
4126 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4128 error ("non-trivial conversion at assignment");
4129 debug_generic_expr (lhs_type);
4130 debug_generic_expr (rhs1_type);
4131 return true;
4134 if (gimple_clobber_p (stmt)
4135 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4137 error ("non-decl/MEM_REF LHS in clobber statement");
4138 debug_generic_expr (lhs);
4139 return true;
4142 if (handled_component_p (lhs)
4143 || TREE_CODE (lhs) == MEM_REF
4144 || TREE_CODE (lhs) == TARGET_MEM_REF)
4145 res |= verify_types_in_gimple_reference (lhs, true);
4147 /* Special codes we cannot handle via their class. */
4148 switch (rhs_code)
4150 case ADDR_EXPR:
4152 tree op = TREE_OPERAND (rhs1, 0);
4153 if (!is_gimple_addressable (op))
4155 error ("invalid operand in unary expression");
4156 return true;
4159 /* Technically there is no longer a need for matching types, but
4160 gimple hygiene asks for this check. In LTO we can end up
4161 combining incompatible units and thus with addresses of
4162 globals that change their type to a common one. */
4163 if (!in_lto_p
4164 && !types_compatible_p (TREE_TYPE (op),
4165 TREE_TYPE (TREE_TYPE (rhs1)))
4166 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4167 TREE_TYPE (op)))
4169 error ("type mismatch in address expression");
4170 debug_generic_stmt (TREE_TYPE (rhs1));
4171 debug_generic_stmt (TREE_TYPE (op));
4172 return true;
4175 return verify_types_in_gimple_reference (op, true);
4178 /* tcc_reference */
4179 case INDIRECT_REF:
4180 error ("INDIRECT_REF in gimple IL");
4181 return true;
4183 case COMPONENT_REF:
4184 case BIT_FIELD_REF:
4185 case ARRAY_REF:
4186 case ARRAY_RANGE_REF:
4187 case VIEW_CONVERT_EXPR:
4188 case REALPART_EXPR:
4189 case IMAGPART_EXPR:
4190 case TARGET_MEM_REF:
4191 case MEM_REF:
4192 if (!is_gimple_reg (lhs)
4193 && is_gimple_reg_type (TREE_TYPE (lhs)))
4195 error ("invalid rhs for gimple memory store");
4196 debug_generic_stmt (lhs);
4197 debug_generic_stmt (rhs1);
4198 return true;
4200 return res || verify_types_in_gimple_reference (rhs1, false);
4202 /* tcc_constant */
4203 case SSA_NAME:
4204 case INTEGER_CST:
4205 case REAL_CST:
4206 case FIXED_CST:
4207 case COMPLEX_CST:
4208 case VECTOR_CST:
4209 case STRING_CST:
4210 return res;
4212 /* tcc_declaration */
4213 case CONST_DECL:
4214 return res;
4215 case VAR_DECL:
4216 case PARM_DECL:
4217 if (!is_gimple_reg (lhs)
4218 && !is_gimple_reg (rhs1)
4219 && is_gimple_reg_type (TREE_TYPE (lhs)))
4221 error ("invalid rhs for gimple memory store");
4222 debug_generic_stmt (lhs);
4223 debug_generic_stmt (rhs1);
4224 return true;
4226 return res;
4228 case CONSTRUCTOR:
4229 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4231 unsigned int i;
4232 tree elt_i, elt_v, elt_t = NULL_TREE;
4234 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4235 return res;
4236 /* For vector CONSTRUCTORs we require that either it is an empty
4237 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4238 (then the element count must be correct to cover the whole
4239 outer vector and the index must be NULL on all elements), or it
4240 is a CONSTRUCTOR of scalar elements, where as an exception we
4241 allow a smaller number of elements (assuming zero filling) and
4242 consecutive indexes instead of NULL indexes (such
4243 CONSTRUCTORs can appear in the IL from FEs). */
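/* For example (illustrative only), with a V4SI lhs both

     {v2si_1, v2si_2}   <- two smaller vectors, all indexes NULL
     {1, 2}             <- scalars, zero-filled up to 4 elements

   are acceptable forms, whereas an explicit element index that does
   not match the element position is rejected below.  */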
4244 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4246 if (elt_t == NULL_TREE)
4248 elt_t = TREE_TYPE (elt_v);
4249 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4251 tree elt_t = TREE_TYPE (elt_v);
4252 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4253 TREE_TYPE (elt_t)))
4255 error ("incorrect type of vector CONSTRUCTOR"
4256 " elements");
4257 debug_generic_stmt (rhs1);
4258 return true;
4260 else if (CONSTRUCTOR_NELTS (rhs1)
4261 * TYPE_VECTOR_SUBPARTS (elt_t)
4262 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4264 error ("incorrect number of vector CONSTRUCTOR"
4265 " elements");
4266 debug_generic_stmt (rhs1);
4267 return true;
4270 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4271 elt_t))
4273 error ("incorrect type of vector CONSTRUCTOR elements");
4274 debug_generic_stmt (rhs1);
4275 return true;
4277 else if (CONSTRUCTOR_NELTS (rhs1)
4278 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4280 error ("incorrect number of vector CONSTRUCTOR elements");
4281 debug_generic_stmt (rhs1);
4282 return true;
4285 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4287 error ("incorrect type of vector CONSTRUCTOR elements");
4288 debug_generic_stmt (rhs1);
4289 return true;
4291 if (elt_i != NULL_TREE
4292 && (TREE_CODE (elt_t) == VECTOR_TYPE
4293 || TREE_CODE (elt_i) != INTEGER_CST
4294 || compare_tree_int (elt_i, i) != 0))
4296 error ("vector CONSTRUCTOR with non-NULL element index");
4297 debug_generic_stmt (rhs1);
4298 return true;
4300 if (!is_gimple_val (elt_v))
4302 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4303 debug_generic_stmt (rhs1);
4304 return true;
4308 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4310 error ("non-vector CONSTRUCTOR with elements");
4311 debug_generic_stmt (rhs1);
4312 return true;
4314 return res;
4315 case OBJ_TYPE_REF:
4316 case ASSERT_EXPR:
4317 case WITH_SIZE_EXPR:
4318 /* FIXME. */
4319 return res;
4321 default:;
4324 return res;
4327 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4328 is a problem, otherwise false. */
4330 static bool
4331 verify_gimple_assign (gassign *stmt)
4333 switch (gimple_assign_rhs_class (stmt))
4335 case GIMPLE_SINGLE_RHS:
4336 return verify_gimple_assign_single (stmt);
4338 case GIMPLE_UNARY_RHS:
4339 return verify_gimple_assign_unary (stmt);
4341 case GIMPLE_BINARY_RHS:
4342 return verify_gimple_assign_binary (stmt);
4344 case GIMPLE_TERNARY_RHS:
4345 return verify_gimple_assign_ternary (stmt);
4347 default:
4348 gcc_unreachable ();
4352 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4353 is a problem, otherwise false. */
4355 static bool
4356 verify_gimple_return (greturn *stmt)
4358 tree op = gimple_return_retval (stmt);
4359 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4361 /* We cannot test for present return values as we do not fix up missing
4362 return values from the original source. */
4363 if (op == NULL)
4364 return false;
4366 if (!is_gimple_val (op)
4367 && TREE_CODE (op) != RESULT_DECL)
4369 error ("invalid operand in return statement");
4370 debug_generic_stmt (op);
4371 return true;
4374 if ((TREE_CODE (op) == RESULT_DECL
4375 && DECL_BY_REFERENCE (op))
4376 || (TREE_CODE (op) == SSA_NAME
4377 && SSA_NAME_VAR (op)
4378 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4379 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4380 op = TREE_TYPE (op);
4382 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4384 error ("invalid conversion in return statement");
4385 debug_generic_stmt (restype);
4386 debug_generic_stmt (TREE_TYPE (op));
4387 return true;
4390 return false;
4394 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4395 is a problem, otherwise false. */
4397 static bool
4398 verify_gimple_goto (ggoto *stmt)
4400 tree dest = gimple_goto_dest (stmt);
4402 /* ??? We have two canonical forms of direct goto destinations, a
4403 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4404 if (TREE_CODE (dest) != LABEL_DECL
4405 && (!is_gimple_val (dest)
4406 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4408 error ("goto destination is neither a label nor a pointer");
4409 return true;
4412 return false;
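/* Two illustrative destinations that pass the check above:

     goto L;       <- DEST is a bare LABEL_DECL
     goto *p_1;    <- computed goto, DEST is a pointer-valued gimple val

   anything else is diagnosed as an invalid goto destination.  */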
4415 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4416 is a problem, otherwise false. */
4418 static bool
4419 verify_gimple_switch (gswitch *stmt)
4421 unsigned int i, n;
4422 tree elt, prev_upper_bound = NULL_TREE;
4423 tree index_type, elt_type = NULL_TREE;
4425 if (!is_gimple_val (gimple_switch_index (stmt)))
4427 error ("invalid operand to switch statement");
4428 debug_generic_stmt (gimple_switch_index (stmt));
4429 return true;
4432 index_type = TREE_TYPE (gimple_switch_index (stmt));
4433 if (! INTEGRAL_TYPE_P (index_type))
4435 error ("non-integral type switch statement");
4436 debug_generic_expr (index_type);
4437 return true;
4440 elt = gimple_switch_label (stmt, 0);
4441 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4443 error ("invalid default case label in switch statement");
4444 debug_generic_expr (elt);
4445 return true;
4448 n = gimple_switch_num_labels (stmt);
4449 for (i = 1; i < n; i++)
4451 elt = gimple_switch_label (stmt, i);
4453 if (! CASE_LOW (elt))
4455 error ("invalid case label in switch statement");
4456 debug_generic_expr (elt);
4457 return true;
4459 if (CASE_HIGH (elt)
4460 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4462 error ("invalid case range in switch statement");
4463 debug_generic_expr (elt);
4464 return true;
4467 if (elt_type)
4469 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4470 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4472 error ("type mismatch for case label in switch statement");
4473 debug_generic_expr (elt);
4474 return true;
4477 else
4479 elt_type = TREE_TYPE (CASE_LOW (elt));
4480 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4482 error ("type precision mismatch in switch statement");
4483 return true;
4487 if (prev_upper_bound)
4489 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4491 error ("case labels not sorted in switch statement");
4492 return true;
4496 prev_upper_bound = CASE_HIGH (elt);
4497 if (! prev_upper_bound)
4498 prev_upper_bound = CASE_LOW (elt);
4501 return false;
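/* As a sketch, a case vector accepted by the checks above looks like

     switch (i_1) <default: D, case 1: A, case 3 ... 7: B, case 9: C>

   i.e. the default label (no CASE_LOW/CASE_HIGH) comes first, every
   other label has a CASE_LOW, ranges satisfy CASE_LOW < CASE_HIGH,
   all labels share one type whose precision does not exceed that of
   the index, and the labels are sorted by their lower bound.  */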
4504 /* Verify a gimple debug statement STMT.
4505 Returns true if anything is wrong. */
4507 static bool
4508 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4510 /* There isn't much that could be wrong in a gimple debug stmt. A
4511 gimple debug bind stmt, for example, maps a tree that's usually
4512 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4513 component or member of an aggregate type) to another tree that
4514 can be an arbitrary expression. These stmts expand into debug
4515 insns, and are converted to debug notes by var-tracking.c. */
4516 return false;
4519 /* Verify a gimple label statement STMT.
4520 Returns true if anything is wrong. */
4522 static bool
4523 verify_gimple_label (glabel *stmt)
4525 tree decl = gimple_label_label (stmt);
4526 int uid;
4527 bool err = false;
4529 if (TREE_CODE (decl) != LABEL_DECL)
4530 return true;
4531 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4532 && DECL_CONTEXT (decl) != current_function_decl)
4534 error ("label's context is not the current function decl");
4535 err |= true;
4538 uid = LABEL_DECL_UID (decl);
4539 if (cfun->cfg
4540 && (uid == -1
4541 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4543 error ("incorrect entry in label_to_block_map");
4544 err |= true;
4547 uid = EH_LANDING_PAD_NR (decl);
4548 if (uid)
4550 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4551 if (decl != lp->post_landing_pad)
4553 error ("incorrect setting of landing pad number");
4554 err |= true;
4558 return err;
4561 /* Verify a gimple cond statement STMT.
4562 Returns true if anything is wrong. */
4564 static bool
4565 verify_gimple_cond (gcond *stmt)
4567 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4569 error ("invalid comparison code in gimple cond");
4570 return true;
4572 if (!(!gimple_cond_true_label (stmt)
4573 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4574 || !(!gimple_cond_false_label (stmt)
4575 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4577 error ("invalid labels in gimple cond");
4578 return true;
4581 return verify_gimple_comparison (boolean_type_node,
4582 gimple_cond_lhs (stmt),
4583 gimple_cond_rhs (stmt));
4586 /* Verify the GIMPLE statement STMT. Returns true if there is an
4587 error, otherwise false. */
4589 static bool
4590 verify_gimple_stmt (gimple stmt)
4592 switch (gimple_code (stmt))
4594 case GIMPLE_ASSIGN:
4595 return verify_gimple_assign (as_a <gassign *> (stmt));
4597 case GIMPLE_LABEL:
4598 return verify_gimple_label (as_a <glabel *> (stmt));
4600 case GIMPLE_CALL:
4601 return verify_gimple_call (as_a <gcall *> (stmt));
4603 case GIMPLE_COND:
4604 return verify_gimple_cond (as_a <gcond *> (stmt));
4606 case GIMPLE_GOTO:
4607 return verify_gimple_goto (as_a <ggoto *> (stmt));
4609 case GIMPLE_SWITCH:
4610 return verify_gimple_switch (as_a <gswitch *> (stmt));
4612 case GIMPLE_RETURN:
4613 return verify_gimple_return (as_a <greturn *> (stmt));
4615 case GIMPLE_ASM:
4616 return false;
4618 case GIMPLE_TRANSACTION:
4619 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4621 /* Tuples that do not have tree operands. */
4622 case GIMPLE_NOP:
4623 case GIMPLE_PREDICT:
4624 case GIMPLE_RESX:
4625 case GIMPLE_EH_DISPATCH:
4626 case GIMPLE_EH_MUST_NOT_THROW:
4627 return false;
4629 CASE_GIMPLE_OMP:
4630 /* OpenMP directives are validated by the FE and never operated
4631 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4632 non-gimple expressions when the main index variable has had
4633 its address taken. This does not affect the loop itself
4634 because the header of a GIMPLE_OMP_FOR is merely used to determine
4635 how to set up the parallel iteration. */
4636 return false;
4638 case GIMPLE_DEBUG:
4639 return verify_gimple_debug (stmt);
4641 default:
4642 gcc_unreachable ();
4646 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4647 and false otherwise. */
4649 static bool
4650 verify_gimple_phi (gimple phi)
4652 bool err = false;
4653 unsigned i;
4654 tree phi_result = gimple_phi_result (phi);
4655 bool virtual_p;
4657 if (!phi_result)
4659 error ("invalid PHI result");
4660 return true;
4663 virtual_p = virtual_operand_p (phi_result);
4664 if (TREE_CODE (phi_result) != SSA_NAME
4665 || (virtual_p
4666 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4668 error ("invalid PHI result");
4669 err = true;
4672 for (i = 0; i < gimple_phi_num_args (phi); i++)
4674 tree t = gimple_phi_arg_def (phi, i);
4676 if (!t)
4678 error ("missing PHI def");
4679 err |= true;
4680 continue;
4682 /* Addressable variables do have SSA_NAMEs but they
4683 are not considered gimple values. */
4684 else if ((TREE_CODE (t) == SSA_NAME
4685 && virtual_p != virtual_operand_p (t))
4686 || (virtual_p
4687 && (TREE_CODE (t) != SSA_NAME
4688 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4689 || (!virtual_p
4690 && !is_gimple_val (t)))
4692 error ("invalid PHI argument");
4693 debug_generic_expr (t);
4694 err |= true;
4696 #ifdef ENABLE_TYPES_CHECKING
4697 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4699 error ("incompatible types in PHI argument %u", i);
4700 debug_generic_stmt (TREE_TYPE (phi_result));
4701 debug_generic_stmt (TREE_TYPE (t));
4702 err |= true;
4704 #endif
4707 return err;
4710 /* Verify the GIMPLE statements inside the sequence STMTS. */
4712 static bool
4713 verify_gimple_in_seq_2 (gimple_seq stmts)
4715 gimple_stmt_iterator ittr;
4716 bool err = false;
4718 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4720 gimple stmt = gsi_stmt (ittr);
4722 switch (gimple_code (stmt))
4724 case GIMPLE_BIND:
4725 err |= verify_gimple_in_seq_2 (
4726 gimple_bind_body (as_a <gbind *> (stmt)));
4727 break;
4729 case GIMPLE_TRY:
4730 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4731 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4732 break;
4734 case GIMPLE_EH_FILTER:
4735 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4736 break;
4738 case GIMPLE_EH_ELSE:
4740 geh_else *eh_else = as_a <geh_else *> (stmt);
4741 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4742 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4744 break;
4746 case GIMPLE_CATCH:
4747 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4748 as_a <gcatch *> (stmt)));
4749 break;
4751 case GIMPLE_TRANSACTION:
4752 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4753 break;
4755 default:
4757 bool err2 = verify_gimple_stmt (stmt);
4758 if (err2)
4759 debug_gimple_stmt (stmt);
4760 err |= err2;
4765 return err;
4768 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4769 is a problem, otherwise false. */
4771 static bool
4772 verify_gimple_transaction (gtransaction *stmt)
4774 tree lab = gimple_transaction_label (stmt);
4775 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4776 return true;
4777 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4781 /* Verify the GIMPLE statements inside the statement list STMTS. */
4783 DEBUG_FUNCTION void
4784 verify_gimple_in_seq (gimple_seq stmts)
4786 timevar_push (TV_TREE_STMT_VERIFY);
4787 if (verify_gimple_in_seq_2 (stmts))
4788 internal_error ("verify_gimple failed");
4789 timevar_pop (TV_TREE_STMT_VERIFY);
4792 /* Return true when T can be shared. */
4794 static bool
4795 tree_node_can_be_shared (tree t)
4797 if (IS_TYPE_OR_DECL_P (t)
4798 || is_gimple_min_invariant (t)
4799 || TREE_CODE (t) == SSA_NAME
4800 || t == error_mark_node
4801 || TREE_CODE (t) == IDENTIFIER_NODE)
4802 return true;
4804 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4805 return true;
4807 if (DECL_P (t))
4808 return true;
4810 return false;
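/* E.g. types, decls, SSA names, invariants such as integer constants
   and &global, and IDENTIFIER_NODEs may all be referenced from many
   places, while expression nodes such as a MEM_REF or an ARRAY_REF
   must stay unshared so that every statement owns its operand trees.  */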
4813 /* Called via walk_tree. Verify tree sharing. */
4815 static tree
4816 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4818 hash_set<void *> *visited = (hash_set<void *> *) data;
4820 if (tree_node_can_be_shared (*tp))
4822 *walk_subtrees = false;
4823 return NULL;
4826 if (visited->add (*tp))
4827 return *tp;
4829 return NULL;
4832 /* Called via walk_gimple_stmt. Verify tree sharing. */
4834 static tree
4835 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4837 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4838 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4841 static bool eh_error_found;
4842 bool
4843 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4844 hash_set<gimple> *visited)
4846 if (!visited->contains (stmt))
4848 error ("dead STMT in EH table");
4849 debug_gimple_stmt (stmt);
4850 eh_error_found = true;
4852 return true;
4855 /* Verify that the block of location LOC is in BLOCKS. */
4857 static bool
4858 verify_location (hash_set<tree> *blocks, location_t loc)
4860 tree block = LOCATION_BLOCK (loc);
4861 if (block != NULL_TREE
4862 && !blocks->contains (block))
4864 error ("location references block not in block tree");
4865 return true;
4867 if (block != NULL_TREE)
4868 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4869 return false;
4872 /* Called via walk_tree. Verify that expressions have no blocks. */
4874 static tree
4875 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4877 if (!EXPR_P (*tp))
4879 *walk_subtrees = false;
4880 return NULL;
4883 location_t loc = EXPR_LOCATION (*tp);
4884 if (LOCATION_BLOCK (loc) != NULL)
4885 return *tp;
4887 return NULL;
4890 /* Called via walk_tree. Verify locations of expressions. */
4892 static tree
4893 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4895 hash_set<tree> *blocks = (hash_set<tree> *) data;
4897 if (TREE_CODE (*tp) == VAR_DECL
4898 && DECL_HAS_DEBUG_EXPR_P (*tp))
4900 tree t = DECL_DEBUG_EXPR (*tp);
4901 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4902 if (addr)
4903 return addr;
4905 if ((TREE_CODE (*tp) == VAR_DECL
4906 || TREE_CODE (*tp) == PARM_DECL
4907 || TREE_CODE (*tp) == RESULT_DECL)
4908 && DECL_HAS_VALUE_EXPR_P (*tp))
4910 tree t = DECL_VALUE_EXPR (*tp);
4911 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4912 if (addr)
4913 return addr;
4916 if (!EXPR_P (*tp))
4918 *walk_subtrees = false;
4919 return NULL;
4922 location_t loc = EXPR_LOCATION (*tp);
4923 if (verify_location (blocks, loc))
4924 return *tp;
4926 return NULL;
4929 /* Called via walk_gimple_op. Verify locations of expressions. */
4931 static tree
4932 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4934 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4935 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4938 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4940 static void
4941 collect_subblocks (hash_set<tree> *blocks, tree block)
4943 tree t;
4944 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4946 blocks->add (t);
4947 collect_subblocks (blocks, t);
4951 /* Verify the GIMPLE statements in the CFG of FN. */
4953 DEBUG_FUNCTION void
4954 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4956 basic_block bb;
4957 bool err = false;
4959 timevar_push (TV_TREE_STMT_VERIFY);
4960 hash_set<void *> visited;
4961 hash_set<gimple> visited_stmts;
4963 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4964 hash_set<tree> blocks;
4965 if (DECL_INITIAL (fn->decl))
4967 blocks.add (DECL_INITIAL (fn->decl));
4968 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4971 FOR_EACH_BB_FN (bb, fn)
4973 gimple_stmt_iterator gsi;
4975 for (gphi_iterator gpi = gsi_start_phis (bb);
4976 !gsi_end_p (gpi);
4977 gsi_next (&gpi))
4979 gphi *phi = gpi.phi ();
4980 bool err2 = false;
4981 unsigned i;
4983 visited_stmts.add (phi);
4985 if (gimple_bb (phi) != bb)
4987 error ("gimple_bb (phi) is set to a wrong basic block");
4988 err2 = true;
4991 err2 |= verify_gimple_phi (phi);
4993 /* Only PHI arguments have locations. */
4994 if (gimple_location (phi) != UNKNOWN_LOCATION)
4996 error ("PHI node with location");
4997 err2 = true;
5000 for (i = 0; i < gimple_phi_num_args (phi); i++)
5002 tree arg = gimple_phi_arg_def (phi, i);
5003 tree addr = walk_tree (&arg, verify_node_sharing_1,
5004 &visited, NULL);
5005 if (addr)
5007 error ("incorrect sharing of tree nodes");
5008 debug_generic_expr (addr);
5009 err2 |= true;
5011 location_t loc = gimple_phi_arg_location (phi, i);
5012 if (virtual_operand_p (gimple_phi_result (phi))
5013 && loc != UNKNOWN_LOCATION)
5015 error ("virtual PHI with argument locations");
5016 err2 = true;
5018 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5019 if (addr)
5021 debug_generic_expr (addr);
5022 err2 = true;
5024 err2 |= verify_location (&blocks, loc);
5027 if (err2)
5028 debug_gimple_stmt (phi);
5029 err |= err2;
5032 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5034 gimple stmt = gsi_stmt (gsi);
5035 bool err2 = false;
5036 struct walk_stmt_info wi;
5037 tree addr;
5038 int lp_nr;
5040 visited_stmts.add (stmt);
5042 if (gimple_bb (stmt) != bb)
5044 error ("gimple_bb (stmt) is set to a wrong basic block");
5045 err2 = true;
5048 err2 |= verify_gimple_stmt (stmt);
5049 err2 |= verify_location (&blocks, gimple_location (stmt));
5051 memset (&wi, 0, sizeof (wi));
5052 wi.info = (void *) &visited;
5053 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5054 if (addr)
5056 error ("incorrect sharing of tree nodes");
5057 debug_generic_expr (addr);
5058 err2 |= true;
5061 memset (&wi, 0, sizeof (wi));
5062 wi.info = (void *) &blocks;
5063 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5064 if (addr)
5066 debug_generic_expr (addr);
5067 err2 |= true;
5070 /* ??? Instead of not checking these stmts at all the walker
5071 should know its context via wi. */
5072 if (!is_gimple_debug (stmt)
5073 && !is_gimple_omp (stmt))
5075 memset (&wi, 0, sizeof (wi));
5076 addr = walk_gimple_op (stmt, verify_expr, &wi);
5077 if (addr)
5079 debug_generic_expr (addr);
5080 inform (gimple_location (stmt), "in statement");
5081 err2 |= true;
5085 /* If the statement is marked as part of an EH region, then it is
5086 expected that the statement could throw. Verify that when we
5087 have optimizations that simplify statements such that we can
5088 prove they cannot throw, the other data structures are updated
5089 to match. */
5090 lp_nr = lookup_stmt_eh_lp (stmt);
5091 if (lp_nr > 0)
5093 if (!stmt_could_throw_p (stmt))
5095 if (verify_nothrow)
5097 error ("statement marked for throw, but doesn%'t");
5098 err2 |= true;
5101 else if (!gsi_one_before_end_p (gsi))
5103 error ("statement marked for throw in middle of block");
5104 err2 |= true;
5108 if (err2)
5109 debug_gimple_stmt (stmt);
5110 err |= err2;
5114 eh_error_found = false;
5115 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5116 if (eh_table)
5117 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5118 (&visited_stmts);
5120 if (err || eh_error_found)
5121 internal_error ("verify_gimple failed");
5123 verify_histograms ();
5124 timevar_pop (TV_TREE_STMT_VERIFY);
5128 /* Verifies that the flow information is OK. */
5130 static int
5131 gimple_verify_flow_info (void)
5133 int err = 0;
5134 basic_block bb;
5135 gimple_stmt_iterator gsi;
5136 gimple stmt;
5137 edge e;
5138 edge_iterator ei;
5140 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5141 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5143 error ("ENTRY_BLOCK has IL associated with it");
5144 err = 1;
5147 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5148 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5150 error ("EXIT_BLOCK has IL associated with it");
5151 err = 1;
5154 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5155 if (e->flags & EDGE_FALLTHRU)
5157 error ("fallthru to exit from bb %d", e->src->index);
5158 err = 1;
5161 FOR_EACH_BB_FN (bb, cfun)
5163 bool found_ctrl_stmt = false;
5165 stmt = NULL;
5167 /* Skip the labels at the start of the basic block. */
5168 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5170 tree label;
5171 gimple prev_stmt = stmt;
5173 stmt = gsi_stmt (gsi);
5175 if (gimple_code (stmt) != GIMPLE_LABEL)
5176 break;
5178 label = gimple_label_label (as_a <glabel *> (stmt));
5179 if (prev_stmt && DECL_NONLOCAL (label))
5181 error ("nonlocal label ");
5182 print_generic_expr (stderr, label, 0);
5183 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5184 bb->index);
5185 err = 1;
5188 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5190 error ("EH landing pad label ");
5191 print_generic_expr (stderr, label, 0);
5192 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5193 bb->index);
5194 err = 1;
5197 if (label_to_block (label) != bb)
5199 error ("label ");
5200 print_generic_expr (stderr, label, 0);
5201 fprintf (stderr, " to block does not match in bb %d",
5202 bb->index);
5203 err = 1;
5206 if (decl_function_context (label) != current_function_decl)
5208 error ("label ");
5209 print_generic_expr (stderr, label, 0);
5210 fprintf (stderr, " has incorrect context in bb %d",
5211 bb->index);
5212 err = 1;
5216 /* Verify that the body of basic block BB is free of control flow. */
5217 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5219 gimple stmt = gsi_stmt (gsi);
5221 if (found_ctrl_stmt)
5223 error ("control flow in the middle of basic block %d",
5224 bb->index);
5225 err = 1;
5228 if (stmt_ends_bb_p (stmt))
5229 found_ctrl_stmt = true;
5231 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5233 error ("label ");
5234 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5235 fprintf (stderr, " in the middle of basic block %d", bb->index);
5236 err = 1;
5240 gsi = gsi_last_bb (bb);
5241 if (gsi_end_p (gsi))
5242 continue;
5244 stmt = gsi_stmt (gsi);
5246 if (gimple_code (stmt) == GIMPLE_LABEL)
5247 continue;
5249 err |= verify_eh_edges (stmt);
5251 if (is_ctrl_stmt (stmt))
5253 FOR_EACH_EDGE (e, ei, bb->succs)
5254 if (e->flags & EDGE_FALLTHRU)
5256 error ("fallthru edge after a control statement in bb %d",
5257 bb->index);
5258 err = 1;
5262 if (gimple_code (stmt) != GIMPLE_COND)
5264 /* Verify that no edges with EDGE_TRUE/FALSE_VALUE set appear
5265 after anything other than a GIMPLE_COND. */
5266 FOR_EACH_EDGE (e, ei, bb->succs)
5267 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5269 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5270 bb->index);
5271 err = 1;
5275 switch (gimple_code (stmt))
5277 case GIMPLE_COND:
5279 edge true_edge;
5280 edge false_edge;
5282 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5284 if (!true_edge
5285 || !false_edge
5286 || !(true_edge->flags & EDGE_TRUE_VALUE)
5287 || !(false_edge->flags & EDGE_FALSE_VALUE)
5288 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5289 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5290 || EDGE_COUNT (bb->succs) >= 3)
5292 error ("wrong outgoing edge flags at end of bb %d",
5293 bb->index);
5294 err = 1;
5297 break;
5299 case GIMPLE_GOTO:
5300 if (simple_goto_p (stmt))
5302 error ("explicit goto at end of bb %d", bb->index);
5303 err = 1;
5305 else
5307 /* FIXME. We should double check that the labels in the
5308 destination blocks have their address taken. */
5309 FOR_EACH_EDGE (e, ei, bb->succs)
5310 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5311 | EDGE_FALSE_VALUE))
5312 || !(e->flags & EDGE_ABNORMAL))
5314 error ("wrong outgoing edge flags at end of bb %d",
5315 bb->index);
5316 err = 1;
5319 break;
5321 case GIMPLE_CALL:
5322 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5323 break;
5324 /* ... fallthru ... */
5325 case GIMPLE_RETURN:
5326 if (!single_succ_p (bb)
5327 || (single_succ_edge (bb)->flags
5328 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5329 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5331 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5332 err = 1;
5334 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5336 error ("return edge does not point to exit in bb %d",
5337 bb->index);
5338 err = 1;
5340 break;
5342 case GIMPLE_SWITCH:
5344 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5345 tree prev;
5346 edge e;
5347 size_t i, n;
5349 n = gimple_switch_num_labels (switch_stmt);
5351 /* Mark all the destination basic blocks. */
5352 for (i = 0; i < n; ++i)
5354 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5355 basic_block label_bb = label_to_block (lab);
5356 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5357 label_bb->aux = (void *)1;
5360 /* Verify that the case labels are sorted. */
5361 prev = gimple_switch_label (switch_stmt, 0);
5362 for (i = 1; i < n; ++i)
5364 tree c = gimple_switch_label (switch_stmt, i);
5365 if (!CASE_LOW (c))
5367 error ("found default case not at the start of "
5368 "case vector");
5369 err = 1;
5370 continue;
5372 if (CASE_LOW (prev)
5373 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5375 error ("case labels not sorted: ");
5376 print_generic_expr (stderr, prev, 0);
5377 fprintf (stderr, " is greater than ");
5378 print_generic_expr (stderr, c, 0);
5379 fprintf (stderr, " but comes before it.\n");
5380 err = 1;
5382 prev = c;
5384 /* VRP will remove the default case if it can prove it will
5385 never be executed. So do not verify there always exists
5386 a default case here. */
5388 FOR_EACH_EDGE (e, ei, bb->succs)
5390 if (!e->dest->aux)
5392 error ("extra outgoing edge %d->%d",
5393 bb->index, e->dest->index);
5394 err = 1;
5397 e->dest->aux = (void *)2;
5398 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5399 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5401 error ("wrong outgoing edge flags at end of bb %d",
5402 bb->index);
5403 err = 1;
5407 /* Check that we have all of them. */
5408 for (i = 0; i < n; ++i)
5410 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5411 basic_block label_bb = label_to_block (lab);
5413 if (label_bb->aux != (void *)2)
5415 error ("missing edge %i->%i", bb->index, label_bb->index);
5416 err = 1;
5420 FOR_EACH_EDGE (e, ei, bb->succs)
5421 e->dest->aux = (void *)0;
5423 break;
5425 case GIMPLE_EH_DISPATCH:
5426 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5427 break;
5429 default:
5430 break;
5434 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5435 verify_dominators (CDI_DOMINATORS);
5437 return err;
5441 /* Updates phi nodes after creating a forwarder block joined
5442 by edge FALLTHRU. */
5444 static void
5445 gimple_make_forwarder_block (edge fallthru)
5447 edge e;
5448 edge_iterator ei;
5449 basic_block dummy, bb;
5450 tree var;
5451 gphi_iterator gsi;
5453 dummy = fallthru->src;
5454 bb = fallthru->dest;
5456 if (single_pred_p (bb))
5457 return;
5459 /* If we redirected a branch we must create new PHI nodes at the
5460 start of BB. */
5461 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5463 gphi *phi, *new_phi;
5465 phi = gsi.phi ();
5466 var = gimple_phi_result (phi);
5467 new_phi = create_phi_node (var, bb);
5468 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5469 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5470 UNKNOWN_LOCATION);
5473 /* Add the arguments we have stored on edges. */
5474 FOR_EACH_EDGE (e, ei, bb->preds)
5476 if (e == fallthru)
5477 continue;
5479 flush_pending_stmts (e);
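/* A rough sketch of the update done above: before,

     DUMMY: x_1 = PHI <a_2, b_3>  --FALLTHRU-->  BB

   and afterwards

     DUMMY: x_4 = PHI <a_2, b_3>  --FALLTHRU-->  BB: x_1 = PHI <x_4, ...>

   i.e. each PHI in the forwarder block gets a fresh result name, and
   BB receives a new PHI for the original name fed by it over FALLTHRU,
   plus the arguments stored on the other (redirected) edges.  */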
5484 /* Return a non-special label at the head of basic block BB.
5485 Create one if it doesn't exist. */
5487 tree
5488 gimple_block_label (basic_block bb)
5490 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5491 bool first = true;
5492 tree label;
5493 glabel *stmt;
5495 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5497 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5498 if (!stmt)
5499 break;
5500 label = gimple_label_label (stmt);
5501 if (!DECL_NONLOCAL (label))
5503 if (!first)
5504 gsi_move_before (&i, &s);
5505 return label;
5509 label = create_artificial_label (UNKNOWN_LOCATION);
5510 stmt = gimple_build_label (label);
5511 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5512 return label;
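/* For instance, the GIMPLE_SWITCH and GIMPLE_ASM cases of
   gimple_redirect_edge_and_branch below rely on this to obtain a
   label in the new destination block that CASE_LABEL or an asm label
   operand can be redirected to.  */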
5516 /* Attempt to perform edge redirection by replacing a possibly complex
5517 jump instruction by a goto or by removing the jump completely.
5518 This can apply only if all edges now point to the same block. The
5519 parameters and return values are equivalent to
5520 redirect_edge_and_branch. */
5522 static edge
5523 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5525 basic_block src = e->src;
5526 gimple_stmt_iterator i;
5527 gimple stmt;
5529 /* We can replace or remove a complex jump only when we have exactly
5530 two edges. */
5531 if (EDGE_COUNT (src->succs) != 2
5532 /* Verify that all targets will be TARGET. Specifically, the
5533 edge that is not E must also go to TARGET. */
5534 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5535 return NULL;
5537 i = gsi_last_bb (src);
5538 if (gsi_end_p (i))
5539 return NULL;
5541 stmt = gsi_stmt (i);
5543 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5545 gsi_remove (&i, true);
5546 e = ssa_redirect_edge (e, target);
5547 e->flags = EDGE_FALLTHRU;
5548 return e;
5551 return NULL;
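/* Sketch of the transformation: if SRC ends in

     if (x_1 != 0) goto L1; else goto L2;

   and both outgoing edges now lead to TARGET, the GIMPLE_COND is
   deleted and E is turned into a plain fallthru edge to TARGET.  */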
5555 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5556 edge representing the redirected branch. */
5558 static edge
5559 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5561 basic_block bb = e->src;
5562 gimple_stmt_iterator gsi;
5563 edge ret;
5564 gimple stmt;
5566 if (e->flags & EDGE_ABNORMAL)
5567 return NULL;
5569 if (e->dest == dest)
5570 return NULL;
5572 if (e->flags & EDGE_EH)
5573 return redirect_eh_edge (e, dest);
5575 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5577 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5578 if (ret)
5579 return ret;
5582 gsi = gsi_last_bb (bb);
5583 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5585 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5587 case GIMPLE_COND:
5588 /* For COND_EXPR, we only need to redirect the edge. */
5589 break;
5591 case GIMPLE_GOTO:
5592 /* No non-abnormal edges should lead from a non-simple goto, and
5593 simple ones should be represented implicitly. */
5594 gcc_unreachable ();
5596 case GIMPLE_SWITCH:
5598 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5599 tree label = gimple_block_label (dest);
5600 tree cases = get_cases_for_edge (e, switch_stmt);
5602 /* If we have a list of cases associated with E, then use it
5603 as it's a lot faster than walking the entire case vector. */
5604 if (cases)
5606 edge e2 = find_edge (e->src, dest);
5607 tree last, first;
5609 first = cases;
5610 while (cases)
5612 last = cases;
5613 CASE_LABEL (cases) = label;
5614 cases = CASE_CHAIN (cases);
5617 /* If there was already an edge in the CFG, then we need
5618 to move all the cases associated with E to E2. */
5619 if (e2)
5621 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5623 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5624 CASE_CHAIN (cases2) = first;
5626 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5628 else
5630 size_t i, n = gimple_switch_num_labels (switch_stmt);
5632 for (i = 0; i < n; i++)
5634 tree elt = gimple_switch_label (switch_stmt, i);
5635 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5636 CASE_LABEL (elt) = label;
5640 break;
5642 case GIMPLE_ASM:
5644 gasm *asm_stmt = as_a <gasm *> (stmt);
5645 int i, n = gimple_asm_nlabels (asm_stmt);
5646 tree label = NULL;
5648 for (i = 0; i < n; ++i)
5650 tree cons = gimple_asm_label_op (asm_stmt, i);
5651 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5653 if (!label)
5654 label = gimple_block_label (dest);
5655 TREE_VALUE (cons) = label;
5659 /* If we didn't find any label matching the former edge in the
5660 asm labels, we must be redirecting the fallthrough
5661 edge. */
5662 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5664 break;
5666 case GIMPLE_RETURN:
5667 gsi_remove (&gsi, true);
5668 e->flags |= EDGE_FALLTHRU;
5669 break;
5671 case GIMPLE_OMP_RETURN:
5672 case GIMPLE_OMP_CONTINUE:
5673 case GIMPLE_OMP_SECTIONS_SWITCH:
5674 case GIMPLE_OMP_FOR:
5675 /* The edges from OMP constructs can be simply redirected. */
5676 break;
5678 case GIMPLE_EH_DISPATCH:
5679 if (!(e->flags & EDGE_FALLTHRU))
5680 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5681 break;
5683 case GIMPLE_TRANSACTION:
5684 /* The ABORT edge has a stored label associated with it, otherwise
5685 the edges are simply redirectable. */
5686 if (e->flags == 0)
5687 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5688 gimple_block_label (dest));
5689 break;
5691 default:
5692 /* Otherwise it must be a fallthru edge, and we don't need to
5693 do anything besides redirecting it. */
5694 gcc_assert (e->flags & EDGE_FALLTHRU);
5695 break;
5698 /* Update/insert PHI nodes as necessary. */
5700 /* Now update the edges in the CFG. */
5701 e = ssa_redirect_edge (e, dest);
5703 return e;
5706 /* Returns true if it is possible to remove edge E by redirecting
5707 it to the destination of the other edge from E->src. */
5709 static bool
5710 gimple_can_remove_branch_p (const_edge e)
5712 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5713 return false;
5715 return true;
5718 /* Simple wrapper, as we can always redirect fallthru edges. */
5720 static basic_block
5721 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5723 e = gimple_redirect_edge_and_branch (e, dest);
5724 gcc_assert (e);
5726 return NULL;
5730 /* Splits basic block BB after statement STMT (but at least after the
5731 labels). If STMT is NULL, BB is split just after the labels. */
5733 static basic_block
5734 gimple_split_block (basic_block bb, void *stmt)
5736 gimple_stmt_iterator gsi;
5737 gimple_stmt_iterator gsi_tgt;
5738 gimple_seq list;
5739 basic_block new_bb;
5740 edge e;
5741 edge_iterator ei;
5743 new_bb = create_empty_bb (bb);
5745 /* Redirect the outgoing edges. */
5746 new_bb->succs = bb->succs;
5747 bb->succs = NULL;
5748 FOR_EACH_EDGE (e, ei, new_bb->succs)
5749 e->src = new_bb;
5751 /* Get a stmt iterator pointing to the first stmt to move. */
5752 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5753 gsi = gsi_after_labels (bb);
5754 else
5756 gsi = gsi_for_stmt ((gimple) stmt);
5757 gsi_next (&gsi);
5760 /* Move everything from GSI to the new basic block. */
5761 if (gsi_end_p (gsi))
5762 return new_bb;
5764 /* Split the statement list - avoid re-creating new containers as this
5765 brings ugly quadratic memory consumption in the inliner.
5766 (We are still quadratic since we need to update stmt BB pointers,
5767 sadly.) */
5768 gsi_split_seq_before (&gsi, &list);
5769 set_bb_seq (new_bb, list);
5770 for (gsi_tgt = gsi_start (list);
5771 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5772 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5774 return new_bb;
5778 /* Moves basic block BB after block AFTER. */
5780 static bool
5781 gimple_move_block_after (basic_block bb, basic_block after)
5783 if (bb->prev_bb == after)
5784 return true;
5786 unlink_block (bb);
5787 link_block (bb, after);
5789 return true;
5793 /* Return TRUE if block BB has no executable statements, otherwise return
5794 FALSE. */
5796 static bool
5797 gimple_empty_block_p (basic_block bb)
5799 /* BB must have no executable statements. */
5800 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5801 if (phi_nodes (bb))
5802 return false;
5803 if (gsi_end_p (gsi))
5804 return true;
5805 if (is_gimple_debug (gsi_stmt (gsi)))
5806 gsi_next_nondebug (&gsi);
5807 return gsi_end_p (gsi);
5811 /* Split a basic block if it ends with a conditional branch and if the
5812 other part of the block is not empty. */
5814 static basic_block
5815 gimple_split_block_before_cond_jump (basic_block bb)
5817 gimple last, split_point;
5818 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5819 if (gsi_end_p (gsi))
5820 return NULL;
5821 last = gsi_stmt (gsi);
5822 if (gimple_code (last) != GIMPLE_COND
5823 && gimple_code (last) != GIMPLE_SWITCH)
5824 return NULL;
5825 gsi_prev_nondebug (&gsi);
5826 split_point = gsi_stmt (gsi);
5827 return split_block (bb, split_point)->dest;
5831 /* Return true if basic block BB can be duplicated. */
5833 static bool
5834 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5836 return true;
5839 /* Create a duplicate of the basic block BB. NOTE: This does not
5840 preserve SSA form. */
5842 static basic_block
5843 gimple_duplicate_bb (basic_block bb)
5845 basic_block new_bb;
5846 gimple_stmt_iterator gsi_tgt;
5848 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5850 /* Copy the PHI nodes. We ignore PHI node arguments here because
5851 the incoming edges have not been set up yet. */
5852 for (gphi_iterator gpi = gsi_start_phis (bb);
5853 !gsi_end_p (gpi);
5854 gsi_next (&gpi))
5856 gphi *phi, *copy;
5857 phi = gpi.phi ();
5858 copy = create_phi_node (NULL_TREE, new_bb);
5859 create_new_def_for (gimple_phi_result (phi), copy,
5860 gimple_phi_result_ptr (copy));
5861 gimple_set_uid (copy, gimple_uid (phi));
5864 gsi_tgt = gsi_start_bb (new_bb);
5865 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5866 !gsi_end_p (gsi);
5867 gsi_next (&gsi))
5869 def_operand_p def_p;
5870 ssa_op_iter op_iter;
5871 tree lhs;
5872 gimple stmt, copy;
5874 stmt = gsi_stmt (gsi);
5875 if (gimple_code (stmt) == GIMPLE_LABEL)
5876 continue;
5878 /* Don't duplicate label debug stmts. */
5879 if (gimple_debug_bind_p (stmt)
5880 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5881 == LABEL_DECL)
5882 continue;
5884 /* Create a new copy of STMT and duplicate STMT's virtual
5885 operands. */
5886 copy = gimple_copy (stmt);
5887 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5889 maybe_duplicate_eh_stmt (copy, stmt);
5890 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5892 /* When copying around a stmt writing into a local non-user
5893 aggregate, make sure it won't share a stack slot with other
5894 vars. */
5895 lhs = gimple_get_lhs (stmt);
5896 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5898 tree base = get_base_address (lhs);
5899 if (base
5900 && (TREE_CODE (base) == VAR_DECL
5901 || TREE_CODE (base) == RESULT_DECL)
5902 && DECL_IGNORED_P (base)
5903 && !TREE_STATIC (base)
5904 && !DECL_EXTERNAL (base)
5905 && (TREE_CODE (base) != VAR_DECL
5906 || !DECL_HAS_VALUE_EXPR_P (base)))
5907 DECL_NONSHAREABLE (base) = 1;
5910 /* Create new names for all the definitions created by COPY and
5911 add replacement mappings for each new name. */
5912 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5913 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5916 return new_bb;
5919 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5921 static void
5922 add_phi_args_after_copy_edge (edge e_copy)
5924 basic_block bb, bb_copy = e_copy->src, dest;
5925 edge e;
5926 edge_iterator ei;
5927 gphi *phi, *phi_copy;
5928 tree def;
5929 gphi_iterator psi, psi_copy;
5931 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5932 return;
5934 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5936 if (e_copy->dest->flags & BB_DUPLICATED)
5937 dest = get_bb_original (e_copy->dest);
5938 else
5939 dest = e_copy->dest;
5941 e = find_edge (bb, dest);
5942 if (!e)
5944 /* During loop unrolling the target of the latch edge is copied.
5945 In this case we are not looking for the edge to DEST, but for
5946 the edge to the duplicated block whose original was DEST. */
5947 FOR_EACH_EDGE (e, ei, bb->succs)
5949 if ((e->dest->flags & BB_DUPLICATED)
5950 && get_bb_original (e->dest) == dest)
5951 break;
5954 gcc_assert (e != NULL);
5957 for (psi = gsi_start_phis (e->dest),
5958 psi_copy = gsi_start_phis (e_copy->dest);
5959 !gsi_end_p (psi);
5960 gsi_next (&psi), gsi_next (&psi_copy))
5962 phi = psi.phi ();
5963 phi_copy = psi_copy.phi ();
5964 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5965 add_phi_arg (phi_copy, def, e_copy,
5966 gimple_phi_arg_location_from_edge (phi, e));
5971 /* Basic block BB_COPY was created by code duplication. Add phi node
5972 arguments for edges going out of BB_COPY. The blocks that were
5973 duplicated have BB_DUPLICATED set. */
5975 void
5976 add_phi_args_after_copy_bb (basic_block bb_copy)
5978 edge e_copy;
5979 edge_iterator ei;
5981 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5983 add_phi_args_after_copy_edge (e_copy);
5987 /* Blocks in REGION_COPY array of length N_REGION were created by
5988 duplication of basic blocks. Add phi node arguments for edges
5989 going from these blocks. If E_COPY is not NULL, also add
5990 phi node arguments for its destination. */
5992 void
5993 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5994 edge e_copy)
5996 unsigned i;
5998 for (i = 0; i < n_region; i++)
5999 region_copy[i]->flags |= BB_DUPLICATED;
6001 for (i = 0; i < n_region; i++)
6002 add_phi_args_after_copy_bb (region_copy[i]);
6003 if (e_copy)
6004 add_phi_args_after_copy_edge (e_copy);
6006 for (i = 0; i < n_region; i++)
6007 region_copy[i]->flags &= ~BB_DUPLICATED;
6010 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6011 important exit edge EXIT. By important we mean that no SSA name defined
6012 inside the region is live over the other exit edges of the region. All entry
6013 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6014 to the duplicate of the region. Dominance and loop information is
6015 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6016 UPDATE_DOMINANCE is false then we assume that the caller will update the
6017 dominance information after calling this function. The new basic
6018 blocks are stored to REGION_COPY in the same order as in REGION,
6019 provided that REGION_COPY is not NULL.
6020 The function returns false if it is unable to copy the region,
6021 true otherwise. */
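/* A typical use is loop header copying (sketched; the block names are
   illustrative):

     before:   preheader -> header -> { body, exit }, body -> header

     after:    preheader -> header' -> { body, exit }
                            body -> header -> { body, exit }

   ENTRY is the preheader edge and EXIT the edge from the header into
   the body; the copied header' guards entry into the loop while the
   original header becomes the exit test at the latch.  */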
6023 bool
6024 gimple_duplicate_sese_region (edge entry, edge exit,
6025 basic_block *region, unsigned n_region,
6026 basic_block *region_copy,
6027 bool update_dominance)
6029 unsigned i;
6030 bool free_region_copy = false, copying_header = false;
6031 struct loop *loop = entry->dest->loop_father;
6032 edge exit_copy;
6033 vec<basic_block> doms;
6034 edge redirected;
6035 int total_freq = 0, entry_freq = 0;
6036 gcov_type total_count = 0, entry_count = 0;
6038 if (!can_copy_bbs_p (region, n_region))
6039 return false;
6041 /* Some sanity checking. Note that we do not check for all possible
6042 misuses of the functions. I.e., if you ask to copy something weird,
6043 it will work, but the state of structures probably will not be
6044 correct. */
6045 for (i = 0; i < n_region; i++)
6047 /* We do not handle subloops, i.e. all the blocks must belong to the
6048 same loop. */
6049 if (region[i]->loop_father != loop)
6050 return false;
6052 if (region[i] != entry->dest
6053 && region[i] == loop->header)
6054 return false;
6057 /* In case the function is used for loop header copying (which is the primary
6058 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6059 if (loop->header == entry->dest)
6061 copying_header = true;
6063 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6064 return false;
6066 for (i = 0; i < n_region; i++)
6067 if (region[i] != exit->src
6068 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6069 return false;
6072 initialize_original_copy_tables ();
6074 if (copying_header)
6075 set_loop_copy (loop, loop_outer (loop));
6076 else
6077 set_loop_copy (loop, loop);
6079 if (!region_copy)
6081 region_copy = XNEWVEC (basic_block, n_region);
6082 free_region_copy = true;
6085 /* Record blocks outside the region that are dominated by something
6086 inside. */
6087 if (update_dominance)
6089 doms.create (0);
6090 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6093 if (entry->dest->count)
6095 total_count = entry->dest->count;
6096 entry_count = entry->count;
6097 /* Fix up corner cases, to avoid division by zero or creation of negative
6098 frequencies. */
6099 if (entry_count > total_count)
6100 entry_count = total_count;
6102 else
6104 total_freq = entry->dest->frequency;
6105 entry_freq = EDGE_FREQUENCY (entry);
6106 /* Fix up corner cases, to avoid division by zero or creation of negative
6107 frequencies. */
6108 if (total_freq == 0)
6109 total_freq = 1;
6110 else if (entry_freq > total_freq)
6111 entry_freq = total_freq;
6114 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6115 split_edge_bb_loc (entry), update_dominance);
6116 if (total_count)
6118 scale_bbs_frequencies_gcov_type (region, n_region,
6119 total_count - entry_count,
6120 total_count);
6121 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6122 total_count);
6124 else
6126 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6127 total_freq);
6128 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6131 if (copying_header)
6133 loop->header = exit->dest;
6134 loop->latch = exit->src;
6137 /* Redirect the entry and add the phi node arguments. */
6138 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6139 gcc_assert (redirected != NULL);
6140 flush_pending_stmts (entry);
6142 /* Concerning updating of dominators: We must recount dominators
6143 for entry block and its copy. Anything that is outside of the
6144 region, but was dominated by something inside needs recounting as
6145 well. */
6146 if (update_dominance)
6148 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6149 doms.safe_push (get_bb_original (entry->dest));
6150 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6151 doms.release ();
6154 /* Add the other PHI node arguments. */
6155 add_phi_args_after_copy (region_copy, n_region, NULL);
6157 if (free_region_copy)
6158 free (region_copy);
6160 free_original_copy_tables ();
6161 return true;
6164 /* Checks if BB is part of the region defined by N_REGION BBS. */
6165 static bool
6166 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6168 unsigned int n;
6170 for (n = 0; n < n_region; n++)
6172 if (bb == bbs[n])
6173 return true;
6175 return false;
6178 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6179 are stored to REGION_COPY in the same order in which they appear
6180 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6181 the region, EXIT an exit from it. The condition guarding EXIT
6182 is moved to ENTRY. Returns true if duplication succeeds, false
6183 otherwise.
6185 For example,
6187 some_code;
6188 if (cond)
6189 ;
6190 else
6191 ;
6193 is transformed to
6195 if (cond)
6196 {
6197 some_code;
6198 ;
6199 }
6200 else
6201 {
6202 some_code;
6203 ;
6204 }
6205 */
6207 bool
6208 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6209 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6210 basic_block *region_copy ATTRIBUTE_UNUSED)
6212 unsigned i;
6213 bool free_region_copy = false;
6214 struct loop *loop = exit->dest->loop_father;
6215 struct loop *orig_loop = entry->dest->loop_father;
6216 basic_block switch_bb, entry_bb, nentry_bb;
6217 vec<basic_block> doms;
6218 int total_freq = 0, exit_freq = 0;
6219 gcov_type total_count = 0, exit_count = 0;
6220 edge exits[2], nexits[2], e;
6221 gimple_stmt_iterator gsi;
6222 gimple cond_stmt;
6223 edge sorig, snew;
6224 basic_block exit_bb;
6225 gphi_iterator psi;
6226 gphi *phi;
6227 tree def;
6228 struct loop *target, *aloop, *cloop;
6230 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6231 exits[0] = exit;
6232 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6234 if (!can_copy_bbs_p (region, n_region))
6235 return false;
6237 initialize_original_copy_tables ();
6238 set_loop_copy (orig_loop, loop);
6240 target = loop;
6241 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6243 if (bb_part_of_region_p (aloop->header, region, n_region))
6245 cloop = duplicate_loop (aloop, target);
6246 duplicate_subloops (aloop, cloop);
6250 if (!region_copy)
6252 region_copy = XNEWVEC (basic_block, n_region);
6253 free_region_copy = true;
6256 gcc_assert (!need_ssa_update_p (cfun));
6258 /* Record blocks outside the region that are dominated by something
6259 inside. */
6260 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6262 if (exit->src->count)
6264 total_count = exit->src->count;
6265 exit_count = exit->count;
6266 /* Fix up corner cases, to avoid division by zero or creation of negative
6267 frequencies. */
6268 if (exit_count > total_count)
6269 exit_count = total_count;
6271 else
6273 total_freq = exit->src->frequency;
6274 exit_freq = EDGE_FREQUENCY (exit);
6275 /* Fix up corner cases, to avoid division by zero or creation of negative
6276 frequencies. */
6277 if (total_freq == 0)
6278 total_freq = 1;
6279 if (exit_freq > total_freq)
6280 exit_freq = total_freq;
6283 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6284 split_edge_bb_loc (exit), true);
6285 if (total_count)
6287 scale_bbs_frequencies_gcov_type (region, n_region,
6288 total_count - exit_count,
6289 total_count);
6290 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6291 total_count);
6293 else
6295 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6296 total_freq);
6297 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6300 /* Create the switch block, and put the exit condition to it. */
6301 entry_bb = entry->dest;
6302 nentry_bb = get_bb_copy (entry_bb);
6303 if (!last_stmt (entry->src)
6304 || !stmt_ends_bb_p (last_stmt (entry->src)))
6305 switch_bb = entry->src;
6306 else
6307 switch_bb = split_edge (entry);
6308 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6310 gsi = gsi_last_bb (switch_bb);
6311 cond_stmt = last_stmt (exit->src);
6312 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6313 cond_stmt = gimple_copy (cond_stmt);
6315 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6317 sorig = single_succ_edge (switch_bb);
6318 sorig->flags = exits[1]->flags;
6319 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6321 /* Register the new edge from SWITCH_BB in loop exit lists. */
6322 rescan_loop_exit (snew, true, false);
6324 /* Add the PHI node arguments. */
6325 add_phi_args_after_copy (region_copy, n_region, snew);
6327 /* Get rid of now superfluous conditions and associated edges (and phi node
6328 arguments). */
6329 exit_bb = exit->dest;
6331 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6332 PENDING_STMT (e) = NULL;
6334 /* The latch of ORIG_LOOP was copied, and so was the backedge
6335 to the original header. We redirect this backedge to EXIT_BB. */
6336 for (i = 0; i < n_region; i++)
6337 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6339 gcc_assert (single_succ_edge (region_copy[i]));
6340 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6341 PENDING_STMT (e) = NULL;
6342 for (psi = gsi_start_phis (exit_bb);
6343 !gsi_end_p (psi);
6344 gsi_next (&psi))
6346 phi = psi.phi ();
6347 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6348 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6351 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6352 PENDING_STMT (e) = NULL;
6354 /* Anything that is outside of the region, but was dominated by
6355 something inside, needs its dominance info updated. */
6356 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6357 doms.release ();
6358 /* Update the SSA web. */
6359 update_ssa (TODO_update_ssa);
6361 if (free_region_copy)
6362 free (region_copy);
6364 free_original_copy_tables ();
6365 return true;
6368 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6369 adding blocks when the dominator traversal reaches EXIT. This
6370 function silently assumes that ENTRY strictly dominates EXIT. */
6372 void
6373 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6374 vec<basic_block> *bbs_p)
6376 basic_block son;
6378 for (son = first_dom_son (CDI_DOMINATORS, entry);
6379 son;
6380 son = next_dom_son (CDI_DOMINATORS, son))
6382 bbs_p->safe_push (son);
6383 if (son != exit)
6384 gather_blocks_in_sese_region (son, exit, bbs_p);
6388 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6389 The duplicates are recorded in VARS_MAP. */
6391 static void
6392 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6393 tree to_context)
6395 tree t = *tp, new_t;
6396 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6398 if (DECL_CONTEXT (t) == to_context)
6399 return;
6401 bool existed;
6402 tree &loc = vars_map->get_or_insert (t, &existed);
6404 if (!existed)
6406 if (SSA_VAR_P (t))
6408 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6409 add_local_decl (f, new_t);
6411 else
6413 gcc_assert (TREE_CODE (t) == CONST_DECL);
6414 new_t = copy_node (t);
6416 DECL_CONTEXT (new_t) = to_context;
6418 loc = new_t;
6420 else
6421 new_t = loc;
6423 *tp = new_t;
6427 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6428 VARS_MAP maps old ssa names and var_decls to the new ones. */
6430 static tree
6431 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6432 tree to_context)
6434 tree new_name;
6436 gcc_assert (!virtual_operand_p (name));
6438 tree *loc = vars_map->get (name);
6440 if (!loc)
6442 tree decl = SSA_NAME_VAR (name);
6443 if (decl)
6445 replace_by_duplicate_decl (&decl, vars_map, to_context);
6446 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6447 decl, SSA_NAME_DEF_STMT (name));
6448 if (SSA_NAME_IS_DEFAULT_DEF (name))
6449 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6450 decl, new_name);
6452 else
6453 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6454 name, SSA_NAME_DEF_STMT (name));
6456 vars_map->put (name, new_name);
6458 else
6459 new_name = *loc;
6461 return new_name;
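/* A sketch of how the two helpers above cooperate; NAME, TO_CONTEXT and
   VARS_MAP stand for a caller's state and are placeholders, not part of
   this file.  The map memoizes duplicates, so repeated lookups of the
   same SSA name yield the same copy:

     hash_map<tree, tree> vars_map;
     tree new_name = replace_ssa_name (name, &vars_map, to_context);
     gcc_assert (replace_ssa_name (name, &vars_map, to_context)
                 == new_name);  */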
6464 struct move_stmt_d
6466 tree orig_block;
6467 tree new_block;
6468 tree from_context;
6469 tree to_context;
6470 hash_map<tree, tree> *vars_map;
6471 htab_t new_label_map;
6472 hash_map<void *, void *> *eh_map;
6473 bool remap_decls_p;
6476 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6477 contained in *TP if it was previously ORIG_BLOCK, and change the
6478 DECL_CONTEXT of every local variable referenced in *TP. */
6480 static tree
6481 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6483 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6484 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6485 tree t = *tp;
6487 if (EXPR_P (t))
6489 tree block = TREE_BLOCK (t);
6490 if (block == p->orig_block
6491 || (p->orig_block == NULL_TREE
6492 && block != NULL_TREE))
6493 TREE_SET_BLOCK (t, p->new_block);
6494 #ifdef ENABLE_CHECKING
6495 else if (block != NULL_TREE)
6497 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6498 block = BLOCK_SUPERCONTEXT (block);
6499 gcc_assert (block == p->orig_block);
6501 #endif
6503 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6505 if (TREE_CODE (t) == SSA_NAME)
6506 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6507 else if (TREE_CODE (t) == LABEL_DECL)
6509 if (p->new_label_map)
6511 struct tree_map in, *out;
6512 in.base.from = t;
6513 out = (struct tree_map *)
6514 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6515 if (out)
6516 *tp = t = out->to;
6519 DECL_CONTEXT (t) = p->to_context;
6521 else if (p->remap_decls_p)
6523 /* Replace T with its duplicate. T should no longer appear in the
6524 parent function, so this looks wasteful; however, it may appear
6525 in referenced_vars, and more importantly, as virtual operands of
6526 statements, and in alias lists of other variables. It would be
6527 quite difficult to expunge it from all those places. ??? It might
6528 suffice to do this for addressable variables. */
6529 if ((TREE_CODE (t) == VAR_DECL
6530 && !is_global_var (t))
6531 || TREE_CODE (t) == CONST_DECL)
6532 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6534 *walk_subtrees = 0;
6536 else if (TYPE_P (t))
6537 *walk_subtrees = 0;
6539 return NULL_TREE;
6542 /* Helper for move_stmt_r. Given an EH region number for the source
6543 function, map that to the duplicate EH region number in the dest. */
6545 static int
6546 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6548 eh_region old_r, new_r;
6550 old_r = get_eh_region_from_number (old_nr);
6551 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6553 return new_r->index;
6556 /* Similar, but operate on INTEGER_CSTs. */
6558 static tree
6559 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6561 int old_nr, new_nr;
6563 old_nr = tree_to_shwi (old_t_nr);
6564 new_nr = move_stmt_eh_region_nr (old_nr, p);
6566 return build_int_cst (integer_type_node, new_nr);
6569 /* Like move_stmt_op, but for gimple statements.
6571 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6572 contained in the current statement in *GSI_P and change the
6573 DECL_CONTEXT of every local variable referenced in the current
6574 statement. */
6576 static tree
6577 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6578 struct walk_stmt_info *wi)
6580 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6581 gimple stmt = gsi_stmt (*gsi_p);
6582 tree block = gimple_block (stmt);
6584 if (block == p->orig_block
6585 || (p->orig_block == NULL_TREE
6586 && block != NULL_TREE))
6587 gimple_set_block (stmt, p->new_block);
6589 switch (gimple_code (stmt))
6591 case GIMPLE_CALL:
6592 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6594 tree r, fndecl = gimple_call_fndecl (stmt);
6595 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6596 switch (DECL_FUNCTION_CODE (fndecl))
6598 case BUILT_IN_EH_COPY_VALUES:
6599 r = gimple_call_arg (stmt, 1);
6600 r = move_stmt_eh_region_tree_nr (r, p);
6601 gimple_call_set_arg (stmt, 1, r);
6602 /* FALLTHRU */
6604 case BUILT_IN_EH_POINTER:
6605 case BUILT_IN_EH_FILTER:
6606 r = gimple_call_arg (stmt, 0);
6607 r = move_stmt_eh_region_tree_nr (r, p);
6608 gimple_call_set_arg (stmt, 0, r);
6609 break;
6611 default:
6612 break;
6615 break;
6617 case GIMPLE_RESX:
6619 gresx *resx_stmt = as_a <gresx *> (stmt);
6620 int r = gimple_resx_region (resx_stmt);
6621 r = move_stmt_eh_region_nr (r, p);
6622 gimple_resx_set_region (resx_stmt, r);
6624 break;
6626 case GIMPLE_EH_DISPATCH:
6628 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6629 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6630 r = move_stmt_eh_region_nr (r, p);
6631 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6633 break;
6635 case GIMPLE_OMP_RETURN:
6636 case GIMPLE_OMP_CONTINUE:
6637 break;
6638 default:
6639 if (is_gimple_omp (stmt))
6641 /* Do not remap variables inside OMP directives. Variables
6642 referenced in clauses and directive header belong to the
6643 parent function and should not be moved into the child
6644 function. */
6645 bool save_remap_decls_p = p->remap_decls_p;
6646 p->remap_decls_p = false;
6647 *handled_ops_p = true;
6649 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6650 move_stmt_op, wi);
6652 p->remap_decls_p = save_remap_decls_p;
6654 break;
6657 return NULL_TREE;
6660 /* Move basic block BB from function CFUN to function DEST_FN. The
6661 block is moved out of the original linked list and placed after
6662 block AFTER in the new list. Also, the block is removed from the
6663 original array of blocks and placed in DEST_FN's array of blocks.
6664 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6665 updated to reflect the moved edges.
6667 The local variables are remapped to new instances; VARS_MAP is used
6668 to record the mapping.
6670 static void
6671 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6672 basic_block after, bool update_edge_count_p,
6673 struct move_stmt_d *d)
6675 struct control_flow_graph *cfg;
6676 edge_iterator ei;
6677 edge e;
6678 gimple_stmt_iterator si;
6679 unsigned old_len, new_len;
6681 /* Remove BB from dominance structures. */
6682 delete_from_dominance_info (CDI_DOMINATORS, bb);
6684 /* Move BB from its current loop to the copy in the new function. */
6685 if (current_loops)
6687 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6688 if (new_loop)
6689 bb->loop_father = new_loop;
6692 /* Link BB to the new linked list. */
6693 move_block_after (bb, after);
6695 /* Update the edge count in the corresponding flowgraphs. */
6696 if (update_edge_count_p)
6697 FOR_EACH_EDGE (e, ei, bb->succs)
6699 cfun->cfg->x_n_edges--;
6700 dest_cfun->cfg->x_n_edges++;
6703 /* Remove BB from the original basic block array. */
6704 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6705 cfun->cfg->x_n_basic_blocks--;
6707 /* Grow DEST_CFUN's basic block array if needed. */
6708 cfg = dest_cfun->cfg;
6709 cfg->x_n_basic_blocks++;
6710 if (bb->index >= cfg->x_last_basic_block)
6711 cfg->x_last_basic_block = bb->index + 1;
6713 old_len = vec_safe_length (cfg->x_basic_block_info);
6714 if ((unsigned) cfg->x_last_basic_block >= old_len)
6716 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6717 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6720 (*cfg->x_basic_block_info)[bb->index] = bb;
6722 /* Remap the variables in phi nodes. */
6723 for (gphi_iterator psi = gsi_start_phis (bb);
6724 !gsi_end_p (psi); )
6726 gphi *phi = psi.phi ();
6727 use_operand_p use;
6728 tree op = PHI_RESULT (phi);
6729 ssa_op_iter oi;
6730 unsigned i;
6732 if (virtual_operand_p (op))
6734 /* Remove the phi nodes for virtual operands (alias analysis will be
6735 run for the new function, anyway). */
6736 remove_phi_node (&psi, true);
6737 continue;
6740 SET_PHI_RESULT (phi,
6741 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6742 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6744 op = USE_FROM_PTR (use);
6745 if (TREE_CODE (op) == SSA_NAME)
6746 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6749 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6751 location_t locus = gimple_phi_arg_location (phi, i);
6752 tree block = LOCATION_BLOCK (locus);
6754 if (locus == UNKNOWN_LOCATION)
6755 continue;
6756 if (d->orig_block == NULL_TREE || block == d->orig_block)
6758 if (d->new_block == NULL_TREE)
6759 locus = LOCATION_LOCUS (locus);
6760 else
6761 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6762 gimple_phi_arg_set_location (phi, i, locus);
6766 gsi_next (&psi);
6769 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6771 gimple stmt = gsi_stmt (si);
6772 struct walk_stmt_info wi;
6774 memset (&wi, 0, sizeof (wi));
6775 wi.info = d;
6776 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6778 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6780 tree label = gimple_label_label (label_stmt);
6781 int uid = LABEL_DECL_UID (label);
6783 gcc_assert (uid > -1);
6785 old_len = vec_safe_length (cfg->x_label_to_block_map);
6786 if (old_len <= (unsigned) uid)
6788 new_len = 3 * uid / 2 + 1;
6789 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6792 (*cfg->x_label_to_block_map)[uid] = bb;
6793 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6795 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6797 if (uid >= dest_cfun->cfg->last_label_uid)
6798 dest_cfun->cfg->last_label_uid = uid + 1;
6801 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6802 remove_stmt_from_eh_lp_fn (cfun, stmt);
6804 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6805 gimple_remove_stmt_histograms (cfun, stmt);
6807 /* We cannot leave any operands allocated from the operand caches of
6808 the current function. */
6809 free_stmt_operands (cfun, stmt);
6810 push_cfun (dest_cfun);
6811 update_stmt (stmt);
6812 pop_cfun ();
6815 FOR_EACH_EDGE (e, ei, bb->succs)
6816 if (e->goto_locus != UNKNOWN_LOCATION)
6818 tree block = LOCATION_BLOCK (e->goto_locus);
6819 if (d->orig_block == NULL_TREE
6820 || block == d->orig_block)
6821 e->goto_locus = d->new_block ?
6822 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6823 LOCATION_LOCUS (e->goto_locus);
6827 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6828 the outermost EH region. Use REGION as the incoming base EH region. */
6830 static eh_region
6831 find_outermost_region_in_block (struct function *src_cfun,
6832 basic_block bb, eh_region region)
6834 gimple_stmt_iterator si;
6836 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6838 gimple stmt = gsi_stmt (si);
6839 eh_region stmt_region;
6840 int lp_nr;
6842 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6843 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6844 if (stmt_region)
6846 if (region == NULL)
6847 region = stmt_region;
6848 else if (stmt_region != region)
6850 region = eh_region_outermost (src_cfun, stmt_region, region);
6851 gcc_assert (region != NULL);
6856 return region;
6859 static tree
6860 new_label_mapper (tree decl, void *data)
6862 htab_t hash = (htab_t) data;
6863 struct tree_map *m;
6864 void **slot;
6866 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6868 m = XNEW (struct tree_map);
6869 m->hash = DECL_UID (decl);
6870 m->base.from = decl;
6871 m->to = create_artificial_label (UNKNOWN_LOCATION);
6872 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6873 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6874 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6876 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6877 gcc_assert (*slot == NULL);
6879 *slot = m;
6881 return m->to;
6884 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6885 subblocks. */
6887 static void
6888 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6889 tree to_context)
6891 tree *tp, t;
6893 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6895 t = *tp;
6896 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6897 continue;
6898 replace_by_duplicate_decl (&t, vars_map, to_context);
6899 if (t != *tp)
6901 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6903 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6904 DECL_HAS_VALUE_EXPR_P (t) = 1;
6906 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6907 *tp = t;
6911 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6912 replace_block_vars_by_duplicates (block, vars_map, to_context);
6915 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6916 from FN1 to FN2. */
6918 static void
6919 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6920 struct loop *loop)
6922 /* Discard it from the old loop array. */
6923 (*get_loops (fn1))[loop->num] = NULL;
6925 /* Place it in the new loop array, assigning it a new number. */
6926 loop->num = number_of_loops (fn2);
6927 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6929 /* Recurse to children. */
6930 for (loop = loop->inner; loop; loop = loop->next)
6931 fixup_loop_arrays_after_move (fn1, fn2, loop);
6934 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6935 delimited by ENTRY and EXIT, possibly containing noreturn blocks. */
6937 DEBUG_FUNCTION void
6938 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6940 basic_block bb;
6941 edge_iterator ei;
6942 edge e;
6943 bitmap bbs = BITMAP_ALLOC (NULL);
6944 int i;
6946 gcc_assert (entry != NULL);
6947 gcc_assert (entry != exit);
6948 gcc_assert (bbs_p != NULL);
6950 gcc_assert (bbs_p->length () > 0);
6952 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6953 bitmap_set_bit (bbs, bb->index);
6955 gcc_assert (bitmap_bit_p (bbs, entry->index));
6956 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6958 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6960 if (bb == entry)
6962 gcc_assert (single_pred_p (entry));
6963 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6965 else
6966 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6968 e = ei_edge (ei);
6969 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6972 if (bb == exit)
6974 gcc_assert (single_succ_p (exit));
6975 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6977 else
6978 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6980 e = ei_edge (ei);
6981 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6985 BITMAP_FREE (bbs);
6989 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6990 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6991 single basic block in the original CFG and the new basic block is
6992 returned. DEST_CFUN must not have a CFG yet.
6994 Note that the region need not be a pure SESE region. Blocks inside
6995 the region may contain calls to abort/exit. The only restriction
6996 is that ENTRY_BB should be the only entry point and it must
6997 dominate EXIT_BB.
6999 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7000 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7001 to the new function.
7003 All local variables referenced in the region are assumed to be in
7004 the corresponding BLOCK_VARS and unexpanded variable lists
7005 associated with DEST_CFUN. */
7007 basic_block
7008 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7009 basic_block exit_bb, tree orig_block)
7011 vec<basic_block> bbs, dom_bbs;
7012 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7013 basic_block after, bb, *entry_pred, *exit_succ, abb;
7014 struct function *saved_cfun = cfun;
7015 int *entry_flag, *exit_flag;
7016 unsigned *entry_prob, *exit_prob;
7017 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7018 edge e;
7019 edge_iterator ei;
7020 htab_t new_label_map;
7021 hash_map<void *, void *> *eh_map;
7022 struct loop *loop = entry_bb->loop_father;
7023 struct loop *loop0 = get_loop (saved_cfun, 0);
7024 struct move_stmt_d d;
7026 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7027 region. */
7028 gcc_assert (entry_bb != exit_bb
7029 && (!exit_bb
7030 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7032 /* Collect all the blocks in the region. Manually add ENTRY_BB
7033 because it won't be added by dfs_enumerate_from. */
7034 bbs.create (0);
7035 bbs.safe_push (entry_bb);
7036 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7037 #ifdef ENABLE_CHECKING
7038 verify_sese (entry_bb, exit_bb, &bbs);
7039 #endif
7041 /* The blocks that used to be dominated by something in BBS will now be
7042 dominated by the new block. */
7043 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7044 bbs.address (),
7045 bbs.length ());
7047 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7048 the predecessor edges to ENTRY_BB and the successor edges to
7049 EXIT_BB so that we can re-attach them to the new basic block that
7050 will replace the region. */
7051 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7052 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7053 entry_flag = XNEWVEC (int, num_entry_edges);
7054 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7055 i = 0;
7056 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7058 entry_prob[i] = e->probability;
7059 entry_flag[i] = e->flags;
7060 entry_pred[i++] = e->src;
7061 remove_edge (e);
7064 if (exit_bb)
7066 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7067 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7068 exit_flag = XNEWVEC (int, num_exit_edges);
7069 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7070 i = 0;
7071 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7073 exit_prob[i] = e->probability;
7074 exit_flag[i] = e->flags;
7075 exit_succ[i++] = e->dest;
7076 remove_edge (e);
7079 else
7081 num_exit_edges = 0;
7082 exit_succ = NULL;
7083 exit_flag = NULL;
7084 exit_prob = NULL;
7087 /* Switch context to the child function to initialize DEST_FN's CFG. */
7088 gcc_assert (dest_cfun->cfg == NULL);
7089 push_cfun (dest_cfun);
7091 init_empty_tree_cfg ();
7093 /* Initialize EH information for the new function. */
7094 eh_map = NULL;
7095 new_label_map = NULL;
7096 if (saved_cfun->eh)
7098 eh_region region = NULL;
7100 FOR_EACH_VEC_ELT (bbs, i, bb)
7101 region = find_outermost_region_in_block (saved_cfun, bb, region);
7103 init_eh_for_function ();
7104 if (region != NULL)
7106 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7107 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7108 new_label_mapper, new_label_map);
7112 /* Initialize an empty loop tree. */
7113 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7114 init_loops_structure (dest_cfun, loops, 1);
7115 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7116 set_loops_for_fn (dest_cfun, loops);
7118 /* Move the outlined loop tree part. */
7119 num_nodes = bbs.length ();
7120 FOR_EACH_VEC_ELT (bbs, i, bb)
7122 if (bb->loop_father->header == bb)
7124 struct loop *this_loop = bb->loop_father;
7125 struct loop *outer = loop_outer (this_loop);
7126 if (outer == loop
7127 /* If the SESE region contains some bbs ending with
7128 a noreturn call, those are considered to belong
7129 to the outermost loop in saved_cfun, rather than
7130 the entry_bb's loop_father. */
7131 || outer == loop0)
7133 if (outer != loop)
7134 num_nodes -= this_loop->num_nodes;
7135 flow_loop_tree_node_remove (bb->loop_father);
7136 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7137 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7140 else if (bb->loop_father == loop0 && loop0 != loop)
7141 num_nodes--;
7143 /* Remove loop exits from the outlined region. */
7144 if (loops_for_fn (saved_cfun)->exits)
7145 FOR_EACH_EDGE (e, ei, bb->succs)
7147 struct loops *l = loops_for_fn (saved_cfun);
7148 loop_exit **slot
7149 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7150 NO_INSERT);
7151 if (slot)
7152 l->exits->clear_slot (slot);
7157 /* Adjust the number of blocks in the tree root of the outlined part. */
7158 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7160 /* Setup a mapping to be used by move_block_to_fn. */
7161 loop->aux = current_loops->tree_root;
7162 loop0->aux = current_loops->tree_root;
7164 pop_cfun ();
7166 /* Move blocks from BBS into DEST_CFUN. */
7167 gcc_assert (bbs.length () >= 2);
7168 after = dest_cfun->cfg->x_entry_block_ptr;
7169 hash_map<tree, tree> vars_map;
7171 memset (&d, 0, sizeof (d));
7172 d.orig_block = orig_block;
7173 d.new_block = DECL_INITIAL (dest_cfun->decl);
7174 d.from_context = cfun->decl;
7175 d.to_context = dest_cfun->decl;
7176 d.vars_map = &vars_map;
7177 d.new_label_map = new_label_map;
7178 d.eh_map = eh_map;
7179 d.remap_decls_p = true;
7181 FOR_EACH_VEC_ELT (bbs, i, bb)
7183 /* No need to update edge counts on the last block. It has
7184 already been updated earlier when we detached the region from
7185 the original CFG. */
7186 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7187 after = bb;
7190 loop->aux = NULL;
7191 loop0->aux = NULL;
7192 /* Loop sizes are no longer correct, fix them up. */
7193 loop->num_nodes -= num_nodes;
7194 for (struct loop *outer = loop_outer (loop);
7195 outer; outer = loop_outer (outer))
7196 outer->num_nodes -= num_nodes;
7197 loop0->num_nodes -= bbs.length () - num_nodes;
7199 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7201 struct loop *aloop;
7202 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7203 if (aloop != NULL)
7205 if (aloop->simduid)
7207 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7208 d.to_context);
7209 dest_cfun->has_simduid_loops = true;
7211 if (aloop->force_vectorize)
7212 dest_cfun->has_force_vectorize_loops = true;
7216 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7217 if (orig_block)
7219 tree block;
7220 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7221 == NULL_TREE);
7222 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7223 = BLOCK_SUBBLOCKS (orig_block);
7224 for (block = BLOCK_SUBBLOCKS (orig_block);
7225 block; block = BLOCK_CHAIN (block))
7226 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7227 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7230 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7231 &vars_map, dest_cfun->decl);
7233 if (new_label_map)
7234 htab_delete (new_label_map);
7235 if (eh_map)
7236 delete eh_map;
7238 /* Rewire the entry and exit blocks. The successor to the entry
7239 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7240 the child function. Similarly, the predecessor of DEST_FN's
7241 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7242 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7243 various CFG manipulation functions get to the right CFG.
7245 FIXME, this is silly. The CFG ought to become a parameter to
7246 these helpers. */
7247 push_cfun (dest_cfun);
7248 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7249 if (exit_bb)
7250 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7251 pop_cfun ();
7253 /* Back in the original function, the SESE region has disappeared,
7254 create a new basic block in its place. */
7255 bb = create_empty_bb (entry_pred[0]);
7256 if (current_loops)
7257 add_bb_to_loop (bb, loop);
7258 for (i = 0; i < num_entry_edges; i++)
7260 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7261 e->probability = entry_prob[i];
7264 for (i = 0; i < num_exit_edges; i++)
7266 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7267 e->probability = exit_prob[i];
7270 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7271 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7272 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7273 dom_bbs.release ();
7275 if (exit_bb)
7277 free (exit_prob);
7278 free (exit_flag);
7279 free (exit_succ);
7281 free (entry_prob);
7282 free (entry_flag);
7283 free (entry_pred);
7284 bbs.release ();
7286 return bb;
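/* For example, an outlining pass that has set up a new FUNCTION_DECL
   (CHILD_FN below is a placeholder with a fresh DECL_STRUCT_FUNCTION
   and, as required above, no CFG yet) could replace the region by a
   single block:

     basic_block new_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 entry_bb, exit_bb, block);

   On return, NEW_BB stands in for the region in the original function,
   while the region's blocks form the body of CHILD_FN.  */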
7290 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).  */
7293 void
7294 dump_function_to_file (tree fndecl, FILE *file, int flags)
7296 tree arg, var, old_current_fndecl = current_function_decl;
7297 struct function *dsf;
7298 bool ignore_topmost_bind = false, any_var = false;
7299 basic_block bb;
7300 tree chain;
7301 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7302 && decl_is_tm_clone (fndecl));
7303 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7305 current_function_decl = fndecl;
7306 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7308 arg = DECL_ARGUMENTS (fndecl);
7309 while (arg)
7311 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7312 fprintf (file, " ");
7313 print_generic_expr (file, arg, dump_flags);
7314 if (flags & TDF_VERBOSE)
7315 print_node (file, "", arg, 4);
7316 if (DECL_CHAIN (arg))
7317 fprintf (file, ", ");
7318 arg = DECL_CHAIN (arg);
7320 fprintf (file, ")\n");
7322 if (flags & TDF_VERBOSE)
7323 print_node (file, "", fndecl, 2);
7325 dsf = DECL_STRUCT_FUNCTION (fndecl);
7326 if (dsf && (flags & TDF_EH))
7327 dump_eh_tree (file, dsf);
7329 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7331 dump_node (fndecl, TDF_SLIM | flags, file);
7332 current_function_decl = old_current_fndecl;
7333 return;
7336 /* When GIMPLE is lowered, the variables are no longer available in
7337 BIND_EXPRs, so display them separately. */
7338 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7340 unsigned ix;
7341 ignore_topmost_bind = true;
7343 fprintf (file, "{\n");
7344 if (!vec_safe_is_empty (fun->local_decls))
7345 FOR_EACH_LOCAL_DECL (fun, ix, var)
7347 print_generic_decl (file, var, flags);
7348 if (flags & TDF_VERBOSE)
7349 print_node (file, "", var, 4);
7350 fprintf (file, "\n");
7352 any_var = true;
7354 if (gimple_in_ssa_p (cfun))
7355 for (ix = 1; ix < num_ssa_names; ++ix)
7357 tree name = ssa_name (ix);
7358 if (name && !SSA_NAME_VAR (name))
7360 fprintf (file, " ");
7361 print_generic_expr (file, TREE_TYPE (name), flags);
7362 fprintf (file, " ");
7363 print_generic_expr (file, name, flags);
7364 fprintf (file, ";\n");
7366 any_var = true;
7371 if (fun && fun->decl == fndecl
7372 && fun->cfg
7373 && basic_block_info_for_fn (fun))
7375 /* If the CFG has been built, emit a CFG-based dump. */
7376 if (!ignore_topmost_bind)
7377 fprintf (file, "{\n");
7379 if (any_var && n_basic_blocks_for_fn (fun))
7380 fprintf (file, "\n");
7382 FOR_EACH_BB_FN (bb, fun)
7383 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7385 fprintf (file, "}\n");
7387 else if (DECL_SAVED_TREE (fndecl) == NULL)
7389 /* The function is now in GIMPLE form but the CFG has not been
7390 built yet. Emit the single sequence of GIMPLE statements
7391 that make up its body. */
7392 gimple_seq body = gimple_body (fndecl);
7394 if (gimple_seq_first_stmt (body)
7395 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7396 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7397 print_gimple_seq (file, body, 0, flags);
7398 else
7400 if (!ignore_topmost_bind)
7401 fprintf (file, "{\n");
7403 if (any_var)
7404 fprintf (file, "\n");
7406 print_gimple_seq (file, body, 2, flags);
7407 fprintf (file, "}\n");
7410 else
7412 int indent;
7414 /* Make a tree based dump. */
7415 chain = DECL_SAVED_TREE (fndecl);
7416 if (chain && TREE_CODE (chain) == BIND_EXPR)
7418 if (ignore_topmost_bind)
7420 chain = BIND_EXPR_BODY (chain);
7421 indent = 2;
7423 else
7424 indent = 0;
7426 else
7428 if (!ignore_topmost_bind)
7430 fprintf (file, "{\n");
7431 /* No topmost bind, pretend it's ignored for later. */
7432 ignore_topmost_bind = true;
7434 indent = 2;
7437 if (any_var)
7438 fprintf (file, "\n");
7440 print_generic_stmt_indented (file, chain, flags, indent);
7441 if (ignore_topmost_bind)
7442 fprintf (file, "}\n");
7445 if (flags & TDF_ENUMERATE_LOCALS)
7446 dump_enumerated_decls (file, flags);
7447 fprintf (file, "\n\n");
7449 current_function_decl = old_current_fndecl;
7452 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7454 DEBUG_FUNCTION void
7455 debug_function (tree fn, int flags)
7457 dump_function_to_file (fn, stderr, flags);
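/* For instance, to get a verbose CFG-based dump of the current function
   on stderr, with EH regions included (TDF_VERBOSE and TDF_EH are among
   the dump flags declared in dumpfile.h):

     debug_function (current_function_decl, TDF_VERBOSE | TDF_EH);  */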
7461 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7463 static void
7464 print_pred_bbs (FILE *file, basic_block bb)
7466 edge e;
7467 edge_iterator ei;
7469 FOR_EACH_EDGE (e, ei, bb->preds)
7470 fprintf (file, "bb_%d ", e->src->index);
7474 /* Print on FILE the indexes for the successors of basic_block BB. */
7476 static void
7477 print_succ_bbs (FILE *file, basic_block bb)
7479 edge e;
7480 edge_iterator ei;
7482 FOR_EACH_EDGE (e, ei, bb->succs)
7483 fprintf (file, "bb_%d ", e->dest->index);
7486 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7488 void
7489 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7491 char *s_indent = (char *) alloca ((size_t) indent + 1);
7492 memset ((void *) s_indent, ' ', (size_t) indent);
7493 s_indent[indent] = '\0';
7495 /* Print basic_block's header. */
7496 if (verbosity >= 2)
7498 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7499 print_pred_bbs (file, bb);
7500 fprintf (file, "}, succs = {");
7501 print_succ_bbs (file, bb);
7502 fprintf (file, "})\n");
7505 /* Print basic_block's body. */
7506 if (verbosity >= 3)
7508 fprintf (file, "%s {\n", s_indent);
7509 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7510 fprintf (file, "%s }\n", s_indent);
7514 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7516 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7517 the VERBOSITY level, this outputs the contents of the loop, or
7518 just its structure. */
7520 static void
7521 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7523 char *s_indent;
7524 basic_block bb;
7526 if (loop == NULL)
7527 return;
7529 s_indent = (char *) alloca ((size_t) indent + 1);
7530 memset ((void *) s_indent, ' ', (size_t) indent);
7531 s_indent[indent] = '\0';
7533 /* Print loop's header. */
7534 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7535 if (loop->header)
7536 fprintf (file, "header = %d", loop->header->index);
7537 else
7539 fprintf (file, "deleted)\n");
7540 return;
7542 if (loop->latch)
7543 fprintf (file, ", latch = %d", loop->latch->index);
7544 else
7545 fprintf (file, ", multiple latches");
7546 fprintf (file, ", niter = ");
7547 print_generic_expr (file, loop->nb_iterations, 0);
7549 if (loop->any_upper_bound)
7551 fprintf (file, ", upper_bound = ");
7552 print_decu (loop->nb_iterations_upper_bound, file);
7555 if (loop->any_estimate)
7557 fprintf (file, ", estimate = ");
7558 print_decu (loop->nb_iterations_estimate, file);
7560 fprintf (file, ")\n");
7562 /* Print loop's body. */
7563 if (verbosity >= 1)
7565 fprintf (file, "%s{\n", s_indent);
7566 FOR_EACH_BB_FN (bb, cfun)
7567 if (bb->loop_father == loop)
7568 print_loops_bb (file, bb, indent, verbosity);
7570 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7571 fprintf (file, "%s}\n", s_indent);
7575 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7576 spaces. Depending on the VERBOSITY level, this outputs the
7577 contents of each loop, or just its structure. */
7579 static void
7580 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7581 int verbosity)
7583 if (loop == NULL)
7584 return;
7586 print_loop (file, loop, indent, verbosity);
7587 print_loop_and_siblings (file, loop->next, indent, verbosity);
7590 /* Follow a CFG edge from the entry point of the program, and on entry
7591 of a loop, pretty print the loop structure on FILE. */
7593 void
7594 print_loops (FILE *file, int verbosity)
7596 basic_block bb;
7598 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7599 if (bb && bb->loop_father)
7600 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7603 /* Dump a loop. */
7605 DEBUG_FUNCTION void
7606 debug (struct loop &ref)
7608 print_loop (stderr, &ref, 0, /*verbosity*/0);
7611 DEBUG_FUNCTION void
7612 debug (struct loop *ptr)
7614 if (ptr)
7615 debug (*ptr);
7616 else
7617 fprintf (stderr, "<nil>\n");
7620 /* Dump a loop verbosely. */
7622 DEBUG_FUNCTION void
7623 debug_verbose (struct loop &ref)
7625 print_loop (stderr, &ref, 0, /*verbosity*/3);
7628 DEBUG_FUNCTION void
7629 debug_verbose (struct loop *ptr)
7631 if (ptr)
7632 debug_verbose (*ptr);
7633 else
7634 fprintf (stderr, "<nil>\n");
7638 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
7640 DEBUG_FUNCTION void
7641 debug_loops (int verbosity)
7643 print_loops (stderr, verbosity);
7646 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7648 DEBUG_FUNCTION void
7649 debug_loop (struct loop *loop, int verbosity)
7651 print_loop (stderr, loop, 0, verbosity);
7654 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7655 level. */
7657 DEBUG_FUNCTION void
7658 debug_loop_num (unsigned num, int verbosity)
7660 debug_loop (get_loop (cfun, num), verbosity);
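/* Summary of the VERBOSITY levels honored by print_loop and
   print_loops_bb above:

     debug_loops (0);  // headers of the outermost loops only
     debug_loops (1);  // plus nested loops, one brace-delimited body each
     debug_loops (2);  // plus each block's index and pred/succ lists
     debug_loops (3);  // plus the GIMPLE contents of every block  */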
7663 /* Return true if BB ends with a call, possibly followed by some
7664 instructions that must stay with the call. Return false
7665 otherwise. */
7667 static bool
7668 gimple_block_ends_with_call_p (basic_block bb)
7670 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7671 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7675 /* Return true if BB ends with a conditional branch. Return false
7676 otherwise. */
7678 static bool
7679 gimple_block_ends_with_condjump_p (const_basic_block bb)
7681 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7682 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7686 /* Return true if we need to add fake edge to exit at statement T.
7687 Helper function for gimple_flow_call_edges_add. */
7689 static bool
7690 need_fake_edge_p (gimple t)
7692 tree fndecl = NULL_TREE;
7693 int call_flags = 0;
7695 /* NORETURN and LONGJMP calls already have an edge to exit.
7696 CONST and PURE calls do not need one.
7697 We don't currently check for CONST and PURE here, although
7698 it would be a good idea, because those attributes are
7699 figured out from the RTL in mark_constant_function, and
7700 the counter increment code from -fprofile-arcs
7701 leads to different results from -fbranch-probabilities. */
7702 if (is_gimple_call (t))
7704 fndecl = gimple_call_fndecl (t);
7705 call_flags = gimple_call_flags (t);
7708 if (is_gimple_call (t)
7709 && fndecl
7710 && DECL_BUILT_IN (fndecl)
7711 && (call_flags & ECF_NOTHROW)
7712 && !(call_flags & ECF_RETURNS_TWICE)
7713 /* fork() doesn't really return twice, but the effect of
7714 wrapping it in __gcov_fork() which calls __gcov_flush()
7715 and clears the counters before forking has the same
7716 effect as returning twice. Force a fake edge. */
7717 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7718 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7719 return false;
7721 if (is_gimple_call (t))
7723 edge_iterator ei;
7724 edge e;
7725 basic_block bb;
7727 if (!(call_flags & ECF_NORETURN))
7728 return true;
7730 bb = gimple_bb (t);
7731 FOR_EACH_EDGE (e, ei, bb->succs)
7732 if ((e->flags & EDGE_FAKE) == 0)
7733 return true;
7736 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7737 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7738 return true;
7740 return false;
7744 /* Add fake edges to the function exit for any non-constant and
7745 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
7746 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7747 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7748 that were split.
7750 The goal is to expose cases in which entering a basic block does
7751 not imply that all subsequent instructions must be executed. */
7753 static int
7754 gimple_flow_call_edges_add (sbitmap blocks)
7756 int i;
7757 int blocks_split = 0;
7758 int last_bb = last_basic_block_for_fn (cfun);
7759 bool check_last_block = false;
7761 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7762 return 0;
7764 if (! blocks)
7765 check_last_block = true;
7766 else
7767 check_last_block = bitmap_bit_p (blocks,
7768 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7770 /* In the last basic block, before epilogue generation, there will be
7771 a fallthru edge to EXIT. Special care is required if the last insn
7772 of the last basic block is a call because make_edge folds duplicate
7773 edges, which would result in the fallthru edge also being marked
7774 fake, which would result in the fallthru edge being removed by
7775 remove_fake_edges, which would result in an invalid CFG.
7777 Moreover, we can't elide the outgoing fake edge, since the block
7778 profiler needs to take this into account in order to solve the minimal
7779 spanning tree in the case that the call doesn't return.
7781 Handle this by adding a dummy instruction in a new last basic block. */
7782 if (check_last_block)
7784 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7785 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7786 gimple t = NULL;
7788 if (!gsi_end_p (gsi))
7789 t = gsi_stmt (gsi);
7791 if (t && need_fake_edge_p (t))
7793 edge e;
7795 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7796 if (e)
7798 gsi_insert_on_edge (e, gimple_build_nop ());
7799 gsi_commit_edge_inserts ();
7804 /* Now add fake edges to the function exit for any non-constant
7805 calls since there is no way that we can determine if they will
7806 return or not... */
7807 for (i = 0; i < last_bb; i++)
7809 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7810 gimple_stmt_iterator gsi;
7811 gimple stmt, last_stmt;
7813 if (!bb)
7814 continue;
7816 if (blocks && !bitmap_bit_p (blocks, i))
7817 continue;
7819 gsi = gsi_last_nondebug_bb (bb);
7820 if (!gsi_end_p (gsi))
7822 last_stmt = gsi_stmt (gsi);
7825 stmt = gsi_stmt (gsi);
7826 if (need_fake_edge_p (stmt))
7828 edge e;
7830 /* The handling above of the final block before the
7831 epilogue should be enough to verify that there is
7832 no edge to the exit block in CFG already.
7833 Calling make_edge in such case would cause us to
7834 mark that edge as fake and remove it later. */
7835 #ifdef ENABLE_CHECKING
7836 if (stmt == last_stmt)
7838 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7839 gcc_assert (e == NULL);
7841 #endif
7843 /* Note that the following may create a new basic block
7844 and renumber the existing basic blocks. */
7845 if (stmt != last_stmt)
7847 e = split_block (bb, stmt);
7848 if (e)
7849 blocks_split++;
7851 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7853 gsi_prev (&gsi);
7855 while (!gsi_end_p (gsi));
7859 if (blocks_split)
7860 verify_flow_info ();
7862 return blocks_split;
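/* This routine is only reached through the flow_call_edges_add entry of
   the cfg_hooks table below.  A NULL bitmap asks for every block to be
   processed, which also enables the last-block special case above:

     int n_split = gimple_flow_call_edges_add (NULL);  // whole CFG  */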
7865 /* Removes edge E and all the blocks dominated by it, and updates dominance
7866 information. The IL in E->src needs to be updated separately.
7867 If dominance info is not available, only the edge E is removed. */
7869 void
7870 remove_edge_and_dominated_blocks (edge e)
7872 vec<basic_block> bbs_to_remove = vNULL;
7873 vec<basic_block> bbs_to_fix_dom = vNULL;
7874 bitmap df, df_idom;
7875 edge f;
7876 edge_iterator ei;
7877 bool none_removed = false;
7878 unsigned i;
7879 basic_block bb, dbb;
7880 bitmap_iterator bi;
7882 /* If we are removing a path inside a non-root loop, that may change
7883 loop ownership of blocks or remove loops; mark loops for fixup. */
7884 if (current_loops
7885 && loop_outer (e->src->loop_father) != NULL
7886 && e->src->loop_father == e->dest->loop_father)
7887 loops_state_set (LOOPS_NEED_FIXUP);
7889 if (!dom_info_available_p (CDI_DOMINATORS))
7891 remove_edge (e);
7892 return;
7895 /* No updating is needed for edges to exit. */
7896 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7898 if (cfgcleanup_altered_bbs)
7899 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7900 remove_edge (e);
7901 return;
7904 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7905 that is not dominated by E->dest, then this set is empty. Otherwise,
7906 all the basic blocks dominated by E->dest are removed.
7908 Also, to DF_IDOM we store the immediate dominators of the blocks in
7909 the dominance frontier of E (i.e., of the successors of the
7910 removed blocks, if there are any, and of E->dest otherwise). */
7911 FOR_EACH_EDGE (f, ei, e->dest->preds)
7913 if (f == e)
7914 continue;
7916 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7918 none_removed = true;
7919 break;
7923 df = BITMAP_ALLOC (NULL);
7924 df_idom = BITMAP_ALLOC (NULL);
7926 if (none_removed)
7927 bitmap_set_bit (df_idom,
7928 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7929 else
7931 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7932 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7934 FOR_EACH_EDGE (f, ei, bb->succs)
7936 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7937 bitmap_set_bit (df, f->dest->index);
7940 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7941 bitmap_clear_bit (df, bb->index);
7943 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7945 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7946 bitmap_set_bit (df_idom,
7947 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7951 if (cfgcleanup_altered_bbs)
7953 /* Record the set of the altered basic blocks. */
7954 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7955 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7958 /* Remove E and the cancelled blocks. */
7959 if (none_removed)
7960 remove_edge (e);
7961 else
7963 /* Walk backwards so as to get a chance to substitute all
7964 released DEFs into debug stmts. See
7965 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7966 details. */
7967 for (i = bbs_to_remove.length (); i-- > 0; )
7968 delete_basic_block (bbs_to_remove[i]);
7971 /* Update the dominance information. The immediate dominator may change only
7972 for blocks whose immediate dominator belongs to DF_IDOM:
7974 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7975 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7976 Z dominates X after the removal. Before removal, there exists a path P
7977 from Y to X that avoids Z. Let F be the last edge on P that is
7978 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7979 dominates W, and because of P, Z does not dominate W), and W belongs to
7980 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7981 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7983 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7984 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7985 dbb;
7986 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7987 bbs_to_fix_dom.safe_push (dbb);
7990 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7992 BITMAP_FREE (df);
7993 BITMAP_FREE (df_idom);
7994 bbs_to_remove.release ();
7995 bbs_to_fix_dom.release ();
7998 /* Purge dead EH edges from basic block BB. */
8000 bool
8001 gimple_purge_dead_eh_edges (basic_block bb)
8003 bool changed = false;
8004 edge e;
8005 edge_iterator ei;
8006 gimple stmt = last_stmt (bb);
8008 if (stmt && stmt_can_throw_internal (stmt))
8009 return false;
8011 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8013 if (e->flags & EDGE_EH)
8015 remove_edge_and_dominated_blocks (e);
8016 changed = true;
8018 else
8019 ei_next (&ei);
8022 return changed;
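/* Typical use, e.g. after a transformation replaces a throwing call with
   something that cannot throw; maybe_clean_eh_stmt is the tree-eh helper
   that drops a statement from the EH table when it can no longer throw,
   and STMT and CFG_CHANGED are placeholders for a caller's locals:

     if (maybe_clean_eh_stmt (stmt)
         && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       cfg_changed = true;  */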
8025 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8027 bool
8028 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8030 bool changed = false;
8031 unsigned i;
8032 bitmap_iterator bi;
8034 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8036 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8038 /* Earlier gimple_purge_dead_eh_edges could have removed
8039 this basic block already. */
8040 gcc_assert (bb || changed);
8041 if (bb != NULL)
8042 changed |= gimple_purge_dead_eh_edges (bb);
8045 return changed;
8048 /* Purge dead abnormal call edges from basic block BB. */
8050 bool
8051 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8053 bool changed = false;
8054 edge e;
8055 edge_iterator ei;
8056 gimple stmt = last_stmt (bb);
8058 if (!cfun->has_nonlocal_label
8059 && !cfun->calls_setjmp)
8060 return false;
8062 if (stmt && stmt_can_make_abnormal_goto (stmt))
8063 return false;
8065 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8067 if (e->flags & EDGE_ABNORMAL)
8069 if (e->flags & EDGE_FALLTHRU)
8070 e->flags &= ~EDGE_ABNORMAL;
8071 else
8072 remove_edge_and_dominated_blocks (e);
8073 changed = true;
8075 else
8076 ei_next (&ei);
8079 return changed;
8082 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8084 bool
8085 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8087 bool changed = false;
8088 unsigned i;
8089 bitmap_iterator bi;
8091 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8093 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8095 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8096 this basic block already. */
8097 gcc_assert (bb || changed);
8098 if (bb != NULL)
8099 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8102 return changed;
8105 /* This function is called whenever a new edge is created or
8106 redirected. */
8108 static void
8109 gimple_execute_on_growing_pred (edge e)
8111 basic_block bb = e->dest;
8113 if (!gimple_seq_empty_p (phi_nodes (bb)))
8114 reserve_phi_args_for_new_edge (bb);
8117 /* This function is called immediately before edge E is removed from
8118 the edge vector E->dest->preds. */
8120 static void
8121 gimple_execute_on_shrinking_pred (edge e)
8123 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8124 remove_phi_args (e);
8127 /*---------------------------------------------------------------------------
8128 Helper functions for Loop versioning
8129 ---------------------------------------------------------------------------*/
8131 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8132 of 'first'. Both of them are dominated by 'new_head' basic block. When
8133 'new_head' was created by splitting 'second's incoming edge, it received
8134 phi arguments on that edge from split_edge (). Later, an additional edge
8135 'e' was created to connect 'new_head' and 'first'. This routine now adds
8136 to edge 'e' the phi args that the edge from 'new_head' to 'second'
8137 received as part of the edge splitting. */
8139 static void
8140 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8141 basic_block new_head, edge e)
8143 gphi *phi1, *phi2;
8144 gphi_iterator psi1, psi2;
8145 tree def;
8146 edge e2 = find_edge (new_head, second);
8148 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8149 edge, we should always have an edge from NEW_HEAD to SECOND. */
8150 gcc_assert (e2 != NULL);
8152 /* Browse all 'second' basic block phi nodes and add phi args to
8153 edge 'e' for 'first' head. PHI args are always in correct order. */
8155 for (psi2 = gsi_start_phis (second),
8156 psi1 = gsi_start_phis (first);
8157 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8158 gsi_next (&psi2), gsi_next (&psi1))
8160 phi1 = psi1.phi ();
8161 phi2 = psi2.phi ();
8162 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8163 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8168 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8169 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8170 the destination of the ELSE part. */
8172 static void
8173 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8174 basic_block second_head ATTRIBUTE_UNUSED,
8175 basic_block cond_bb, void *cond_e)
8177 gimple_stmt_iterator gsi;
8178 gimple new_cond_expr;
8179 tree cond_expr = (tree) cond_e;
8180 edge e0;
8182 /* Build the new conditional expression. */
8183 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8184 NULL_TREE, NULL_TREE);
8186 /* Add new cond in cond_bb. */
8187 gsi = gsi_last_bb (cond_bb);
8188 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8190 /* Adjust edges appropriately to connect new head with first head
8191 as well as second head. */
8192 e0 = single_succ_edge (cond_bb);
8193 e0->flags &= ~EDGE_FALLTHRU;
8194 e0->flags |= EDGE_FALSE_VALUE;
8198 /* Do book-keeping of basic block BB for the profile consistency checker.
8199 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8200 do post-pass accounting. Store the counts in RECORD. */
8201 static void
8202 gimple_account_profile_record (basic_block bb, int after_pass,
8203 struct profile_record *record)
8205 gimple_stmt_iterator i;
8206 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8208 record->size[after_pass]
8209 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8210 if (profile_status_for_fn (cfun) == PROFILE_READ)
8211 record->time[after_pass]
8212 += estimate_num_insns (gsi_stmt (i),
8213 &eni_time_weights) * bb->count;
8214 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8215 record->time[after_pass]
8216 += estimate_num_insns (gsi_stmt (i),
8217 &eni_time_weights) * bb->frequency;
8221 struct cfg_hooks gimple_cfg_hooks = {
8222 "gimple",
8223 gimple_verify_flow_info,
8224 gimple_dump_bb, /* dump_bb */
8225 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8226 create_bb, /* create_basic_block */
8227 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8228 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8229 gimple_can_remove_branch_p, /* can_remove_branch_p */
8230 remove_bb, /* delete_basic_block */
8231 gimple_split_block, /* split_block */
8232 gimple_move_block_after, /* move_block_after */
8233 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8234 gimple_merge_blocks, /* merge_blocks */
8235 gimple_predict_edge, /* predict_edge */
8236 gimple_predicted_by_p, /* predicted_by_p */
8237 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8238 gimple_duplicate_bb, /* duplicate_block */
8239 gimple_split_edge, /* split_edge */
8240 gimple_make_forwarder_block, /* make_forwarder_block */
8241 NULL, /* tidy_fallthru_edge */
8242 NULL, /* force_nonfallthru */
8243 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8244 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8245 gimple_flow_call_edges_add, /* flow_call_edges_add */
8246 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8247 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8248 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8249 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8250 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8251 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8252 flush_pending_stmts, /* flush_pending_stmts */
8253 gimple_empty_block_p, /* block_empty_p */
8254 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8255 gimple_account_profile_record,
8259 /* Split all critical edges. */
8261 unsigned int
8262 split_critical_edges (void)
8264 basic_block bb;
8265 edge e;
8266 edge_iterator ei;
8268 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8269 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8270 mappings around the calls to split_edge. */
8271 start_recording_case_labels ();
8272 FOR_ALL_BB_FN (bb, cfun)
8274 FOR_EACH_EDGE (e, ei, bb->succs)
8276 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8277 split_edge (e);
8278 /* PRE inserts statements to edges and expects that
8279 since split_critical_edges was done beforehand, committing edge
8280 insertions will not split more edges. In addition to critical
8281 edges we must split edges that have multiple successors and
8282 end by control flow statements, such as RESX.
8283 Go ahead and split them too. This matches the logic in
8284 gimple_find_edge_insert_loc. */
8285 else if ((!single_pred_p (e->dest)
8286 || !gimple_seq_empty_p (phi_nodes (e->dest))
8287 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8288 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8289 && !(e->flags & EDGE_ABNORMAL))
8291 gimple_stmt_iterator gsi;
8293 gsi = gsi_last_bb (e->src);
8294 if (!gsi_end_p (gsi)
8295 && stmt_ends_bb_p (gsi_stmt (gsi))
8296 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8297 && !gimple_call_builtin_p (gsi_stmt (gsi),
8298 BUILT_IN_RETURN)))
8299 split_edge (e);
8303 end_recording_case_labels ();
8304 return 0;
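/* For reference, EDGE_CRITICAL_P (from basic-block.h) is true of an edge
   whose source has multiple successors and whose destination has multiple
   predecessors.  Nothing can be inserted on such an edge without the
   intermediate block that split_edge supplies:

     basic_block mid = NULL;
     if (EDGE_CRITICAL_P (e))
       mid = split_edge (e);  /* MID is a fresh block on the E path.  */
*/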
8307 namespace {
8309 const pass_data pass_data_split_crit_edges =
8311 GIMPLE_PASS, /* type */
8312 "crited", /* name */
8313 OPTGROUP_NONE, /* optinfo_flags */
8314 TV_TREE_SPLIT_EDGES, /* tv_id */
8315 PROP_cfg, /* properties_required */
8316 PROP_no_crit_edges, /* properties_provided */
8317 0, /* properties_destroyed */
8318 0, /* todo_flags_start */
8319 0, /* todo_flags_finish */
8322 class pass_split_crit_edges : public gimple_opt_pass
8324 public:
8325 pass_split_crit_edges (gcc::context *ctxt)
8326 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8329 /* opt_pass methods: */
8330 virtual unsigned int execute (function *) { return split_critical_edges (); }
8332 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8333 }; // class pass_split_crit_edges
8335 } // anon namespace
8337 gimple_opt_pass *
8338 make_pass_split_crit_edges (gcc::context *ctxt)
8340 return new pass_split_crit_edges (ctxt);
8344 /* Insert COND expression which is GIMPLE_COND after STMT
8345 in basic block BB with appropriate basic block split
8346 and creation of a new conditionally executed basic block.
8347 Return created basic block. */
8348 basic_block
8349 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8351 edge fall = split_block (bb, stmt);
8352 gimple_stmt_iterator iter = gsi_last_bb (bb);
8353 basic_block new_bb;
8355 /* Insert cond statement. */
8356 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8357 if (gsi_end_p (iter))
8358 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8359 else
8360 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8362 /* Create conditionally executed block. */
8363 new_bb = create_empty_bb (bb);
8364 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8365 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8367 /* Fix edge for split bb. */
8368 fall->flags = EDGE_FALSE_VALUE;
8370 /* Update dominance info. */
8371 if (dom_info_available_p (CDI_DOMINATORS))
8373 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8374 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8377 /* Update loop info. */
8378 if (current_loops)
8379 add_bb_to_loop (new_bb, bb->loop_father);
8381 return new_bb;
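/* Example (VAL and STMT are placeholders, VAL assumed to have integer
   type): emit a test after STMT and obtain a block that executes only
   when VAL is nonzero:

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (gimple_bb (stmt), stmt, cond);

   Statements subsequently added to THEN_BB run only on the true path;
   the false edge falls through past them.  */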
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
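/* A minimal sketch of the helpers above (hypothetical, for
   illustration only): compute A * B + C ahead of the statement at
   *GSI and obtain a gimple value holding the result.  */

static inline tree
gimplify_madd_sketch (gimple_stmt_iterator *gsi, tree a, tree b, tree c)
{
  /* Each call folds, strips nops, and forces the result into a
     gimple_val, emitting any needed statements before *GSI.  */
  tree prod = gimplify_build2 (gsi, MULT_EXPR, TREE_TYPE (a), a, b);
  return gimplify_build2 (gsi, PLUS_EXPR, TREE_TYPE (a), prod, c);
}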
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
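/* Sketch (hypothetical helper, not part of this file): pick the block
   reached when the GIMPLE_COND ending BB evaluates to true.  */

static inline basic_block
true_successor_sketch (basic_block bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return true_edge->dest;
}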
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
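/* For illustration (hypothetical user code, not from this file): the
   first branch above fires for

     __attribute__ ((noreturn)) int f (void) { return 1; }

   while the second fires when a non-void function can fall off its
   end with -Wreturn-type enabled, as in

     int g (int x) { if (x) return 1; }  */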
} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
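/* Illustration (hypothetical user code): given

     __attribute__ ((warn_unused_result)) int must_check (void);
     void use (void) { must_check (); }

   the call in `use' is a GIMPLE_CALL with no LHS, so the walk above
   warns about it.  Note that a cast to void does not add an LHS at
   the GIMPLE level, so `(void) must_check ()' is diagnosed too.  */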
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, e.g. they might have marked
   functions nothrow, pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup, verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);
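  /* Worked example of the scaling above (values hypothetical): with
     REG_BR_PROB_BASE == 10000, a cgraph count of 50 and an entry block
     count of 200 give count_scale == 2500, and apply_scale then maps a
     count of 80 to 80 * 2500 / 10000 == 20, i.e. every profile count
     is rescaled by the ratio 50/200, with rounding performed in the
     fixed-point REG_BR_PROB_BASE domain.  */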
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
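/* Illustration (hypothetical source): inlining a noreturn function
   that in fact returns, e.g.

     __attribute__ ((noreturn)) void f (void) { }

   leaves behind a block with no successors and no control statement;
   the loop above caps such a block with __builtin_unreachable so the
   CFG verifier stays satisfied.  */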
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
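/* Note on the three walkers above: an edge roots GC data through its
   source and destination blocks, through any pending statement
   insertions (GIMPLE or RTL, depending on the current IR), and
   through the lexical BLOCK packed into goto_locus, which is why each
   walker extracts that block with LOCATION_BLOCK and visits it
   explicitly.  */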