gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "cfganal.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
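
/* Per-location discriminator state; created and torn down around CFG
   construction (see build_gimple_cfg).  */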
static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
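
/* Set up an empty CFG for FN, containing just the ENTRY and EXIT
   blocks linked directly to each other.  */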
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
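
/* Pass entry point: build the CFG for the current function from its
   GIMPLE body, discard that body, and initialize the loop structures.  */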
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	tree abort_label
	  = gimple_transaction_label (as_a <gtransaction *> (last));
	if (abort_label)
	  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
	fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;
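
      /* When a successor starts or ends on the same source line as this
	 block's last statement, give one of the two blocks a fresh
	 discriminator so sample-based profiling can tell them apart.  */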
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1894 /* Merge block B into block A. */
1896 static void
1897 gimple_merge_blocks (basic_block a, basic_block b)
1899 gimple_stmt_iterator last, gsi;
1900 gphi_iterator psi;
1902 if (dump_file)
1903 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1905 /* Remove all single-valued PHI nodes from block B of the form
1906 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1907 gsi = gsi_last_bb (a);
1908 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1910 gimple phi = gsi_stmt (psi);
1911 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1912 gimple copy;
1913 bool may_replace_uses = (virtual_operand_p (def)
1914 || may_propagate_copy (def, use));
1916 /* In case we maintain loop closed ssa form, do not propagate arguments
1917 of loop exit phi nodes. */
1918 if (current_loops
1919 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1920 && !virtual_operand_p (def)
1921 && TREE_CODE (use) == SSA_NAME
1922 && a->loop_father != b->loop_father)
1923 may_replace_uses = false;
1925 if (!may_replace_uses)
1927 gcc_assert (!virtual_operand_p (def));
1929 /* Note that just emitting the copies is fine -- there is no problem
1930 with ordering of phi nodes. This is because A is the single
1931 predecessor of B, therefore results of the phi nodes cannot
1932 appear as arguments of the phi nodes. */
1933 copy = gimple_build_assign (def, use);
1934 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1935 remove_phi_node (&psi, false);
1937 else
1939 /* If we deal with a PHI for virtual operands, we can simply
1940 propagate these without fussing with folding or updating
1941 the stmt. */
1942 if (virtual_operand_p (def))
1944 imm_use_iterator iter;
1945 use_operand_p use_p;
1946 gimple stmt;
1948 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1949 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1950 SET_USE (use_p, use);
1952 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1953 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1955 else
1956 replace_uses_by (def, use);
1958 remove_phi_node (&psi, true);
1962 /* Ensure that B follows A. */
1963 move_block_after (b, a);
1965 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1966 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1968 /* Remove labels from B and set gimple_bb to A for other statements. */
1969 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1971 gimple stmt = gsi_stmt (gsi);
1972 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1974 tree label = gimple_label_label (label_stmt);
1975 int lp_nr;
1977 gsi_remove (&gsi, false);
1979 /* Now that we can thread computed gotos, we might have
1980 a situation where we have a forced label in block B
1981 However, the label at the start of block B might still be
1982 used in other ways (think about the runtime checking for
1983 Fortran assigned gotos). So we can not just delete the
1984 label. Instead we move the label to the start of block A. */
1985 if (FORCED_LABEL (label))
1987 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1988 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1990 /* Other user labels keep around in a form of a debug stmt. */
1991 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1993 gimple dbg = gimple_build_debug_bind (label,
1994 integer_zero_node,
1995 stmt);
1996 gimple_debug_bind_reset_value (dbg);
1997 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2000 lp_nr = EH_LANDING_PAD_NR (label);
2001 if (lp_nr)
2003 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2004 lp->post_landing_pad = NULL;
2007 else
2009 gimple_set_bb (stmt, a);
2010 gsi_next (&gsi);
2014 /* When merging two BBs, if their counts are different, the larger count
2015 is selected as the new bb count. This is to handle inconsistent
2016 profiles. */
2017 if (a->loop_father == b->loop_father)
2019 a->count = MAX (a->count, b->count);
2020 a->frequency = MAX (a->frequency, b->frequency);
2023 /* Merge the sequences. */
2024 last = gsi_last_bb (a);
2025 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2026 set_bb_seq (b, NULL);
2028 if (cfgcleanup_altered_bbs)
2029 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
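/* As an illustrative sketch of the PHI handling above (hypothetical
   GIMPLE, not taken from a testcase): if A is the single predecessor
   of B and B starts with

       x_2 = PHI <x_1 (A)>

   then merging propagates x_1 into every use of x_2 and removes the
   PHI when may_propagate_copy allows it; otherwise the copy

       x_2 = x_1;

   is appended to A instead.  */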
2033 /* Return the one of BB's two successors that is not reached via a
2034 complex edge, if there is one. Otherwise, return BB. We use
2035 this in optimizations that use post-dominators for their heuristics,
2036 to catch the cases in C++ where function calls are involved. */
2038 basic_block
2039 single_noncomplex_succ (basic_block bb)
2041 edge e0, e1;
2042 if (EDGE_COUNT (bb->succs) != 2)
2043 return bb;
2045 e0 = EDGE_SUCC (bb, 0);
2046 e1 = EDGE_SUCC (bb, 1);
2047 if (e0->flags & EDGE_COMPLEX)
2048 return e1->dest;
2049 if (e1->flags & EDGE_COMPLEX)
2050 return e0->dest;
2052 return bb;
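/* For example (hypothetical CFG): if BB ends in a call that can throw,
   with a fallthru edge to BB2 and an EH edge (flagged EDGE_COMPLEX) to a
   landing pad, single_noncomplex_succ returns BB2.  With anything other
   than exactly two successors, or with no complex edge among the two,
   BB itself is returned.  */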
2055 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2057 void
2058 notice_special_calls (gcall *call)
2060 int flags = gimple_call_flags (call);
2062 if (flags & ECF_MAY_BE_ALLOCA)
2063 cfun->calls_alloca = true;
2064 if (flags & ECF_RETURNS_TWICE)
2065 cfun->calls_setjmp = true;
2069 /* Clear flags set by notice_special_calls. Used by dead code removal
2070 to update the flags. */
2072 void
2073 clear_special_calls (void)
2075 cfun->calls_alloca = false;
2076 cfun->calls_setjmp = false;
2079 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2081 static void
2082 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2084 /* Since this block is no longer reachable, we can just delete all
2085 of its PHI nodes. */
2086 remove_phi_nodes (bb);
2088 /* Remove edges to BB's successors. */
2089 while (EDGE_COUNT (bb->succs) > 0)
2090 remove_edge (EDGE_SUCC (bb, 0));
2094 /* Remove statements of basic block BB. */
2096 static void
2097 remove_bb (basic_block bb)
2099 gimple_stmt_iterator i;
2101 if (dump_file)
2103 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2104 if (dump_flags & TDF_DETAILS)
2106 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2107 fprintf (dump_file, "\n");
2111 if (current_loops)
2113 struct loop *loop = bb->loop_father;
2115 /* If a loop gets removed, clean up the information associated
2116 with it. */
2117 if (loop->latch == bb
2118 || loop->header == bb)
2119 free_numbers_of_iterations_estimates_loop (loop);
2122 /* Remove all the instructions in the block. */
2123 if (bb_seq (bb) != NULL)
2125 /* Walk backwards so as to get a chance to substitute all
2126 released DEFs into debug stmts. See
2127 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2128 details. */
2129 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2131 gimple stmt = gsi_stmt (i);
2132 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2133 if (label_stmt
2134 && (FORCED_LABEL (gimple_label_label (label_stmt))
2135 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2137 basic_block new_bb;
2138 gimple_stmt_iterator new_gsi;
2140 /* A non-reachable non-local label may still be referenced.
2141 But it no longer needs to carry the extra semantics of
2142 non-locality. */
2143 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2145 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2146 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2149 new_bb = bb->prev_bb;
2150 new_gsi = gsi_start_bb (new_bb);
2151 gsi_remove (&i, false);
2152 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2154 else
2156 /* Release SSA definitions if we are in SSA. Note that we
2157 may be called when not in SSA. For example,
2158 final_cleanup calls this function via
2159 cleanup_tree_cfg. */
2160 if (gimple_in_ssa_p (cfun))
2161 release_defs (stmt);
2163 gsi_remove (&i, true);
2166 if (gsi_end_p (i))
2167 i = gsi_last_bb (bb);
2168 else
2169 gsi_prev (&i);
2173 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2174 bb->il.gimple.seq = NULL;
2175 bb->il.gimple.phi_nodes = NULL;
2179 /* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
2180 computed GOTO, and a predicate VAL, return the edge that will be taken
2181 out of the block. If VAL does not match a unique edge, NULL is returned. */
2183 edge
2184 find_taken_edge (basic_block bb, tree val)
2186 gimple stmt;
2188 stmt = last_stmt (bb);
2190 gcc_assert (stmt);
2191 gcc_assert (is_ctrl_stmt (stmt));
2193 if (val == NULL)
2194 return NULL;
2196 if (!is_gimple_min_invariant (val))
2197 return NULL;
2199 if (gimple_code (stmt) == GIMPLE_COND)
2200 return find_taken_edge_cond_expr (bb, val);
2202 if (gimple_code (stmt) == GIMPLE_SWITCH)
2203 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2205 if (computed_goto_p (stmt))
2207 /* Only optimize if the argument is a label; if the argument is
2208 not a label then we cannot construct a proper CFG.
2210 It may be the case that we only need to allow the LABEL_REF to
2211 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2212 appear inside a LABEL_EXPR just to be safe. */
2213 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2214 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2215 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2216 return NULL;
2219 gcc_unreachable ();
2222 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2223 statement, determine which of the outgoing edges will be taken out of the
2224 block. Return NULL if any edge may be taken. */
2226 static edge
2227 find_taken_edge_computed_goto (basic_block bb, tree val)
2229 basic_block dest;
2230 edge e = NULL;
2232 dest = label_to_block (val);
2233 if (dest)
2235 e = find_edge (bb, dest);
2236 gcc_assert (e != NULL);
2239 return e;
2242 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2243 statement, determine which of the two edges will be taken out of the
2244 block. Return NULL if either edge may be taken. */
2246 static edge
2247 find_taken_edge_cond_expr (basic_block bb, tree val)
2249 edge true_edge, false_edge;
2251 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2253 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2254 return (integer_zerop (val) ? false_edge : true_edge);
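/* An illustrative example (not from a testcase): for a block ending in

       if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   a VAL of constant 0 selects the false edge (to <bb 4>) and any other
   integer constant selects the true edge (to <bb 3>).  */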
2257 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2258 statement, determine which edge will be taken out of the block. Return
2259 NULL if any edge may be taken. */
2261 static edge
2262 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2263 tree val)
2265 basic_block dest_bb;
2266 edge e;
2267 tree taken_case;
2269 taken_case = find_case_label_for_value (switch_stmt, val);
2270 dest_bb = label_to_block (CASE_LABEL (taken_case));
2272 e = find_edge (bb, dest_bb);
2273 gcc_assert (e);
2274 return e;
2278 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2279 We can make optimal use here of the fact that the case labels are
2280 sorted: We can do a binary search for a case matching VAL. */
2282 static tree
2283 find_case_label_for_value (gswitch *switch_stmt, tree val)
2285 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2286 tree default_case = gimple_switch_default_label (switch_stmt);
2288 for (low = 0, high = n; high - low > 1; )
2290 size_t i = (high + low) / 2;
2291 tree t = gimple_switch_label (switch_stmt, i);
2292 int cmp;
2294 /* Cache the result of comparing CASE_LOW and val. */
2295 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2297 if (cmp > 0)
2298 high = i;
2299 else
2300 low = i;
2302 if (CASE_HIGH (t) == NULL)
2304 /* A single-valued case label. */
2305 if (cmp == 0)
2306 return t;
2308 else
2310 /* A case range. We can only handle integer ranges. */
2311 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2312 return t;
2316 return default_case;
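/* A sketch of the binary search above on a hypothetical switch

       switch (x_1) <default: L4, case 1: L1, case 5 ... 7: L2, case 9: L3>

   The labels at indexes 1 .. n-1 are sorted by CASE_LOW, so for VAL == 6
   the search narrows to the range label "case 5 ... 7" (its CASE_LOW is
   <= 6 and its CASE_HIGH >= 6) and returns it; for VAL == 8 no label
   matches and the default label is returned.  */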
2320 /* Dump a basic block on stderr. */
2322 void
2323 gimple_debug_bb (basic_block bb)
2325 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2329 /* Dump basic block with index N on stderr. */
2331 basic_block
2332 gimple_debug_bb_n (int n)
2334 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2335 return BASIC_BLOCK_FOR_FN (cfun, n);
2339 /* Dump the CFG on stderr.
2341 FLAGS are the same used by the tree dumping functions
2342 (see TDF_* in dumpfile.h). */
2344 void
2345 gimple_debug_cfg (int flags)
2347 gimple_dump_cfg (stderr, flags);
2351 /* Dump the program showing basic block boundaries on the given FILE.
2353 FLAGS are the same used by the tree dumping functions (see TDF_* in
2354 tree.h). */
2356 void
2357 gimple_dump_cfg (FILE *file, int flags)
2359 if (flags & TDF_DETAILS)
2361 dump_function_header (file, current_function_decl, flags);
2362 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2363 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2364 last_basic_block_for_fn (cfun));
2366 brief_dump_cfg (file, flags | TDF_COMMENT);
2367 fprintf (file, "\n");
2370 if (flags & TDF_STATS)
2371 dump_cfg_stats (file);
2373 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2377 /* Dump CFG statistics on FILE. */
2379 void
2380 dump_cfg_stats (FILE *file)
2382 static long max_num_merged_labels = 0;
2383 unsigned long size, total = 0;
2384 long num_edges;
2385 basic_block bb;
2386 const char * const fmt_str = "%-30s%-13s%12s\n";
2387 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2388 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2389 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2390 const char *funcname = current_function_name ();
2392 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2394 fprintf (file, "---------------------------------------------------------\n");
2395 fprintf (file, fmt_str, "", " Number of ", "Memory");
2396 fprintf (file, fmt_str, "", " instances ", "used ");
2397 fprintf (file, "---------------------------------------------------------\n");
2399 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2400 total += size;
2401 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2402 SCALE (size), LABEL (size));
2404 num_edges = 0;
2405 FOR_EACH_BB_FN (bb, cfun)
2406 num_edges += EDGE_COUNT (bb->succs);
2407 size = num_edges * sizeof (struct edge_def);
2408 total += size;
2409 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2411 fprintf (file, "---------------------------------------------------------\n");
2412 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2413 LABEL (total));
2414 fprintf (file, "---------------------------------------------------------\n");
2415 fprintf (file, "\n");
2417 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2418 max_num_merged_labels = cfg_stats.num_merged_labels;
2420 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2421 cfg_stats.num_merged_labels, max_num_merged_labels);
2423 fprintf (file, "\n");
2427 /* Dump CFG statistics on stderr. Keep extern so that it's always
2428 linked in the final executable. */
2430 DEBUG_FUNCTION void
2431 debug_cfg_stats (void)
2433 dump_cfg_stats (stderr);
2436 /*---------------------------------------------------------------------------
2437 Miscellaneous helpers
2438 ---------------------------------------------------------------------------*/
2440 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2441 flow. Transfers of control flow associated with EH are excluded. */
2443 static bool
2444 call_can_make_abnormal_goto (gimple t)
2446 /* If the function has no non-local labels, then a call cannot make an
2447 abnormal transfer of control. */
2448 if (!cfun->has_nonlocal_label
2449 && !cfun->calls_setjmp)
2450 return false;
2452 /* Likewise if the call has no side effects. */
2453 if (!gimple_has_side_effects (t))
2454 return false;
2456 /* Likewise if the called function is leaf. */
2457 if (gimple_call_flags (t) & ECF_LEAF)
2458 return false;
2460 return true;
2464 /* Return true if T can make an abnormal transfer of control flow.
2465 Transfers of control flow associated with EH are excluded. */
2467 bool
2468 stmt_can_make_abnormal_goto (gimple t)
2470 if (computed_goto_p (t))
2471 return true;
2472 if (is_gimple_call (t))
2473 return call_can_make_abnormal_goto (t);
2474 return false;
2478 /* Return true if T represents a stmt that always transfers control. */
2480 bool
2481 is_ctrl_stmt (gimple t)
2483 switch (gimple_code (t))
2485 case GIMPLE_COND:
2486 case GIMPLE_SWITCH:
2487 case GIMPLE_GOTO:
2488 case GIMPLE_RETURN:
2489 case GIMPLE_RESX:
2490 return true;
2491 default:
2492 return false;
2497 /* Return true if T is a statement that may alter the flow of control
2498 (e.g., a call to a non-returning function). */
2500 bool
2501 is_ctrl_altering_stmt (gimple t)
2503 gcc_assert (t);
2505 switch (gimple_code (t))
2507 case GIMPLE_CALL:
2508 /* The per-stmt call flag indicates whether the call could alter
2509 control flow. */
2510 if (gimple_call_ctrl_altering_p (t))
2511 return true;
2512 break;
2514 case GIMPLE_EH_DISPATCH:
2515 /* EH_DISPATCH branches to the individual catch handlers at
2516 this level of a try or allowed-exceptions region. It can
2517 fallthru to the next statement as well. */
2518 return true;
2520 case GIMPLE_ASM:
2521 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2522 return true;
2523 break;
2525 CASE_GIMPLE_OMP:
2526 /* OpenMP directives alter control flow. */
2527 return true;
2529 case GIMPLE_TRANSACTION:
2530 /* A transaction start alters control flow. */
2531 return true;
2533 default:
2534 break;
2537 /* If a statement can throw, it alters control flow. */
2538 return stmt_can_throw_internal (t);
2542 /* Return true if T is a simple local goto. */
2544 bool
2545 simple_goto_p (gimple t)
2547 return (gimple_code (t) == GIMPLE_GOTO
2548 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2552 /* Return true if STMT should start a new basic block. PREV_STMT is
2553 the statement preceding STMT. It is used when STMT is a label or a
2554 case label. Labels should only start a new basic block if their
2555 previous statement wasn't a label. Otherwise, a sequence of labels
2556 would generate unnecessary basic blocks that contain only a single
2557 label. */
2559 static inline bool
2560 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2562 if (stmt == NULL)
2563 return false;
2565 /* Labels start a new basic block only if the preceding statement
2566 wasn't a label of the same type. This prevents the creation of
2567 consecutive blocks that have nothing but a single label. */
2568 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2570 /* Nonlocal and computed GOTO targets always start a new block. */
2571 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2572 || FORCED_LABEL (gimple_label_label (label_stmt)))
2573 return true;
2575 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2577 if (DECL_NONLOCAL (gimple_label_label (
2578 as_a <glabel *> (prev_stmt))))
2579 return true;
2581 cfg_stats.num_merged_labels++;
2582 return false;
2584 else
2585 return true;
2587 else if (gimple_code (stmt) == GIMPLE_CALL
2588 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2589 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2590 start a new block. */
2591 return true;
2593 return false;
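/* For instance (illustrative GIMPLE): in the sequence

       L1:
       L2:
       x_1 = 1;

   only L1 starts a new basic block; L2 is merged into the same block and
   counted in cfg_stats.num_merged_labels.  If L2 were nonlocal or forced,
   it would start a block of its own.  */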
2597 /* Return true if T should end a basic block. */
2599 bool
2600 stmt_ends_bb_p (gimple t)
2602 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2605 /* Remove block annotations and other data structures. */
2607 void
2608 delete_tree_cfg_annotations (void)
2610 vec_free (label_to_block_map_for_fn (cfun));
2613 /* Return the virtual PHI node in BB, or NULL if there is none. */
2615 gphi *
2616 get_virtual_phi (basic_block bb)
2618 for (gphi_iterator gsi = gsi_start_phis (bb);
2619 !gsi_end_p (gsi);
2620 gsi_next (&gsi))
2622 gphi *phi = gsi.phi ();
2624 if (virtual_operand_p (PHI_RESULT (phi)))
2625 return phi;
2628 return NULL;
2631 /* Return the first non-debug statement in basic block BB. */
2633 gimple
2634 first_stmt (basic_block bb)
2636 gimple_stmt_iterator i = gsi_start_bb (bb);
2637 gimple stmt = NULL;
2639 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2641 gsi_next (&i);
2642 stmt = NULL;
2644 return stmt;
2647 /* Return the first non-label statement in basic block BB. */
2649 static gimple
2650 first_non_label_stmt (basic_block bb)
2652 gimple_stmt_iterator i = gsi_start_bb (bb);
2653 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2654 gsi_next (&i);
2655 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2658 /* Return the last non-debug statement in basic block BB. */
2660 gimple
2661 last_stmt (basic_block bb)
2663 gimple_stmt_iterator i = gsi_last_bb (bb);
2664 gimple stmt = NULL;
2666 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2668 gsi_prev (&i);
2669 stmt = NULL;
2671 return stmt;
2674 /* Return the last statement of an otherwise empty block. Return NULL
2675 if the block is totally empty, or if it contains more than one
2676 statement. */
2678 gimple
2679 last_and_only_stmt (basic_block bb)
2681 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2682 gimple last, prev;
2684 if (gsi_end_p (i))
2685 return NULL;
2687 last = gsi_stmt (i);
2688 gsi_prev_nondebug (&i);
2689 if (gsi_end_p (i))
2690 return last;
2692 /* Empty statements should no longer appear in the instruction stream.
2693 Everything that might have appeared before should be deleted by
2694 remove_useless_stmts, and the optimizers should just gsi_remove
2695 instead of smashing with build_empty_stmt.
2697 Thus the only thing that should appear here in a block containing
2698 one executable statement is a label. */
2699 prev = gsi_stmt (i);
2700 if (gimple_code (prev) == GIMPLE_LABEL)
2701 return last;
2702 else
2703 return NULL;
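/* Illustrative examples (assuming no debug statements): a block holding
   only

       L1:
       return x_1;

   yields the GIMPLE_RETURN, since the only other statement is a label,
   whereas a block with two executable statements yields NULL.  */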
2706 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2708 static void
2709 reinstall_phi_args (edge new_edge, edge old_edge)
2711 edge_var_map *vm;
2712 int i;
2713 gphi_iterator phis;
2715 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2716 if (!v)
2717 return;
2719 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2720 v->iterate (i, &vm) && !gsi_end_p (phis);
2721 i++, gsi_next (&phis))
2723 gphi *phi = phis.phi ();
2724 tree result = redirect_edge_var_map_result (vm);
2725 tree arg = redirect_edge_var_map_def (vm);
2727 gcc_assert (result == gimple_phi_result (phi));
2729 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2732 redirect_edge_var_map_clear (old_edge);
2735 /* Returns the basic block after which the new basic block created
2736 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2737 near its "logical" location. This is of most help to humans looking
2738 at debugging dumps. */
2740 basic_block
2741 split_edge_bb_loc (edge edge_in)
2743 basic_block dest = edge_in->dest;
2744 basic_block dest_prev = dest->prev_bb;
2746 if (dest_prev)
2748 edge e = find_edge (dest_prev, dest);
2749 if (e && !(e->flags & EDGE_COMPLEX))
2750 return edge_in->src;
2752 return dest_prev;
2755 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2756 Abort on abnormal edges. */
2758 static basic_block
2759 gimple_split_edge (edge edge_in)
2761 basic_block new_bb, after_bb, dest;
2762 edge new_edge, e;
2764 /* Abnormal edges cannot be split. */
2765 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2767 dest = edge_in->dest;
2769 after_bb = split_edge_bb_loc (edge_in);
2771 new_bb = create_empty_bb (after_bb);
2772 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2773 new_bb->count = edge_in->count;
2774 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2775 new_edge->probability = REG_BR_PROB_BASE;
2776 new_edge->count = edge_in->count;
2778 e = redirect_edge_and_branch (edge_in, new_bb);
2779 gcc_assert (e == edge_in);
2780 reinstall_phi_args (new_edge, e);
2782 return new_bb;
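/* A sketch of the effect (hypothetical CFG): splitting a critical edge
   BB2 -> BB5 yields a new empty block BB6 so that control flows
   BB2 -> BB6 -> BB5, where BB6 -> BB5 is the fallthru edge and now
   carries the PHI arguments formerly associated with BB2 -> BB5.  */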
2786 /* Verify properties of the address expression T with base object BASE. */
2788 static tree
2789 verify_address (tree t, tree base)
2791 bool old_constant;
2792 bool old_side_effects;
2793 bool new_constant;
2794 bool new_side_effects;
2796 old_constant = TREE_CONSTANT (t);
2797 old_side_effects = TREE_SIDE_EFFECTS (t);
2799 recompute_tree_invariant_for_addr_expr (t);
2800 new_side_effects = TREE_SIDE_EFFECTS (t);
2801 new_constant = TREE_CONSTANT (t);
2803 if (old_constant != new_constant)
2805 error ("constant not recomputed when ADDR_EXPR changed");
2806 return t;
2808 if (old_side_effects != new_side_effects)
2810 error ("side effects not recomputed when ADDR_EXPR changed");
2811 return t;
2814 if (!(TREE_CODE (base) == VAR_DECL
2815 || TREE_CODE (base) == PARM_DECL
2816 || TREE_CODE (base) == RESULT_DECL))
2817 return NULL_TREE;
2819 if (DECL_GIMPLE_REG_P (base))
2821 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2822 return base;
2825 return NULL_TREE;
2828 /* Callback for walk_tree, check that all elements with address taken are
2829 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2830 inside a PHI node. */
2832 static tree
2833 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2835 tree t = *tp, x;
2837 if (TYPE_P (t))
2838 *walk_subtrees = 0;
2840 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2841 #define CHECK_OP(N, MSG) \
2842 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2843 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2845 switch (TREE_CODE (t))
2847 case SSA_NAME:
2848 if (SSA_NAME_IN_FREE_LIST (t))
2850 error ("SSA name in freelist but still referenced");
2851 return *tp;
2853 break;
2855 case INDIRECT_REF:
2856 error ("INDIRECT_REF in gimple IL");
2857 return t;
2859 case MEM_REF:
2860 x = TREE_OPERAND (t, 0);
2861 if (!POINTER_TYPE_P (TREE_TYPE (x))
2862 || !is_gimple_mem_ref_addr (x))
2864 error ("invalid first operand of MEM_REF");
2865 return x;
2867 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2868 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2870 error ("invalid offset operand of MEM_REF");
2871 return TREE_OPERAND (t, 1);
2873 if (TREE_CODE (x) == ADDR_EXPR
2874 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2875 return x;
2876 *walk_subtrees = 0;
2877 break;
2879 case ASSERT_EXPR:
2880 x = fold (ASSERT_EXPR_COND (t));
2881 if (x == boolean_false_node)
2883 error ("ASSERT_EXPR with an always-false condition");
2884 return *tp;
2886 break;
2888 case MODIFY_EXPR:
2889 error ("MODIFY_EXPR not expected while having tuples");
2890 return *tp;
2892 case ADDR_EXPR:
2894 tree tem;
2896 gcc_assert (is_gimple_address (t));
2898 /* Skip any references (they will be checked when we recurse down the
2899 tree) and ensure that any variable used as a prefix is marked
2900 addressable. */
2901 for (x = TREE_OPERAND (t, 0);
2902 handled_component_p (x);
2903 x = TREE_OPERAND (x, 0))
2906 if ((tem = verify_address (t, x)))
2907 return tem;
2909 if (!(TREE_CODE (x) == VAR_DECL
2910 || TREE_CODE (x) == PARM_DECL
2911 || TREE_CODE (x) == RESULT_DECL))
2912 return NULL;
2914 if (!TREE_ADDRESSABLE (x))
2916 error ("address taken, but ADDRESSABLE bit not set");
2917 return x;
2920 break;
2923 case COND_EXPR:
2924 x = COND_EXPR_COND (t);
2925 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2927 error ("non-integral used in condition");
2928 return x;
2930 if (!is_gimple_condexpr (x))
2932 error ("invalid conditional operand");
2933 return x;
2935 break;
2937 case NON_LVALUE_EXPR:
2938 case TRUTH_NOT_EXPR:
2939 gcc_unreachable ();
2941 CASE_CONVERT:
2942 case FIX_TRUNC_EXPR:
2943 case FLOAT_EXPR:
2944 case NEGATE_EXPR:
2945 case ABS_EXPR:
2946 case BIT_NOT_EXPR:
2947 CHECK_OP (0, "invalid operand to unary operator");
2948 break;
2950 case REALPART_EXPR:
2951 case IMAGPART_EXPR:
2952 case BIT_FIELD_REF:
2953 if (!is_gimple_reg_type (TREE_TYPE (t)))
2955 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2956 return t;
2959 if (TREE_CODE (t) == BIT_FIELD_REF)
2961 tree t0 = TREE_OPERAND (t, 0);
2962 tree t1 = TREE_OPERAND (t, 1);
2963 tree t2 = TREE_OPERAND (t, 2);
2964 if (!tree_fits_uhwi_p (t1)
2965 || !tree_fits_uhwi_p (t2))
2967 error ("invalid position or size operand to BIT_FIELD_REF");
2968 return t;
2970 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2971 && (TYPE_PRECISION (TREE_TYPE (t))
2972 != tree_to_uhwi (t1)))
2974 error ("integral result type precision does not match "
2975 "field size of BIT_FIELD_REF");
2976 return t;
2978 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2979 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2980 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2981 != tree_to_uhwi (t1)))
2983 error ("mode precision of non-integral result does not "
2984 "match field size of BIT_FIELD_REF");
2985 return t;
2987 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2988 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2989 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2991 error ("position plus size exceeds size of referenced object in "
2992 "BIT_FIELD_REF");
2993 return t;
2996 t = TREE_OPERAND (t, 0);
2998 /* Fall-through. */
2999 case COMPONENT_REF:
3000 case ARRAY_REF:
3001 case ARRAY_RANGE_REF:
3002 case VIEW_CONVERT_EXPR:
3003 /* We have a nest of references. Verify that each operand
3004 that determines where to reference is either a constant or a variable,
3005 verify that the base is valid, and then show we've already checked
3006 the subtrees. */
3007 while (handled_component_p (t))
3009 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3010 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3011 else if (TREE_CODE (t) == ARRAY_REF
3012 || TREE_CODE (t) == ARRAY_RANGE_REF)
3014 CHECK_OP (1, "invalid array index");
3015 if (TREE_OPERAND (t, 2))
3016 CHECK_OP (2, "invalid array lower bound");
3017 if (TREE_OPERAND (t, 3))
3018 CHECK_OP (3, "invalid array stride");
3020 else if (TREE_CODE (t) == BIT_FIELD_REF
3021 || TREE_CODE (t) == REALPART_EXPR
3022 || TREE_CODE (t) == IMAGPART_EXPR)
3024 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3025 "REALPART_EXPR");
3026 return t;
3029 t = TREE_OPERAND (t, 0);
3032 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3034 error ("invalid reference prefix");
3035 return t;
3037 *walk_subtrees = 0;
3038 break;
3039 case PLUS_EXPR:
3040 case MINUS_EXPR:
3041 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3042 should be done using POINTER_PLUS_EXPR. */
3043 if (POINTER_TYPE_P (TREE_TYPE (t)))
3045 error ("invalid operand to plus/minus, type is a pointer");
3046 return t;
3048 CHECK_OP (0, "invalid operand to binary operator");
3049 CHECK_OP (1, "invalid operand to binary operator");
3050 break;
3052 case POINTER_PLUS_EXPR:
3053 /* Check to make sure the first operand is a pointer or reference type. */
3054 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3056 error ("invalid operand to pointer plus, first operand is not a pointer");
3057 return t;
3059 /* Check to make sure the second operand is a ptrofftype. */
3060 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3062 error ("invalid operand to pointer plus, second operand is not an "
3063 "integer type of appropriate width");
3064 return t;
3066 /* FALLTHROUGH */
3067 case LT_EXPR:
3068 case LE_EXPR:
3069 case GT_EXPR:
3070 case GE_EXPR:
3071 case EQ_EXPR:
3072 case NE_EXPR:
3073 case UNORDERED_EXPR:
3074 case ORDERED_EXPR:
3075 case UNLT_EXPR:
3076 case UNLE_EXPR:
3077 case UNGT_EXPR:
3078 case UNGE_EXPR:
3079 case UNEQ_EXPR:
3080 case LTGT_EXPR:
3081 case MULT_EXPR:
3082 case TRUNC_DIV_EXPR:
3083 case CEIL_DIV_EXPR:
3084 case FLOOR_DIV_EXPR:
3085 case ROUND_DIV_EXPR:
3086 case TRUNC_MOD_EXPR:
3087 case CEIL_MOD_EXPR:
3088 case FLOOR_MOD_EXPR:
3089 case ROUND_MOD_EXPR:
3090 case RDIV_EXPR:
3091 case EXACT_DIV_EXPR:
3092 case MIN_EXPR:
3093 case MAX_EXPR:
3094 case LSHIFT_EXPR:
3095 case RSHIFT_EXPR:
3096 case LROTATE_EXPR:
3097 case RROTATE_EXPR:
3098 case BIT_IOR_EXPR:
3099 case BIT_XOR_EXPR:
3100 case BIT_AND_EXPR:
3101 CHECK_OP (0, "invalid operand to binary operator");
3102 CHECK_OP (1, "invalid operand to binary operator");
3103 break;
3105 case CONSTRUCTOR:
3106 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3107 *walk_subtrees = 0;
3108 break;
3110 case CASE_LABEL_EXPR:
3111 if (CASE_CHAIN (t))
3113 error ("invalid CASE_CHAIN");
3114 return t;
3116 break;
3118 default:
3119 break;
3121 return NULL;
3123 #undef CHECK_OP
3127 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3128 Returns true if there is an error, otherwise false. */
3130 static bool
3131 verify_types_in_gimple_min_lval (tree expr)
3133 tree op;
3135 if (is_gimple_id (expr))
3136 return false;
3138 if (TREE_CODE (expr) != TARGET_MEM_REF
3139 && TREE_CODE (expr) != MEM_REF)
3141 error ("invalid expression for min lvalue");
3142 return true;
3145 /* TARGET_MEM_REFs are strange beasts. */
3146 if (TREE_CODE (expr) == TARGET_MEM_REF)
3147 return false;
3149 op = TREE_OPERAND (expr, 0);
3150 if (!is_gimple_val (op))
3152 error ("invalid operand in indirect reference");
3153 debug_generic_stmt (op);
3154 return true;
3156 /* Memory references now generally can involve a value conversion. */
3158 return false;
3161 /* Verify if EXPR is a valid GIMPLE reference expression. If
3162 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3163 if there is an error, otherwise false. */
3165 static bool
3166 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3168 while (handled_component_p (expr))
3170 tree op = TREE_OPERAND (expr, 0);
3172 if (TREE_CODE (expr) == ARRAY_REF
3173 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3175 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3176 || (TREE_OPERAND (expr, 2)
3177 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3178 || (TREE_OPERAND (expr, 3)
3179 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3181 error ("invalid operands to array reference");
3182 debug_generic_stmt (expr);
3183 return true;
3187 /* Verify if the reference array element types are compatible. */
3188 if (TREE_CODE (expr) == ARRAY_REF
3189 && !useless_type_conversion_p (TREE_TYPE (expr),
3190 TREE_TYPE (TREE_TYPE (op))))
3192 error ("type mismatch in array reference");
3193 debug_generic_stmt (TREE_TYPE (expr));
3194 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3195 return true;
3197 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3198 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3199 TREE_TYPE (TREE_TYPE (op))))
3201 error ("type mismatch in array range reference");
3202 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3204 return true;
3207 if ((TREE_CODE (expr) == REALPART_EXPR
3208 || TREE_CODE (expr) == IMAGPART_EXPR)
3209 && !useless_type_conversion_p (TREE_TYPE (expr),
3210 TREE_TYPE (TREE_TYPE (op))))
3212 error ("type mismatch in real/imagpart reference");
3213 debug_generic_stmt (TREE_TYPE (expr));
3214 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3215 return true;
3218 if (TREE_CODE (expr) == COMPONENT_REF
3219 && !useless_type_conversion_p (TREE_TYPE (expr),
3220 TREE_TYPE (TREE_OPERAND (expr, 1))))
3222 error ("type mismatch in component reference");
3223 debug_generic_stmt (TREE_TYPE (expr));
3224 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3225 return true;
3228 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3230 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3231 that their operand is not an SSA name or an invariant when
3232 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3233 bug). Otherwise there is nothing to verify, gross mismatches at
3234 most invoke undefined behavior. */
3235 if (require_lvalue
3236 && (TREE_CODE (op) == SSA_NAME
3237 || is_gimple_min_invariant (op)))
3239 error ("conversion of an SSA_NAME on the left hand side");
3240 debug_generic_stmt (expr);
3241 return true;
3243 else if (TREE_CODE (op) == SSA_NAME
3244 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3246 error ("conversion of register to a different size");
3247 debug_generic_stmt (expr);
3248 return true;
3250 else if (!handled_component_p (op))
3251 return false;
3254 expr = op;
3257 if (TREE_CODE (expr) == MEM_REF)
3259 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3261 error ("invalid address operand in MEM_REF");
3262 debug_generic_stmt (expr);
3263 return true;
3265 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3266 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3268 error ("invalid offset operand in MEM_REF");
3269 debug_generic_stmt (expr);
3270 return true;
3273 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3275 if (!TMR_BASE (expr)
3276 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3278 error ("invalid address operand in TARGET_MEM_REF");
3279 return true;
3281 if (!TMR_OFFSET (expr)
3282 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3283 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3285 error ("invalid offset operand in TARGET_MEM_REF");
3286 debug_generic_stmt (expr);
3287 return true;
3291 return ((require_lvalue || !is_gimple_min_invariant (expr))
3292 && verify_types_in_gimple_min_lval (expr));
3295 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3296 list of pointer-to types that is trivially convertible to DEST. */
3298 static bool
3299 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3301 tree src;
3303 if (!TYPE_POINTER_TO (src_obj))
3304 return true;
3306 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3307 if (useless_type_conversion_p (dest, src))
3308 return true;
3310 return false;
3313 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3314 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3316 static bool
3317 valid_fixed_convert_types_p (tree type1, tree type2)
3319 return (FIXED_POINT_TYPE_P (type1)
3320 && (INTEGRAL_TYPE_P (type2)
3321 || SCALAR_FLOAT_TYPE_P (type2)
3322 || FIXED_POINT_TYPE_P (type2)));
3325 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3326 is a problem, otherwise false. */
3328 static bool
3329 verify_gimple_call (gcall *stmt)
3331 tree fn = gimple_call_fn (stmt);
3332 tree fntype, fndecl;
3333 unsigned i;
3335 if (gimple_call_internal_p (stmt))
3337 if (fn)
3339 error ("gimple call has two targets");
3340 debug_generic_stmt (fn);
3341 return true;
3344 else
3346 if (!fn)
3348 error ("gimple call has no target");
3349 return true;
3353 if (fn && !is_gimple_call_addr (fn))
3355 error ("invalid function in gimple call");
3356 debug_generic_stmt (fn);
3357 return true;
3360 if (fn
3361 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3362 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3363 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3365 error ("non-function in gimple call");
3366 return true;
3369 fndecl = gimple_call_fndecl (stmt);
3370 if (fndecl
3371 && TREE_CODE (fndecl) == FUNCTION_DECL
3372 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3373 && !DECL_PURE_P (fndecl)
3374 && !TREE_READONLY (fndecl))
3376 error ("invalid pure const state for function");
3377 return true;
3380 tree lhs = gimple_call_lhs (stmt);
3381 if (lhs
3382 && (!is_gimple_lvalue (lhs)
3383 || verify_types_in_gimple_reference (lhs, true)))
3385 error ("invalid LHS in gimple call");
3386 return true;
3389 if (lhs
3390 && gimple_call_ctrl_altering_p (stmt)
3391 && gimple_call_noreturn_p (stmt)
3392 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3394 error ("LHS in noreturn call");
3395 return true;
3398 fntype = gimple_call_fntype (stmt);
3399 if (fntype
3400 && lhs
3401 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3402 /* ??? At least C++ misses conversions at assignments from
3403 void * call results.
3404 ??? Java is completely off. Especially with functions
3405 returning java.lang.Object.
3406 For now simply allow arbitrary pointer type conversions. */
3407 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3408 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3410 error ("invalid conversion in gimple call");
3411 debug_generic_stmt (TREE_TYPE (lhs));
3412 debug_generic_stmt (TREE_TYPE (fntype));
3413 return true;
3416 if (gimple_call_chain (stmt)
3417 && !is_gimple_val (gimple_call_chain (stmt)))
3419 error ("invalid static chain in gimple call");
3420 debug_generic_stmt (gimple_call_chain (stmt));
3421 return true;
3424 /* If there is a static chain argument, the call should either be
3425 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3426 if (gimple_call_chain (stmt)
3427 && fndecl
3428 && !DECL_STATIC_CHAIN (fndecl))
3430 error ("static chain with function that doesn%'t use one");
3431 return true;
3434 /* ??? The C frontend passes unpromoted arguments in case it
3435 didn't see a function declaration before the call. So for now
3436 leave the call arguments mostly unverified. Once we gimplify
3437 unit-at-a-time we have a chance to fix this. */
3439 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3441 tree arg = gimple_call_arg (stmt, i);
3442 if ((is_gimple_reg_type (TREE_TYPE (arg))
3443 && !is_gimple_val (arg))
3444 || (!is_gimple_reg_type (TREE_TYPE (arg))
3445 && !is_gimple_lvalue (arg)))
3447 error ("invalid argument to gimple call");
3448 debug_generic_expr (arg);
3449 return true;
3453 return false;
3456 /* Verifies the gimple comparison with the result type TYPE and
3457 the operands OP0 and OP1. */
3459 static bool
3460 verify_gimple_comparison (tree type, tree op0, tree op1)
3462 tree op0_type = TREE_TYPE (op0);
3463 tree op1_type = TREE_TYPE (op1);
3465 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3467 error ("invalid operands in gimple comparison");
3468 return true;
3471 /* For comparisons we do not have the operation's type as the
3472 effective type the comparison is carried out in. Instead
3473 we require that either the first operand is trivially
3474 convertible into the second, or the other way around.
3475 Because we special-case pointers to void we allow
3476 comparisons of pointers with the same mode as well. */
3477 if (!useless_type_conversion_p (op0_type, op1_type)
3478 && !useless_type_conversion_p (op1_type, op0_type)
3479 && (!POINTER_TYPE_P (op0_type)
3480 || !POINTER_TYPE_P (op1_type)
3481 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3483 error ("mismatching comparison operand types");
3484 debug_generic_expr (op0_type);
3485 debug_generic_expr (op1_type);
3486 return true;
3489 /* The resulting type of a comparison may be an effective boolean type. */
3490 if (INTEGRAL_TYPE_P (type)
3491 && (TREE_CODE (type) == BOOLEAN_TYPE
3492 || TYPE_PRECISION (type) == 1))
3494 if (TREE_CODE (op0_type) == VECTOR_TYPE
3495 || TREE_CODE (op1_type) == VECTOR_TYPE)
3497 error ("vector comparison returning a boolean");
3498 debug_generic_expr (op0_type);
3499 debug_generic_expr (op1_type);
3500 return true;
3503 /* Or an integer vector type with the same size and element count
3504 as the comparison operand types. */
3505 else if (TREE_CODE (type) == VECTOR_TYPE
3506 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3508 if (TREE_CODE (op0_type) != VECTOR_TYPE
3509 || TREE_CODE (op1_type) != VECTOR_TYPE)
3511 error ("non-vector operands in vector comparison");
3512 debug_generic_expr (op0_type);
3513 debug_generic_expr (op1_type);
3514 return true;
3517 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3518 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3519 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3520 /* The result of a vector comparison is of signed
3521 integral type. */
3522 || TYPE_UNSIGNED (TREE_TYPE (type)))
3524 error ("invalid vector comparison resulting type");
3525 debug_generic_expr (type);
3526 return true;
3529 else
3531 error ("bogus comparison result type");
3532 debug_generic_expr (type);
3533 return true;
3536 return false;
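/* Illustrative well-typed comparisons (hypothetical GIMPLE): with scalar
   operands, "_1 = x_2 < y_3" may produce a BOOLEAN_TYPE (or 1-bit
   integral) result; with V4SF operands the result must instead be a
   signed integer vector with four subparts whose elements match SFmode
   in size, e.g. V4SI -- a plain boolean result would be rejected.  */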
3539 /* Verify a gimple assignment statement STMT with a unary rhs.
3540 Returns true if anything is wrong. */
3542 static bool
3543 verify_gimple_assign_unary (gassign *stmt)
3545 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3546 tree lhs = gimple_assign_lhs (stmt);
3547 tree lhs_type = TREE_TYPE (lhs);
3548 tree rhs1 = gimple_assign_rhs1 (stmt);
3549 tree rhs1_type = TREE_TYPE (rhs1);
3551 if (!is_gimple_reg (lhs))
3553 error ("non-register as LHS of unary operation");
3554 return true;
3557 if (!is_gimple_val (rhs1))
3559 error ("invalid operand in unary operation");
3560 return true;
3563 /* First handle conversions. */
3564 switch (rhs_code)
3566 CASE_CONVERT:
3568 /* Allow conversions from pointer type to integral type only if
3569 there is no sign or zero extension involved.
3570 For targets where the precision of ptrofftype doesn't match that
3571 of pointers we need to allow arbitrary conversions to ptrofftype. */
3572 if ((POINTER_TYPE_P (lhs_type)
3573 && INTEGRAL_TYPE_P (rhs1_type))
3574 || (POINTER_TYPE_P (rhs1_type)
3575 && INTEGRAL_TYPE_P (lhs_type)
3576 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3577 || ptrofftype_p (sizetype))))
3578 return false;
3580 /* Allow conversion from integral to offset type and vice versa. */
3581 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3582 && INTEGRAL_TYPE_P (rhs1_type))
3583 || (INTEGRAL_TYPE_P (lhs_type)
3584 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3585 return false;
3587 /* Otherwise assert we are converting between types of the
3588 same kind. */
3589 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3591 error ("invalid types in nop conversion");
3592 debug_generic_expr (lhs_type);
3593 debug_generic_expr (rhs1_type);
3594 return true;
3597 return false;
3600 case ADDR_SPACE_CONVERT_EXPR:
3602 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3603 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3604 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3606 error ("invalid types in address space conversion");
3607 debug_generic_expr (lhs_type);
3608 debug_generic_expr (rhs1_type);
3609 return true;
3612 return false;
3615 case FIXED_CONVERT_EXPR:
3617 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3618 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3620 error ("invalid types in fixed-point conversion");
3621 debug_generic_expr (lhs_type);
3622 debug_generic_expr (rhs1_type);
3623 return true;
3626 return false;
3629 case FLOAT_EXPR:
3631 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3632 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3633 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3635 error ("invalid types in conversion to floating point");
3636 debug_generic_expr (lhs_type);
3637 debug_generic_expr (rhs1_type);
3638 return true;
3641 return false;
3644 case FIX_TRUNC_EXPR:
3646 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3647 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3648 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3650 error ("invalid types in conversion to integer");
3651 debug_generic_expr (lhs_type);
3652 debug_generic_expr (rhs1_type);
3653 return true;
3656 return false;
3658 case REDUC_MAX_EXPR:
3659 case REDUC_MIN_EXPR:
3660 case REDUC_PLUS_EXPR:
3661 if (!VECTOR_TYPE_P (rhs1_type)
3662 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3664 error ("reduction should convert from vector to element type");
3665 debug_generic_expr (lhs_type);
3666 debug_generic_expr (rhs1_type);
3667 return true;
3669 return false;
3671 case VEC_UNPACK_HI_EXPR:
3672 case VEC_UNPACK_LO_EXPR:
3673 case VEC_UNPACK_FLOAT_HI_EXPR:
3674 case VEC_UNPACK_FLOAT_LO_EXPR:
3675 /* FIXME. */
3676 return false;
3678 case NEGATE_EXPR:
3679 case ABS_EXPR:
3680 case BIT_NOT_EXPR:
3681 case PAREN_EXPR:
3682 case CONJ_EXPR:
3683 break;
3685 default:
3686 gcc_unreachable ();
3689 /* For the remaining codes assert there is no conversion involved. */
3690 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3692 error ("non-trivial conversion in unary operation");
3693 debug_generic_expr (lhs_type);
3694 debug_generic_expr (rhs1_type);
3695 return true;
3698 return false;
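/* For example (assuming an LP64 target): the nop conversion
   "l_1 = (long) p_2" from a pointer is accepted because no extension is
   involved, as is a truncating "(int) p_2"; a pointer-to-integer
   conversion that would require sign or zero extension is rejected and
   has to be split into a conversion of pointer width followed by a
   separate extension.  */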
3701 /* Verify a gimple assignment statement STMT with a binary rhs.
3702 Returns true if anything is wrong. */
3704 static bool
3705 verify_gimple_assign_binary (gassign *stmt)
3707 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3708 tree lhs = gimple_assign_lhs (stmt);
3709 tree lhs_type = TREE_TYPE (lhs);
3710 tree rhs1 = gimple_assign_rhs1 (stmt);
3711 tree rhs1_type = TREE_TYPE (rhs1);
3712 tree rhs2 = gimple_assign_rhs2 (stmt);
3713 tree rhs2_type = TREE_TYPE (rhs2);
3715 if (!is_gimple_reg (lhs))
3717 error ("non-register as LHS of binary operation");
3718 return true;
3721 if (!is_gimple_val (rhs1)
3722 || !is_gimple_val (rhs2))
3724 error ("invalid operands in binary operation");
3725 return true;
3728 /* First handle operations that involve different types. */
3729 switch (rhs_code)
3731 case COMPLEX_EXPR:
3733 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3734 || !(INTEGRAL_TYPE_P (rhs1_type)
3735 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3736 || !(INTEGRAL_TYPE_P (rhs2_type)
3737 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3739 error ("type mismatch in complex expression");
3740 debug_generic_expr (lhs_type);
3741 debug_generic_expr (rhs1_type);
3742 debug_generic_expr (rhs2_type);
3743 return true;
3746 return false;
3749 case LSHIFT_EXPR:
3750 case RSHIFT_EXPR:
3751 case LROTATE_EXPR:
3752 case RROTATE_EXPR:
3754 /* Shifts and rotates are ok on integral types, fixed point
3755 types and integer vector types. */
3756 if ((!INTEGRAL_TYPE_P (rhs1_type)
3757 && !FIXED_POINT_TYPE_P (rhs1_type)
3758 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3759 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3760 || (!INTEGRAL_TYPE_P (rhs2_type)
3761 /* Vector shifts of vectors are also ok. */
3762 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3763 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3764 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3765 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3766 || !useless_type_conversion_p (lhs_type, rhs1_type))
3768 error ("type mismatch in shift expression");
3769 debug_generic_expr (lhs_type);
3770 debug_generic_expr (rhs1_type);
3771 debug_generic_expr (rhs2_type);
3772 return true;
3775 return false;
3778 case WIDEN_LSHIFT_EXPR:
3780 if (!INTEGRAL_TYPE_P (lhs_type)
3781 || !INTEGRAL_TYPE_P (rhs1_type)
3782 || TREE_CODE (rhs2) != INTEGER_CST
3783 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3785 error ("type mismatch in widening vector shift expression");
3786 debug_generic_expr (lhs_type);
3787 debug_generic_expr (rhs1_type);
3788 debug_generic_expr (rhs2_type);
3789 return true;
3792 return false;
3795 case VEC_WIDEN_LSHIFT_HI_EXPR:
3796 case VEC_WIDEN_LSHIFT_LO_EXPR:
3798 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3799 || TREE_CODE (lhs_type) != VECTOR_TYPE
3800 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3801 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3802 || TREE_CODE (rhs2) != INTEGER_CST
3803 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3804 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3806 error ("type mismatch in widening vector shift expression");
3807 debug_generic_expr (lhs_type);
3808 debug_generic_expr (rhs1_type);
3809 debug_generic_expr (rhs2_type);
3810 return true;
3813 return false;
3816 case PLUS_EXPR:
3817 case MINUS_EXPR:
3819 tree lhs_etype = lhs_type;
3820 tree rhs1_etype = rhs1_type;
3821 tree rhs2_etype = rhs2_type;
3822 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3824 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3825 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3827 error ("invalid non-vector operands to vector valued plus");
3828 return true;
3830 lhs_etype = TREE_TYPE (lhs_type);
3831 rhs1_etype = TREE_TYPE (rhs1_type);
3832 rhs2_etype = TREE_TYPE (rhs2_type);
3834 if (POINTER_TYPE_P (lhs_etype)
3835 || POINTER_TYPE_P (rhs1_etype)
3836 || POINTER_TYPE_P (rhs2_etype))
3838 error ("invalid (pointer) operands to plus/minus");
3839 return true;
3842 /* Continue with generic binary expression handling. */
3843 break;
3846 case POINTER_PLUS_EXPR:
3848 if (!POINTER_TYPE_P (rhs1_type)
3849 || !useless_type_conversion_p (lhs_type, rhs1_type)
3850 || !ptrofftype_p (rhs2_type))
3852 error ("type mismatch in pointer plus expression");
3853 debug_generic_stmt (lhs_type);
3854 debug_generic_stmt (rhs1_type);
3855 debug_generic_stmt (rhs2_type);
3856 return true;
3859 return false;
3862 case TRUTH_ANDIF_EXPR:
3863 case TRUTH_ORIF_EXPR:
3864 case TRUTH_AND_EXPR:
3865 case TRUTH_OR_EXPR:
3866 case TRUTH_XOR_EXPR:
3868 gcc_unreachable ();
3870 case LT_EXPR:
3871 case LE_EXPR:
3872 case GT_EXPR:
3873 case GE_EXPR:
3874 case EQ_EXPR:
3875 case NE_EXPR:
3876 case UNORDERED_EXPR:
3877 case ORDERED_EXPR:
3878 case UNLT_EXPR:
3879 case UNLE_EXPR:
3880 case UNGT_EXPR:
3881 case UNGE_EXPR:
3882 case UNEQ_EXPR:
3883 case LTGT_EXPR:
3884 /* Comparisons are also binary, but the result type is not
3885 connected to the operand types. */
3886 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3888 case WIDEN_MULT_EXPR:
3889 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3890 return true;
3891 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3892 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3894 case WIDEN_SUM_EXPR:
3895 case VEC_WIDEN_MULT_HI_EXPR:
3896 case VEC_WIDEN_MULT_LO_EXPR:
3897 case VEC_WIDEN_MULT_EVEN_EXPR:
3898 case VEC_WIDEN_MULT_ODD_EXPR:
3899 case VEC_PACK_TRUNC_EXPR:
3900 case VEC_PACK_SAT_EXPR:
3901 case VEC_PACK_FIX_TRUNC_EXPR:
3902 /* FIXME. */
3903 return false;
3905 case MULT_EXPR:
3906 case MULT_HIGHPART_EXPR:
3907 case TRUNC_DIV_EXPR:
3908 case CEIL_DIV_EXPR:
3909 case FLOOR_DIV_EXPR:
3910 case ROUND_DIV_EXPR:
3911 case TRUNC_MOD_EXPR:
3912 case CEIL_MOD_EXPR:
3913 case FLOOR_MOD_EXPR:
3914 case ROUND_MOD_EXPR:
3915 case RDIV_EXPR:
3916 case EXACT_DIV_EXPR:
3917 case MIN_EXPR:
3918 case MAX_EXPR:
3919 case BIT_IOR_EXPR:
3920 case BIT_XOR_EXPR:
3921 case BIT_AND_EXPR:
3922 /* Continue with generic binary expression handling. */
3923 break;
3925 default:
3926 gcc_unreachable ();
3929 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3930 || !useless_type_conversion_p (lhs_type, rhs2_type))
3932 error ("type mismatch in binary expression");
3933 debug_generic_stmt (lhs_type);
3934 debug_generic_stmt (rhs1_type);
3935 debug_generic_stmt (rhs2_type);
3936 return true;
3939 return false;
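/* An illustrative well-typed pointer addition (hypothetical GIMPLE):

       q_1 = p_2 + 4;

   must be a POINTER_PLUS_EXPR whose first operand has the same pointer
   type as the LHS and whose second operand has ptrofftype (typically
   sizetype); expressing it as a PLUS_EXPR on the pointer type would be
   rejected by the checks above.  */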
3942 /* Verify a gimple assignment statement STMT with a ternary rhs.
3943 Returns true if anything is wrong. */
3945 static bool
3946 verify_gimple_assign_ternary (gassign *stmt)
3948 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3949 tree lhs = gimple_assign_lhs (stmt);
3950 tree lhs_type = TREE_TYPE (lhs);
3951 tree rhs1 = gimple_assign_rhs1 (stmt);
3952 tree rhs1_type = TREE_TYPE (rhs1);
3953 tree rhs2 = gimple_assign_rhs2 (stmt);
3954 tree rhs2_type = TREE_TYPE (rhs2);
3955 tree rhs3 = gimple_assign_rhs3 (stmt);
3956 tree rhs3_type = TREE_TYPE (rhs3);
3958 if (!is_gimple_reg (lhs))
3960 error ("non-register as LHS of ternary operation");
3961 return true;
3964 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3965 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3966 || !is_gimple_val (rhs2)
3967 || !is_gimple_val (rhs3))
3969 error ("invalid operands in ternary operation");
3970 return true;
3973 /* First handle operations that involve different types. */
3974 switch (rhs_code)
3976 case WIDEN_MULT_PLUS_EXPR:
3977 case WIDEN_MULT_MINUS_EXPR:
3978 if ((!INTEGRAL_TYPE_P (rhs1_type)
3979 && !FIXED_POINT_TYPE_P (rhs1_type))
3980 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3981 || !useless_type_conversion_p (lhs_type, rhs3_type)
3982 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3983 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3985 error ("type mismatch in widening multiply-accumulate expression");
3986 debug_generic_expr (lhs_type);
3987 debug_generic_expr (rhs1_type);
3988 debug_generic_expr (rhs2_type);
3989 debug_generic_expr (rhs3_type);
3990 return true;
3992 break;
3994 case FMA_EXPR:
3995 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3996 || !useless_type_conversion_p (lhs_type, rhs2_type)
3997 || !useless_type_conversion_p (lhs_type, rhs3_type))
3999 error ("type mismatch in fused multiply-add expression");
4000 debug_generic_expr (lhs_type);
4001 debug_generic_expr (rhs1_type);
4002 debug_generic_expr (rhs2_type);
4003 debug_generic_expr (rhs3_type);
4004 return true;
4006 break;
4008 case VEC_COND_EXPR:
4009 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
4010 || TYPE_SIGN (rhs1_type) != SIGNED
4011 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
4012 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4013 != TYPE_VECTOR_SUBPARTS (lhs_type))
4015 error ("the first argument of a VEC_COND_EXPR must be of a signed "
4016 "integral vector type of the same size and number of "
4017 "elements as the result");
4018 debug_generic_expr (lhs_type);
4019 debug_generic_expr (rhs1_type);
4020 return true;
4022 /* Fallthrough. */
4023 case COND_EXPR:
4024 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4025 || !useless_type_conversion_p (lhs_type, rhs3_type))
4027 error ("type mismatch in conditional expression");
4028 debug_generic_expr (lhs_type);
4029 debug_generic_expr (rhs2_type);
4030 debug_generic_expr (rhs3_type);
4031 return true;
4033 break;
4035 case VEC_PERM_EXPR:
4036 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4037 || !useless_type_conversion_p (lhs_type, rhs2_type))
4039 error ("type mismatch in vector permute expression");
4040 debug_generic_expr (lhs_type);
4041 debug_generic_expr (rhs1_type);
4042 debug_generic_expr (rhs2_type);
4043 debug_generic_expr (rhs3_type);
4044 return true;
4047 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4048 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4049 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4051 error ("vector types expected in vector permute expression");
4052 debug_generic_expr (lhs_type);
4053 debug_generic_expr (rhs1_type);
4054 debug_generic_expr (rhs2_type);
4055 debug_generic_expr (rhs3_type);
4056 return true;
4059 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4060 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4061 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4062 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4063 != TYPE_VECTOR_SUBPARTS (lhs_type))
4065 error ("vectors with different element number found "
4066 "in vector permute expression");
4067 debug_generic_expr (lhs_type);
4068 debug_generic_expr (rhs1_type);
4069 debug_generic_expr (rhs2_type);
4070 debug_generic_expr (rhs3_type);
4071 return true;
4074 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4075 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4076 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4078 error ("invalid mask type in vector permute expression");
4079 debug_generic_expr (lhs_type);
4080 debug_generic_expr (rhs1_type);
4081 debug_generic_expr (rhs2_type);
4082 debug_generic_expr (rhs3_type);
4083 return true;
4086 return false;
4088 case SAD_EXPR:
4089 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4090 || !useless_type_conversion_p (lhs_type, rhs3_type)
4091 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4092 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4093 > GET_MODE_BITSIZE (GET_MODE_INNER
4094 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4096 error ("type mismatch in sad expression");
4097 debug_generic_expr (lhs_type);
4098 debug_generic_expr (rhs1_type);
4099 debug_generic_expr (rhs2_type);
4100 debug_generic_expr (rhs3_type);
4101 return true;
4104 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4105 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4106 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4108 error ("vector types expected in sad expression");
4109 debug_generic_expr (lhs_type);
4110 debug_generic_expr (rhs1_type);
4111 debug_generic_expr (rhs2_type);
4112 debug_generic_expr (rhs3_type);
4113 return true;
4116 return false;
4118 case DOT_PROD_EXPR:
4119 case REALIGN_LOAD_EXPR:
4120 /* FIXME. */
4121 return false;
4123 default:
4124 gcc_unreachable ();
4126 return false;
4129 /* Verify a gimple assignment statement STMT with a single rhs.
4130 Returns true if anything is wrong. */
4132 static bool
4133 verify_gimple_assign_single (gassign *stmt)
4135 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4136 tree lhs = gimple_assign_lhs (stmt);
4137 tree lhs_type = TREE_TYPE (lhs);
4138 tree rhs1 = gimple_assign_rhs1 (stmt);
4139 tree rhs1_type = TREE_TYPE (rhs1);
4140 bool res = false;
4142 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4144 error ("non-trivial conversion at assignment");
4145 debug_generic_expr (lhs_type);
4146 debug_generic_expr (rhs1_type);
4147 return true;
4150 if (gimple_clobber_p (stmt)
4151 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4153 error ("non-decl/MEM_REF LHS in clobber statement");
4154 debug_generic_expr (lhs);
4155 return true;
4158 if (handled_component_p (lhs)
4159 || TREE_CODE (lhs) == MEM_REF
4160 || TREE_CODE (lhs) == TARGET_MEM_REF)
4161 res |= verify_types_in_gimple_reference (lhs, true);
4163 /* Special codes we cannot handle via their class. */
4164 switch (rhs_code)
4166 case ADDR_EXPR:
4168 tree op = TREE_OPERAND (rhs1, 0);
4169 if (!is_gimple_addressable (op))
4171 error ("invalid operand in unary expression");
4172 return true;
4175 /* Technically there is no longer a need for matching types, but
4176 gimple hygiene asks for this check. In LTO we can end up
4177 combining incompatible units and thus end up with addresses
4178 of globals that change their type to a common one. */
4179 if (!in_lto_p
4180 && !types_compatible_p (TREE_TYPE (op),
4181 TREE_TYPE (TREE_TYPE (rhs1)))
4182 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4183 TREE_TYPE (op)))
4185 error ("type mismatch in address expression");
4186 debug_generic_stmt (TREE_TYPE (rhs1));
4187 debug_generic_stmt (TREE_TYPE (op));
4188 return true;
4191 return verify_types_in_gimple_reference (op, true);
4194 /* tcc_reference */
4195 case INDIRECT_REF:
4196 error ("INDIRECT_REF in gimple IL");
4197 return true;
4199 case COMPONENT_REF:
4200 case BIT_FIELD_REF:
4201 case ARRAY_REF:
4202 case ARRAY_RANGE_REF:
4203 case VIEW_CONVERT_EXPR:
4204 case REALPART_EXPR:
4205 case IMAGPART_EXPR:
4206 case TARGET_MEM_REF:
4207 case MEM_REF:
4208 if (!is_gimple_reg (lhs)
4209 && is_gimple_reg_type (TREE_TYPE (lhs)))
4211 error ("invalid rhs for gimple memory store");
4212 debug_generic_stmt (lhs);
4213 debug_generic_stmt (rhs1);
4214 return true;
4216 return res || verify_types_in_gimple_reference (rhs1, false);
4218 /* tcc_constant */
4219 case SSA_NAME:
4220 case INTEGER_CST:
4221 case REAL_CST:
4222 case FIXED_CST:
4223 case COMPLEX_CST:
4224 case VECTOR_CST:
4225 case STRING_CST:
4226 return res;
4228 /* tcc_declaration */
4229 case CONST_DECL:
4230 return res;
4231 case VAR_DECL:
4232 case PARM_DECL:
4233 if (!is_gimple_reg (lhs)
4234 && !is_gimple_reg (rhs1)
4235 && is_gimple_reg_type (TREE_TYPE (lhs)))
4237 error ("invalid rhs for gimple memory store");
4238 debug_generic_stmt (lhs);
4239 debug_generic_stmt (rhs1);
4240 return true;
4242 return res;
4244 case CONSTRUCTOR:
4245 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4247 unsigned int i;
4248 tree elt_i, elt_v, elt_t = NULL_TREE;
4250 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4251 return res;
4252 /* For vector CONSTRUCTORs we require that either it is an empty
4253 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4254 (then the element count must be correct to cover the whole
4255 outer vector and the index must be NULL on all elements), or it
4256 is a CONSTRUCTOR of scalar elements, where as an exception we
4257 allow a smaller number of elements (assuming zero filling) and
4258 consecutive indexes as compared to NULL indexes (such
4259 CONSTRUCTORs can appear in the IL from FEs). */
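/* A sketch of the accepted shapes, assuming a 4-element outer vector
   type; the names v2_a, v2_b and s0..s2 below are illustrative only,
   not taken from real dumps:
     {}                 empty CONSTRUCTOR, a zero vector, always OK
     { v2_a, v2_b }     two 2-element vectors, NULL indexes, covers all 4
     { s0, s1, s2 }     scalars, the trailing lane is zero-filled
     { [1] = s0 }       rejected, the index does not start at lane 0.  */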
4260 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4262 if (elt_t == NULL_TREE)
4264 elt_t = TREE_TYPE (elt_v);
4265 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4267 tree elt_t = TREE_TYPE (elt_v);
4268 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4269 TREE_TYPE (elt_t)))
4271 error ("incorrect type of vector CONSTRUCTOR"
4272 " elements");
4273 debug_generic_stmt (rhs1);
4274 return true;
4276 else if (CONSTRUCTOR_NELTS (rhs1)
4277 * TYPE_VECTOR_SUBPARTS (elt_t)
4278 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4280 error ("incorrect number of vector CONSTRUCTOR"
4281 " elements");
4282 debug_generic_stmt (rhs1);
4283 return true;
4286 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4287 elt_t))
4289 error ("incorrect type of vector CONSTRUCTOR elements");
4290 debug_generic_stmt (rhs1);
4291 return true;
4293 else if (CONSTRUCTOR_NELTS (rhs1)
4294 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4296 error ("incorrect number of vector CONSTRUCTOR elements");
4297 debug_generic_stmt (rhs1);
4298 return true;
4301 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4303 error ("incorrect type of vector CONSTRUCTOR elements");
4304 debug_generic_stmt (rhs1);
4305 return true;
4307 if (elt_i != NULL_TREE
4308 && (TREE_CODE (elt_t) == VECTOR_TYPE
4309 || TREE_CODE (elt_i) != INTEGER_CST
4310 || compare_tree_int (elt_i, i) != 0))
4312 error ("vector CONSTRUCTOR with non-NULL element index");
4313 debug_generic_stmt (rhs1);
4314 return true;
4316 if (!is_gimple_val (elt_v))
4318 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4319 debug_generic_stmt (rhs1);
4320 return true;
4324 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4326 error ("non-vector CONSTRUCTOR with elements");
4327 debug_generic_stmt (rhs1);
4328 return true;
4330 return res;
4331 case OBJ_TYPE_REF:
4332 case ASSERT_EXPR:
4333 case WITH_SIZE_EXPR:
4334 /* FIXME. */
4335 return res;
4337 default:;
4340 return res;
4343 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4344 is a problem, otherwise false. */
4346 static bool
4347 verify_gimple_assign (gassign *stmt)
4349 switch (gimple_assign_rhs_class (stmt))
4351 case GIMPLE_SINGLE_RHS:
4352 return verify_gimple_assign_single (stmt);
4354 case GIMPLE_UNARY_RHS:
4355 return verify_gimple_assign_unary (stmt);
4357 case GIMPLE_BINARY_RHS:
4358 return verify_gimple_assign_binary (stmt);
4360 case GIMPLE_TERNARY_RHS:
4361 return verify_gimple_assign_ternary (stmt);
4363 default:
4364 gcc_unreachable ();
4368 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4369 is a problem, otherwise false. */
4371 static bool
4372 verify_gimple_return (greturn *stmt)
4374 tree op = gimple_return_retval (stmt);
4375 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4377 /* We cannot test for present return values as we do not fix up missing
4378 return values from the original source. */
4379 if (op == NULL)
4380 return false;
4382 if (!is_gimple_val (op)
4383 && TREE_CODE (op) != RESULT_DECL)
4385 error ("invalid operand in return statement");
4386 debug_generic_stmt (op);
4387 return true;
4390 if ((TREE_CODE (op) == RESULT_DECL
4391 && DECL_BY_REFERENCE (op))
4392 || (TREE_CODE (op) == SSA_NAME
4393 && SSA_NAME_VAR (op)
4394 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4395 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4396 op = TREE_TYPE (op);
4398 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4400 error ("invalid conversion in return statement");
4401 debug_generic_stmt (restype);
4402 debug_generic_stmt (TREE_TYPE (op));
4403 return true;
4406 return false;
4410 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4411 is a problem, otherwise false. */
4413 static bool
4414 verify_gimple_goto (ggoto *stmt)
4416 tree dest = gimple_goto_dest (stmt);
4418 /* ??? We have two canonical forms of direct goto destinations, a
4419 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4420 if (TREE_CODE (dest) != LABEL_DECL
4421 && (!is_gimple_val (dest)
4422 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4424 error ("goto destination is neither a label nor a pointer");
4425 return true;
4428 return false;
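/* For illustration (hand-written GIMPLE, not verified dump output):
   a direct jump carries a bare LABEL_DECL destination,

     goto <D.1234>;

   while a computed jump carries a pointer value,

     goto ptr_1;

   where ptr_1 could have been produced from the GNU extension
   ptr_1 = &&lab; in the source.  */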
4431 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4432 is a problem, otherwise false. */
4434 static bool
4435 verify_gimple_switch (gswitch *stmt)
4437 unsigned int i, n;
4438 tree elt, prev_upper_bound = NULL_TREE;
4439 tree index_type, elt_type = NULL_TREE;
4441 if (!is_gimple_val (gimple_switch_index (stmt)))
4443 error ("invalid operand to switch statement");
4444 debug_generic_stmt (gimple_switch_index (stmt));
4445 return true;
4448 index_type = TREE_TYPE (gimple_switch_index (stmt));
4449 if (! INTEGRAL_TYPE_P (index_type))
4451 error ("non-integral type switch statement");
4452 debug_generic_expr (index_type);
4453 return true;
4456 elt = gimple_switch_label (stmt, 0);
4457 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4459 error ("invalid default case label in switch statement");
4460 debug_generic_expr (elt);
4461 return true;
4464 n = gimple_switch_num_labels (stmt);
4465 for (i = 1; i < n; i++)
4467 elt = gimple_switch_label (stmt, i);
4469 if (! CASE_LOW (elt))
4471 error ("invalid case label in switch statement");
4472 debug_generic_expr (elt);
4473 return true;
4475 if (CASE_HIGH (elt)
4476 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4478 error ("invalid case range in switch statement");
4479 debug_generic_expr (elt);
4480 return true;
4483 if (elt_type)
4485 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4486 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4488 error ("type mismatch for case label in switch statement");
4489 debug_generic_expr (elt);
4490 return true;
4493 else
4495 elt_type = TREE_TYPE (CASE_LOW (elt));
4496 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4498 error ("type precision mismatch in switch statement");
4499 return true;
4503 if (prev_upper_bound)
4505 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4507 error ("case labels not sorted in switch statement");
4508 return true;
4512 prev_upper_bound = CASE_HIGH (elt);
4513 if (! prev_upper_bound)
4514 prev_upper_bound = CASE_LOW (elt);
4517 return false;
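/* For illustration (hand-written GIMPLE, not verified dump output),
   a switch satisfying all of the checks above:

     switch (i_1) <default: <L3>, case 1: <L0>, case 4 ... 7: <L1>,
                   case 9: <L2>>

   The default label comes first with no CASE_LOW/CASE_HIGH, every
   other label has a CASE_LOW, ranges are non-empty, labels are sorted
   and use one common type whose precision does not exceed that of the
   index type.  */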
4520 /* Verify a gimple debug statement STMT.
4521 Returns true if anything is wrong. */
4523 static bool
4524 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4526 /* There isn't much that could be wrong in a gimple debug stmt. A
4527 gimple debug bind stmt, for example, maps a tree (usually
4528 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4529 component or member of an aggregate type) to another tree that
4530 can be an arbitrary expression. These stmts expand into debug
4531 insns, and are converted to debug notes by var-tracking.c. */
4532 return false;
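/* For illustration (hand-written GIMPLE, not verified dump output),
   a debug bind appears in dumps as

     # DEBUG x => x_3 + 1

   binding user variable x to an arbitrary value expression, which is
   why nothing beyond structural sanity can be checked here.  */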
4535 /* Verify a gimple label statement STMT.
4536 Returns true if anything is wrong. */
4538 static bool
4539 verify_gimple_label (glabel *stmt)
4541 tree decl = gimple_label_label (stmt);
4542 int uid;
4543 bool err = false;
4545 if (TREE_CODE (decl) != LABEL_DECL)
4546 return true;
4547 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4548 && DECL_CONTEXT (decl) != current_function_decl)
4550 error ("label's context is not the current function decl");
4551 err |= true;
4554 uid = LABEL_DECL_UID (decl);
4555 if (cfun->cfg
4556 && (uid == -1
4557 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4559 error ("incorrect entry in label_to_block_map");
4560 err |= true;
4563 uid = EH_LANDING_PAD_NR (decl);
4564 if (uid)
4566 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4567 if (decl != lp->post_landing_pad)
4569 error ("incorrect setting of landing pad number");
4570 err |= true;
4574 return err;
4577 /* Verify a gimple cond statement STMT.
4578 Returns true if anything is wrong. */
4580 static bool
4581 verify_gimple_cond (gcond *stmt)
4583 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4585 error ("invalid comparison code in gimple cond");
4586 return true;
4588 if (!(!gimple_cond_true_label (stmt)
4589 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4590 || !(!gimple_cond_false_label (stmt)
4591 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4593 error ("invalid labels in gimple cond");
4594 return true;
4597 return verify_gimple_comparison (boolean_type_node,
4598 gimple_cond_lhs (stmt),
4599 gimple_cond_rhs (stmt));
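/* For illustration (hand-written GIMPLE, not verified dump output),
   a well-formed cond is

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   i.e. a tcc_comparison code whose operands compare to a boolean,
   with the destination labels implicit in the CFG edges.  */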
4602 /* Verify the GIMPLE statement STMT. Returns true if there is an
4603 error, otherwise false. */
4605 static bool
4606 verify_gimple_stmt (gimple stmt)
4608 switch (gimple_code (stmt))
4610 case GIMPLE_ASSIGN:
4611 return verify_gimple_assign (as_a <gassign *> (stmt));
4613 case GIMPLE_LABEL:
4614 return verify_gimple_label (as_a <glabel *> (stmt));
4616 case GIMPLE_CALL:
4617 return verify_gimple_call (as_a <gcall *> (stmt));
4619 case GIMPLE_COND:
4620 return verify_gimple_cond (as_a <gcond *> (stmt));
4622 case GIMPLE_GOTO:
4623 return verify_gimple_goto (as_a <ggoto *> (stmt));
4625 case GIMPLE_SWITCH:
4626 return verify_gimple_switch (as_a <gswitch *> (stmt));
4628 case GIMPLE_RETURN:
4629 return verify_gimple_return (as_a <greturn *> (stmt));
4631 case GIMPLE_ASM:
4632 return false;
4634 case GIMPLE_TRANSACTION:
4635 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4637 /* Tuples that do not have tree operands. */
4638 case GIMPLE_NOP:
4639 case GIMPLE_PREDICT:
4640 case GIMPLE_RESX:
4641 case GIMPLE_EH_DISPATCH:
4642 case GIMPLE_EH_MUST_NOT_THROW:
4643 return false;
4645 CASE_GIMPLE_OMP:
4646 /* OpenMP directives are validated by the FE and never operated
4647 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4648 non-gimple expressions when the main index variable has had
4649 its address taken. This does not affect the loop itself
4650 because the header of a GIMPLE_OMP_FOR is merely used to determine
4651 how to set up the parallel iteration. */
4652 return false;
4654 case GIMPLE_DEBUG:
4655 return verify_gimple_debug (stmt);
4657 default:
4658 gcc_unreachable ();
4662 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4663 and false otherwise. */
4665 static bool
4666 verify_gimple_phi (gimple phi)
4668 bool err = false;
4669 unsigned i;
4670 tree phi_result = gimple_phi_result (phi);
4671 bool virtual_p;
4673 if (!phi_result)
4675 error ("invalid PHI result");
4676 return true;
4679 virtual_p = virtual_operand_p (phi_result);
4680 if (TREE_CODE (phi_result) != SSA_NAME
4681 || (virtual_p
4682 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4684 error ("invalid PHI result");
4685 err = true;
4688 for (i = 0; i < gimple_phi_num_args (phi); i++)
4690 tree t = gimple_phi_arg_def (phi, i);
4692 if (!t)
4694 error ("missing PHI def");
4695 err |= true;
4696 continue;
4698 /* Addressable variables do have SSA_NAMEs but they
4699 are not considered gimple values. */
4700 else if ((TREE_CODE (t) == SSA_NAME
4701 && virtual_p != virtual_operand_p (t))
4702 || (virtual_p
4703 && (TREE_CODE (t) != SSA_NAME
4704 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4705 || (!virtual_p
4706 && !is_gimple_val (t)))
4708 error ("invalid PHI argument");
4709 debug_generic_expr (t);
4710 err |= true;
4712 #ifdef ENABLE_TYPES_CHECKING
4713 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4715 error ("incompatible types in PHI argument %u", i);
4716 debug_generic_stmt (TREE_TYPE (phi_result));
4717 debug_generic_stmt (TREE_TYPE (t));
4718 err |= true;
4720 #endif
4723 return err;
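/* For illustration (hand-written GIMPLE, not verified dump output),
   a scalar PHI such as

     x_4 = PHI <x_2(3), x_3(5)>

   needs an SSA_NAME result and GIMPLE values as arguments, while a
   virtual PHI such as

     .MEM_6 = PHI <.MEM_4(3), .MEM_5(5)>

   must use the function's single virtual operand throughout; mixing
   the two kinds in one PHI is rejected above.  */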
4726 /* Verify the GIMPLE statements inside the sequence STMTS. */
4728 static bool
4729 verify_gimple_in_seq_2 (gimple_seq stmts)
4731 gimple_stmt_iterator ittr;
4732 bool err = false;
4734 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4736 gimple stmt = gsi_stmt (ittr);
4738 switch (gimple_code (stmt))
4740 case GIMPLE_BIND:
4741 err |= verify_gimple_in_seq_2 (
4742 gimple_bind_body (as_a <gbind *> (stmt)));
4743 break;
4745 case GIMPLE_TRY:
4746 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4747 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4748 break;
4750 case GIMPLE_EH_FILTER:
4751 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4752 break;
4754 case GIMPLE_EH_ELSE:
4756 geh_else *eh_else = as_a <geh_else *> (stmt);
4757 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4758 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4760 break;
4762 case GIMPLE_CATCH:
4763 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4764 as_a <gcatch *> (stmt)));
4765 break;
4767 case GIMPLE_TRANSACTION:
4768 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4769 break;
4771 default:
4773 bool err2 = verify_gimple_stmt (stmt);
4774 if (err2)
4775 debug_gimple_stmt (stmt);
4776 err |= err2;
4781 return err;
4784 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4785 is a problem, otherwise false. */
4787 static bool
4788 verify_gimple_transaction (gtransaction *stmt)
4790 tree lab = gimple_transaction_label (stmt);
4791 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4792 return true;
4793 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4797 /* Verify the GIMPLE statements inside the statement list STMTS. */
4799 DEBUG_FUNCTION void
4800 verify_gimple_in_seq (gimple_seq stmts)
4802 timevar_push (TV_TREE_STMT_VERIFY);
4803 if (verify_gimple_in_seq_2 (stmts))
4804 internal_error ("verify_gimple failed");
4805 timevar_pop (TV_TREE_STMT_VERIFY);
4808 /* Return true when T can be shared. */
4810 static bool
4811 tree_node_can_be_shared (tree t)
4813 if (IS_TYPE_OR_DECL_P (t)
4814 || is_gimple_min_invariant (t)
4815 || TREE_CODE (t) == SSA_NAME
4816 || t == error_mark_node
4817 || TREE_CODE (t) == IDENTIFIER_NODE)
4818 return true;
4820 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4821 return true;
4823 if (DECL_P (t))
4824 return true;
4826 return false;
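/* E.g. types, decls, SSA names and invariants such as integer
   constants may appear many times in the IL, whereas a freshly built
   expression like one returned by build2 must be unshared (see
   unshare_expr) before each additional use.  */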
4829 /* Called via walk_tree. Verify tree sharing. */
4831 static tree
4832 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4834 hash_set<void *> *visited = (hash_set<void *> *) data;
4836 if (tree_node_can_be_shared (*tp))
4838 *walk_subtrees = false;
4839 return NULL;
4842 if (visited->add (*tp))
4843 return *tp;
4845 return NULL;
4848 /* Called via walk_gimple_stmt. Verify tree sharing. */
4850 static tree
4851 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4853 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4854 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4857 static bool eh_error_found;
4858 bool
4859 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4860 hash_set<gimple> *visited)
4862 if (!visited->contains (stmt))
4864 error ("dead STMT in EH table");
4865 debug_gimple_stmt (stmt);
4866 eh_error_found = true;
4868 return true;
4871 /* Verify that the block of location LOC is in BLOCKS. */
4873 static bool
4874 verify_location (hash_set<tree> *blocks, location_t loc)
4876 tree block = LOCATION_BLOCK (loc);
4877 if (block != NULL_TREE
4878 && !blocks->contains (block))
4880 error ("location references block not in block tree");
4881 return true;
4883 if (block != NULL_TREE)
4884 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4885 return false;
4888 /* Called via walk_tree. Verify that expressions have no blocks. */
4890 static tree
4891 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4893 if (!EXPR_P (*tp))
4895 *walk_subtrees = false;
4896 return NULL;
4899 location_t loc = EXPR_LOCATION (*tp);
4900 if (LOCATION_BLOCK (loc) != NULL)
4901 return *tp;
4903 return NULL;
4906 /* Called via walk_tree. Verify locations of expressions. */
4908 static tree
4909 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4911 hash_set<tree> *blocks = (hash_set<tree> *) data;
4913 if (TREE_CODE (*tp) == VAR_DECL
4914 && DECL_HAS_DEBUG_EXPR_P (*tp))
4916 tree t = DECL_DEBUG_EXPR (*tp);
4917 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4918 if (addr)
4919 return addr;
4921 if ((TREE_CODE (*tp) == VAR_DECL
4922 || TREE_CODE (*tp) == PARM_DECL
4923 || TREE_CODE (*tp) == RESULT_DECL)
4924 && DECL_HAS_VALUE_EXPR_P (*tp))
4926 tree t = DECL_VALUE_EXPR (*tp);
4927 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4928 if (addr)
4929 return addr;
4932 if (!EXPR_P (*tp))
4934 *walk_subtrees = false;
4935 return NULL;
4938 location_t loc = EXPR_LOCATION (*tp);
4939 if (verify_location (blocks, loc))
4940 return *tp;
4942 return NULL;
4945 /* Called via walk_gimple_op. Verify locations of expressions. */
4947 static tree
4948 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4950 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4951 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4954 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4956 static void
4957 collect_subblocks (hash_set<tree> *blocks, tree block)
4959 tree t;
4960 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4962 blocks->add (t);
4963 collect_subblocks (blocks, t);
4967 /* Verify the GIMPLE statements in the CFG of FN. */
4969 DEBUG_FUNCTION void
4970 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4972 basic_block bb;
4973 bool err = false;
4975 timevar_push (TV_TREE_STMT_VERIFY);
4976 hash_set<void *> visited;
4977 hash_set<gimple> visited_stmts;
4979 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4980 hash_set<tree> blocks;
4981 if (DECL_INITIAL (fn->decl))
4983 blocks.add (DECL_INITIAL (fn->decl));
4984 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4987 FOR_EACH_BB_FN (bb, fn)
4989 gimple_stmt_iterator gsi;
4991 for (gphi_iterator gpi = gsi_start_phis (bb);
4992 !gsi_end_p (gpi);
4993 gsi_next (&gpi))
4995 gphi *phi = gpi.phi ();
4996 bool err2 = false;
4997 unsigned i;
4999 visited_stmts.add (phi);
5001 if (gimple_bb (phi) != bb)
5003 error ("gimple_bb (phi) is set to a wrong basic block");
5004 err2 = true;
5007 err2 |= verify_gimple_phi (phi);
5009 /* Only PHI arguments have locations. */
5010 if (gimple_location (phi) != UNKNOWN_LOCATION)
5012 error ("PHI node with location");
5013 err2 = true;
5016 for (i = 0; i < gimple_phi_num_args (phi); i++)
5018 tree arg = gimple_phi_arg_def (phi, i);
5019 tree addr = walk_tree (&arg, verify_node_sharing_1,
5020 &visited, NULL);
5021 if (addr)
5023 error ("incorrect sharing of tree nodes");
5024 debug_generic_expr (addr);
5025 err2 |= true;
5027 location_t loc = gimple_phi_arg_location (phi, i);
5028 if (virtual_operand_p (gimple_phi_result (phi))
5029 && loc != UNKNOWN_LOCATION)
5031 error ("virtual PHI with argument locations");
5032 err2 = true;
5034 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5035 if (addr)
5037 debug_generic_expr (addr);
5038 err2 = true;
5040 err2 |= verify_location (&blocks, loc);
5043 if (err2)
5044 debug_gimple_stmt (phi);
5045 err |= err2;
5048 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5050 gimple stmt = gsi_stmt (gsi);
5051 bool err2 = false;
5052 struct walk_stmt_info wi;
5053 tree addr;
5054 int lp_nr;
5056 visited_stmts.add (stmt);
5058 if (gimple_bb (stmt) != bb)
5060 error ("gimple_bb (stmt) is set to a wrong basic block");
5061 err2 = true;
5064 err2 |= verify_gimple_stmt (stmt);
5065 err2 |= verify_location (&blocks, gimple_location (stmt));
5067 memset (&wi, 0, sizeof (wi));
5068 wi.info = (void *) &visited;
5069 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5070 if (addr)
5072 error ("incorrect sharing of tree nodes");
5073 debug_generic_expr (addr);
5074 err2 |= true;
5077 memset (&wi, 0, sizeof (wi));
5078 wi.info = (void *) &blocks;
5079 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5080 if (addr)
5082 debug_generic_expr (addr);
5083 err2 |= true;
5086 /* ??? Instead of not checking these stmts at all the walker
5087 should know its context via wi. */
5088 if (!is_gimple_debug (stmt)
5089 && !is_gimple_omp (stmt))
5091 memset (&wi, 0, sizeof (wi));
5092 addr = walk_gimple_op (stmt, verify_expr, &wi);
5093 if (addr)
5095 debug_generic_expr (addr);
5096 inform (gimple_location (stmt), "in statement");
5097 err2 |= true;
5101 /* If the statement is marked as part of an EH region, then it is
5102 expected that the statement could throw. Verify that when we
5103 have optimizations that simplify statements such that we prove
5104 they cannot throw, we update the other data structures
5105 to match. */
5106 lp_nr = lookup_stmt_eh_lp (stmt);
5107 if (lp_nr > 0)
5109 if (!stmt_could_throw_p (stmt))
5111 if (verify_nothrow)
5113 error ("statement marked for throw, but doesn%'t");
5114 err2 |= true;
5117 else if (!gsi_one_before_end_p (gsi))
5119 error ("statement marked for throw in middle of block");
5120 err2 |= true;
5124 if (err2)
5125 debug_gimple_stmt (stmt);
5126 err |= err2;
5130 eh_error_found = false;
5131 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5132 if (eh_table)
5133 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5134 (&visited_stmts);
5136 if (err || eh_error_found)
5137 internal_error ("verify_gimple failed");
5139 verify_histograms ();
5140 timevar_pop (TV_TREE_STMT_VERIFY);
5144 /* Verifies that the flow information is OK. */
5146 static int
5147 gimple_verify_flow_info (void)
5149 int err = 0;
5150 basic_block bb;
5151 gimple_stmt_iterator gsi;
5152 gimple stmt;
5153 edge e;
5154 edge_iterator ei;
5156 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5157 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5159 error ("ENTRY_BLOCK has IL associated with it");
5160 err = 1;
5163 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5164 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5166 error ("EXIT_BLOCK has IL associated with it");
5167 err = 1;
5170 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5171 if (e->flags & EDGE_FALLTHRU)
5173 error ("fallthru to exit from bb %d", e->src->index);
5174 err = 1;
5177 FOR_EACH_BB_FN (bb, cfun)
5179 bool found_ctrl_stmt = false;
5181 stmt = NULL;
5183 /* Skip labels on the start of basic block. */
5184 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5186 tree label;
5187 gimple prev_stmt = stmt;
5189 stmt = gsi_stmt (gsi);
5191 if (gimple_code (stmt) != GIMPLE_LABEL)
5192 break;
5194 label = gimple_label_label (as_a <glabel *> (stmt));
5195 if (prev_stmt && DECL_NONLOCAL (label))
5197 error ("nonlocal label ");
5198 print_generic_expr (stderr, label, 0);
5199 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5200 bb->index);
5201 err = 1;
5204 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5206 error ("EH landing pad label ");
5207 print_generic_expr (stderr, label, 0);
5208 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5209 bb->index);
5210 err = 1;
5213 if (label_to_block (label) != bb)
5215 error ("label ");
5216 print_generic_expr (stderr, label, 0);
5217 fprintf (stderr, " to block does not match in bb %d",
5218 bb->index);
5219 err = 1;
5222 if (decl_function_context (label) != current_function_decl)
5224 error ("label ");
5225 print_generic_expr (stderr, label, 0);
5226 fprintf (stderr, " has incorrect context in bb %d",
5227 bb->index);
5228 err = 1;
5232 /* Verify that the body of basic block BB is free of control flow. */
5233 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5235 gimple stmt = gsi_stmt (gsi);
5237 if (found_ctrl_stmt)
5239 error ("control flow in the middle of basic block %d",
5240 bb->index);
5241 err = 1;
5244 if (stmt_ends_bb_p (stmt))
5245 found_ctrl_stmt = true;
5247 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5249 error ("label ");
5250 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5251 fprintf (stderr, " in the middle of basic block %d", bb->index);
5252 err = 1;
5256 gsi = gsi_last_bb (bb);
5257 if (gsi_end_p (gsi))
5258 continue;
5260 stmt = gsi_stmt (gsi);
5262 if (gimple_code (stmt) == GIMPLE_LABEL)
5263 continue;
5265 err |= verify_eh_edges (stmt);
5267 if (is_ctrl_stmt (stmt))
5269 FOR_EACH_EDGE (e, ei, bb->succs)
5270 if (e->flags & EDGE_FALLTHRU)
5272 error ("fallthru edge after a control statement in bb %d",
5273 bb->index);
5274 err = 1;
5278 if (gimple_code (stmt) != GIMPLE_COND)
5280 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5281 after anything other than an if statement. */
5282 FOR_EACH_EDGE (e, ei, bb->succs)
5283 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5285 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5286 bb->index);
5287 err = 1;
5291 switch (gimple_code (stmt))
5293 case GIMPLE_COND:
5295 edge true_edge;
5296 edge false_edge;
5298 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5300 if (!true_edge
5301 || !false_edge
5302 || !(true_edge->flags & EDGE_TRUE_VALUE)
5303 || !(false_edge->flags & EDGE_FALSE_VALUE)
5304 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5305 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5306 || EDGE_COUNT (bb->succs) >= 3)
5308 error ("wrong outgoing edge flags at end of bb %d",
5309 bb->index);
5310 err = 1;
5313 break;
5315 case GIMPLE_GOTO:
5316 if (simple_goto_p (stmt))
5318 error ("explicit goto at end of bb %d", bb->index);
5319 err = 1;
5321 else
5323 /* FIXME. We should double check that the labels in the
5324 destination blocks have their address taken. */
5325 FOR_EACH_EDGE (e, ei, bb->succs)
5326 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5327 | EDGE_FALSE_VALUE))
5328 || !(e->flags & EDGE_ABNORMAL))
5330 error ("wrong outgoing edge flags at end of bb %d",
5331 bb->index);
5332 err = 1;
5335 break;
5337 case GIMPLE_CALL:
5338 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5339 break;
5340 /* ... fallthru ... */
5341 case GIMPLE_RETURN:
5342 if (!single_succ_p (bb)
5343 || (single_succ_edge (bb)->flags
5344 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5345 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5347 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5348 err = 1;
5350 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5352 error ("return edge does not point to exit in bb %d",
5353 bb->index);
5354 err = 1;
5356 break;
5358 case GIMPLE_SWITCH:
5360 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5361 tree prev;
5362 edge e;
5363 size_t i, n;
5365 n = gimple_switch_num_labels (switch_stmt);
5367 /* Mark all the destination basic blocks. */
5368 for (i = 0; i < n; ++i)
5370 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5371 basic_block label_bb = label_to_block (lab);
5372 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5373 label_bb->aux = (void *)1;
5376 /* Verify that the case labels are sorted. */
5377 prev = gimple_switch_label (switch_stmt, 0);
5378 for (i = 1; i < n; ++i)
5380 tree c = gimple_switch_label (switch_stmt, i);
5381 if (!CASE_LOW (c))
5383 error ("found default case not at the start of "
5384 "case vector");
5385 err = 1;
5386 continue;
5388 if (CASE_LOW (prev)
5389 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5391 error ("case labels not sorted: ");
5392 print_generic_expr (stderr, prev, 0);
5393 fprintf (stderr," is greater than ");
5394 print_generic_expr (stderr, c, 0);
5395 fprintf (stderr," but comes before it.\n");
5396 err = 1;
5398 prev = c;
5400 /* VRP will remove the default case if it can prove it will
5401 never be executed. So do not verify there always exists
5402 a default case here. */
5404 FOR_EACH_EDGE (e, ei, bb->succs)
5406 if (!e->dest->aux)
5408 error ("extra outgoing edge %d->%d",
5409 bb->index, e->dest->index);
5410 err = 1;
5413 e->dest->aux = (void *)2;
5414 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5415 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5417 error ("wrong outgoing edge flags at end of bb %d",
5418 bb->index);
5419 err = 1;
5423 /* Check that we have all of them. */
5424 for (i = 0; i < n; ++i)
5426 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5427 basic_block label_bb = label_to_block (lab);
5429 if (label_bb->aux != (void *)2)
5431 error ("missing edge %i->%i", bb->index, label_bb->index);
5432 err = 1;
5436 FOR_EACH_EDGE (e, ei, bb->succs)
5437 e->dest->aux = (void *)0;
5439 break;
5441 case GIMPLE_EH_DISPATCH:
5442 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5443 break;
5445 default:
5446 break;
5450 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5451 verify_dominators (CDI_DOMINATORS);
5453 return err;
5457 /* Updates phi nodes after creating a forwarder block joined
5458 by edge FALLTHRU. */
5460 static void
5461 gimple_make_forwarder_block (edge fallthru)
5463 edge e;
5464 edge_iterator ei;
5465 basic_block dummy, bb;
5466 tree var;
5467 gphi_iterator gsi;
5469 dummy = fallthru->src;
5470 bb = fallthru->dest;
5472 if (single_pred_p (bb))
5473 return;
5475 /* If we redirected a branch we must create new PHI nodes at the
5476 start of BB. */
5477 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5479 gphi *phi, *new_phi;
5481 phi = gsi.phi ();
5482 var = gimple_phi_result (phi);
5483 new_phi = create_phi_node (var, bb);
5484 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5485 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5486 UNKNOWN_LOCATION);
5489 /* Add the arguments we have stored on edges. */
5490 FOR_EACH_EDGE (e, ei, bb->preds)
5492 if (e == fallthru)
5493 continue;
5495 flush_pending_stmts (e);
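/* For illustration (hand-written GIMPLE, not verified dump output):
   if DUMMY ended up with

     x_3 = PHI <x_1(2), x_2(4)>

   the loop above renames its result and rebuilds the PHI in BB as

     dummy: x_5 = PHI <x_1(2), x_2(4)>
     bb:    x_3 = PHI <x_5(dummy)>

   so the FALLTHRU edge carries the new name into BB while the other
   predecessors of BB supply their arguments via flush_pending_stmts.  */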
5500 /* Return a non-special label in the head of basic block BB.
5501 Create one if it doesn't exist. */
5503 tree
5504 gimple_block_label (basic_block bb)
5506 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5507 bool first = true;
5508 tree label;
5509 glabel *stmt;
5511 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5513 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5514 if (!stmt)
5515 break;
5516 label = gimple_label_label (stmt);
5517 if (!DECL_NONLOCAL (label))
5519 if (!first)
5520 gsi_move_before (&i, &s);
5521 return label;
5525 label = create_artificial_label (UNKNOWN_LOCATION);
5526 stmt = gimple_build_label (label);
5527 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5528 return label;
5532 /* Attempt to perform edge redirection by replacing a possibly complex
5533 jump instruction by a goto or by removing the jump completely.
5534 This can apply only if all edges now point to the same block. The
5535 parameters and return values are equivalent to
5536 redirect_edge_and_branch. */
5538 static edge
5539 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5541 basic_block src = e->src;
5542 gimple_stmt_iterator i;
5543 gimple stmt;
5545 /* We can replace or remove a complex jump only when we have exactly
5546 two edges. */
5547 if (EDGE_COUNT (src->succs) != 2
5548 /* Verify that all targets will be TARGET. Specifically, the
5549 edge that is not E must also go to TARGET. */
5550 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5551 return NULL;
5553 i = gsi_last_bb (src);
5554 if (gsi_end_p (i))
5555 return NULL;
5557 stmt = gsi_stmt (i);
5559 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5561 gsi_remove (&i, true);
5562 e = ssa_redirect_edge (e, target);
5563 e->flags = EDGE_FALLTHRU;
5564 return e;
5567 return NULL;
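/* E.g. (illustrative): if SRC ends in

     if (a_1 != 0) goto <bb 3>; else goto <bb 4>;

   and the edge to bb 4 is being redirected to bb 3, the other edge
   already reaches TARGET, so the GIMPLE_COND is removed and the
   redirected edge comes back marked EDGE_FALLTHRU.  */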
5571 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5572 edge representing the redirected branch. */
5574 static edge
5575 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5577 basic_block bb = e->src;
5578 gimple_stmt_iterator gsi;
5579 edge ret;
5580 gimple stmt;
5582 if (e->flags & EDGE_ABNORMAL)
5583 return NULL;
5585 if (e->dest == dest)
5586 return NULL;
5588 if (e->flags & EDGE_EH)
5589 return redirect_eh_edge (e, dest);
5591 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5593 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5594 if (ret)
5595 return ret;
5598 gsi = gsi_last_bb (bb);
5599 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5601 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5603 case GIMPLE_COND:
5604 /* For COND_EXPR, we only need to redirect the edge. */
5605 break;
5607 case GIMPLE_GOTO:
5608 /* No non-abnormal edges should lead from a non-simple goto, and
5609 simple ones should be represented implicitly. */
5610 gcc_unreachable ();
5612 case GIMPLE_SWITCH:
5614 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5615 tree label = gimple_block_label (dest);
5616 tree cases = get_cases_for_edge (e, switch_stmt);
5618 /* If we have a list of cases associated with E, then use it
5619 as it's a lot faster than walking the entire case vector. */
5620 if (cases)
5622 edge e2 = find_edge (e->src, dest);
5623 tree last, first;
5625 first = cases;
5626 while (cases)
5628 last = cases;
5629 CASE_LABEL (cases) = label;
5630 cases = CASE_CHAIN (cases);
5633 /* If there was already an edge in the CFG, then we need
5634 to move all the cases associated with E to E2. */
5635 if (e2)
5637 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5639 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5640 CASE_CHAIN (cases2) = first;
5642 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5644 else
5646 size_t i, n = gimple_switch_num_labels (switch_stmt);
5648 for (i = 0; i < n; i++)
5650 tree elt = gimple_switch_label (switch_stmt, i);
5651 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5652 CASE_LABEL (elt) = label;
5656 break;
5658 case GIMPLE_ASM:
5660 gasm *asm_stmt = as_a <gasm *> (stmt);
5661 int i, n = gimple_asm_nlabels (asm_stmt);
5662 tree label = NULL;
5664 for (i = 0; i < n; ++i)
5666 tree cons = gimple_asm_label_op (asm_stmt, i);
5667 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5669 if (!label)
5670 label = gimple_block_label (dest);
5671 TREE_VALUE (cons) = label;
5675 /* If we didn't find any label matching the former edge in the
5676 asm labels, we must be redirecting the fallthrough
5677 edge. */
5678 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5680 break;
5682 case GIMPLE_RETURN:
5683 gsi_remove (&gsi, true);
5684 e->flags |= EDGE_FALLTHRU;
5685 break;
5687 case GIMPLE_OMP_RETURN:
5688 case GIMPLE_OMP_CONTINUE:
5689 case GIMPLE_OMP_SECTIONS_SWITCH:
5690 case GIMPLE_OMP_FOR:
5691 /* The edges from OMP constructs can be simply redirected. */
5692 break;
5694 case GIMPLE_EH_DISPATCH:
5695 if (!(e->flags & EDGE_FALLTHRU))
5696 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5697 break;
5699 case GIMPLE_TRANSACTION:
5700 /* The ABORT edge has a stored label associated with it, otherwise
5701 the edges are simply redirectable. */
5702 if (e->flags == 0)
5703 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5704 gimple_block_label (dest));
5705 break;
5707 default:
5708 /* Otherwise it must be a fallthru edge, and we don't need to
5709 do anything besides redirecting it. */
5710 gcc_assert (e->flags & EDGE_FALLTHRU);
5711 break;
5714 /* Update/insert PHI nodes as necessary. */
5716 /* Now update the edges in the CFG. */
5717 e = ssa_redirect_edge (e, dest);
5719 return e;
5722 /* Returns true if it is possible to remove edge E by redirecting
5723 it to the destination of the other edge from E->src. */
5725 static bool
5726 gimple_can_remove_branch_p (const_edge e)
5728 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5729 return false;
5731 return true;
5734 /* Simple wrapper, as we can always redirect fallthru edges. */
5736 static basic_block
5737 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5739 e = gimple_redirect_edge_and_branch (e, dest);
5740 gcc_assert (e);
5742 return NULL;
5746 /* Splits basic block BB after statement STMT (but at least after the
5747 labels). If STMT is NULL, BB is split just after the labels. */
5749 static basic_block
5750 gimple_split_block (basic_block bb, void *stmt)
5752 gimple_stmt_iterator gsi;
5753 gimple_stmt_iterator gsi_tgt;
5754 gimple_seq list;
5755 basic_block new_bb;
5756 edge e;
5757 edge_iterator ei;
5759 new_bb = create_empty_bb (bb);
5761 /* Redirect the outgoing edges. */
5762 new_bb->succs = bb->succs;
5763 bb->succs = NULL;
5764 FOR_EACH_EDGE (e, ei, new_bb->succs)
5765 e->src = new_bb;
5767 /* Get a stmt iterator pointing to the first stmt to move. */
5768 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5769 gsi = gsi_after_labels (bb);
5770 else
5772 gsi = gsi_for_stmt ((gimple) stmt);
5773 gsi_next (&gsi);
5776 /* Move everything from GSI to the new basic block. */
5777 if (gsi_end_p (gsi))
5778 return new_bb;
5780 /* Split the statement list - avoid re-creating new containers as this
5781 brings ugly quadratic memory consumption in the inliner.
5782 (We are still quadratic since we need to update stmt BB pointers,
5783 sadly.) */
5784 gsi_split_seq_before (&gsi, &list);
5785 set_bb_seq (new_bb, list);
5786 for (gsi_tgt = gsi_start (list);
5787 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5788 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5790 return new_bb;
5794 /* Moves basic block BB after block AFTER. */
5796 static bool
5797 gimple_move_block_after (basic_block bb, basic_block after)
5799 if (bb->prev_bb == after)
5800 return true;
5802 unlink_block (bb);
5803 link_block (bb, after);
5805 return true;
5809 /* Return TRUE if block BB has no executable statements, otherwise return
5810 FALSE. */
5812 static bool
5813 gimple_empty_block_p (basic_block bb)
5815 /* BB must have no executable statements. */
5816 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5817 if (phi_nodes (bb))
5818 return false;
5819 if (gsi_end_p (gsi))
5820 return true;
5821 if (is_gimple_debug (gsi_stmt (gsi)))
5822 gsi_next_nondebug (&gsi);
5823 return gsi_end_p (gsi);
5827 /* Split a basic block if it ends with a conditional branch and if the
5828 other part of the block is not empty. */
5830 static basic_block
5831 gimple_split_block_before_cond_jump (basic_block bb)
5833 gimple last, split_point;
5834 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5835 if (gsi_end_p (gsi))
5836 return NULL;
5837 last = gsi_stmt (gsi);
5838 if (gimple_code (last) != GIMPLE_COND
5839 && gimple_code (last) != GIMPLE_SWITCH)
5840 return NULL;
5841 gsi_prev_nondebug (&gsi);
5842 split_point = gsi_stmt (gsi);
5843 return split_block (bb, split_point)->dest;
5847 /* Return true if the basic block BB can be duplicated. */
5849 static bool
5850 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5852 return true;
5855 /* Create a duplicate of the basic block BB. NOTE: This does not
5856 preserve SSA form. */
5858 static basic_block
5859 gimple_duplicate_bb (basic_block bb)
5861 basic_block new_bb;
5862 gimple_stmt_iterator gsi_tgt;
5864 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5866 /* Copy the PHI nodes. We ignore PHI node arguments here because
5867 the incoming edges have not been set up yet. */
5868 for (gphi_iterator gpi = gsi_start_phis (bb);
5869 !gsi_end_p (gpi);
5870 gsi_next (&gpi))
5872 gphi *phi, *copy;
5873 phi = gpi.phi ();
5874 copy = create_phi_node (NULL_TREE, new_bb);
5875 create_new_def_for (gimple_phi_result (phi), copy,
5876 gimple_phi_result_ptr (copy));
5877 gimple_set_uid (copy, gimple_uid (phi));
5880 gsi_tgt = gsi_start_bb (new_bb);
5881 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5882 !gsi_end_p (gsi);
5883 gsi_next (&gsi))
5885 def_operand_p def_p;
5886 ssa_op_iter op_iter;
5887 tree lhs;
5888 gimple stmt, copy;
5890 stmt = gsi_stmt (gsi);
5891 if (gimple_code (stmt) == GIMPLE_LABEL)
5892 continue;
5894 /* Don't duplicate label debug stmts. */
5895 if (gimple_debug_bind_p (stmt)
5896 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5897 == LABEL_DECL)
5898 continue;
5900 /* Create a new copy of STMT and duplicate STMT's virtual
5901 operands. */
5902 copy = gimple_copy (stmt);
5903 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5905 maybe_duplicate_eh_stmt (copy, stmt);
5906 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5908 /* When copying around a stmt writing into a local non-user
5909 aggregate, make sure it won't share a stack slot with other
5910 vars. */
5911 lhs = gimple_get_lhs (stmt);
5912 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5914 tree base = get_base_address (lhs);
5915 if (base
5916 && (TREE_CODE (base) == VAR_DECL
5917 || TREE_CODE (base) == RESULT_DECL)
5918 && DECL_IGNORED_P (base)
5919 && !TREE_STATIC (base)
5920 && !DECL_EXTERNAL (base)
5921 && (TREE_CODE (base) != VAR_DECL
5922 || !DECL_HAS_VALUE_EXPR_P (base)))
5923 DECL_NONSHAREABLE (base) = 1;
5926 /* Create new names for all the definitions created by COPY and
5927 add replacement mappings for each new name. */
5928 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5929 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5932 return new_bb;
5935 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5937 static void
5938 add_phi_args_after_copy_edge (edge e_copy)
5940 basic_block bb, bb_copy = e_copy->src, dest;
5941 edge e;
5942 edge_iterator ei;
5943 gphi *phi, *phi_copy;
5944 tree def;
5945 gphi_iterator psi, psi_copy;
5947 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5948 return;
5950 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5952 if (e_copy->dest->flags & BB_DUPLICATED)
5953 dest = get_bb_original (e_copy->dest);
5954 else
5955 dest = e_copy->dest;
5957 e = find_edge (bb, dest);
5958 if (!e)
5960 /* During loop unrolling the target of the latch edge is copied.
5961 In this case we are not looking for the edge to dest, but for
5962 the edge to the duplicated block whose original was dest. */
5963 FOR_EACH_EDGE (e, ei, bb->succs)
5965 if ((e->dest->flags & BB_DUPLICATED)
5966 && get_bb_original (e->dest) == dest)
5967 break;
5970 gcc_assert (e != NULL);
5973 for (psi = gsi_start_phis (e->dest),
5974 psi_copy = gsi_start_phis (e_copy->dest);
5975 !gsi_end_p (psi);
5976 gsi_next (&psi), gsi_next (&psi_copy))
5978 phi = psi.phi ();
5979 phi_copy = psi_copy.phi ();
5980 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5981 add_phi_arg (phi_copy, def, e_copy,
5982 gimple_phi_arg_location_from_edge (phi, e));
5987 /* Basic block BB_COPY was created by code duplication. Add phi node
5988 arguments for edges going out of BB_COPY. The blocks that were
5989 duplicated have BB_DUPLICATED set. */
5991 void
5992 add_phi_args_after_copy_bb (basic_block bb_copy)
5994 edge e_copy;
5995 edge_iterator ei;
5997 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5999 add_phi_args_after_copy_edge (e_copy);
6003 /* Blocks in the REGION_COPY array of length N_REGION were created by
6004 duplication of basic blocks. Add phi node arguments for edges
6005 going from these blocks. If E_COPY is not NULL, also add
6006 phi node arguments for its destination. */
6008 void
6009 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6010 edge e_copy)
6012 unsigned i;
6014 for (i = 0; i < n_region; i++)
6015 region_copy[i]->flags |= BB_DUPLICATED;
6017 for (i = 0; i < n_region; i++)
6018 add_phi_args_after_copy_bb (region_copy[i]);
6019 if (e_copy)
6020 add_phi_args_after_copy_edge (e_copy);
6022 for (i = 0; i < n_region; i++)
6023 region_copy[i]->flags &= ~BB_DUPLICATED;
6026 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6027 important exit edge EXIT. By important we mean that no SSA name defined
6028 inside the region is live over the other exit edges of the region. All entry
6029 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6030 to the duplicate of the region. Dominance and loop information is
6031 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6032 UPDATE_DOMINANCE is false then we assume that the caller will update the
6033 dominance information after calling this function. The new basic
6034 blocks are stored to REGION_COPY in the same order as in REGION,
6035 provided that REGION_COPY is not NULL.
6036 The function returns false if it is unable to copy the region,
6037 true otherwise. */
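/* A sketch of the primary use, loop header copying (see the pass in
   tree-ssa-loop-ch.c): conceptually

     while (cond) body;

   becomes

     if (cond)
       do body; while (cond);

   with the copied header performing the initial test outside the
   loop and the original header left as the bottom-of-loop test.  */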
6039 bool
6040 gimple_duplicate_sese_region (edge entry, edge exit,
6041 basic_block *region, unsigned n_region,
6042 basic_block *region_copy,
6043 bool update_dominance)
6045 unsigned i;
6046 bool free_region_copy = false, copying_header = false;
6047 struct loop *loop = entry->dest->loop_father;
6048 edge exit_copy;
6049 vec<basic_block> doms;
6050 edge redirected;
6051 int total_freq = 0, entry_freq = 0;
6052 gcov_type total_count = 0, entry_count = 0;
6054 if (!can_copy_bbs_p (region, n_region))
6055 return false;
6057 /* Some sanity checking. Note that we do not check for all possible
6058 misuses of the functions. I.e. if you ask to copy something weird,
6059 it will work, but the state of structures probably will not be
6060 correct. */
6061 for (i = 0; i < n_region; i++)
6063 /* We do not handle subloops, i.e. all the blocks must belong to the
6064 same loop. */
6065 if (region[i]->loop_father != loop)
6066 return false;
6068 if (region[i] != entry->dest
6069 && region[i] == loop->header)
6070 return false;
6073 /* In case the function is used for loop header copying (which is the primary
6074 use), ensure that EXIT and its copy will be new latch and entry edges. */
6075 if (loop->header == entry->dest)
6077 copying_header = true;
6079 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6080 return false;
6082 for (i = 0; i < n_region; i++)
6083 if (region[i] != exit->src
6084 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6085 return false;
6088 initialize_original_copy_tables ();
6090 if (copying_header)
6091 set_loop_copy (loop, loop_outer (loop));
6092 else
6093 set_loop_copy (loop, loop);
6095 if (!region_copy)
6097 region_copy = XNEWVEC (basic_block, n_region);
6098 free_region_copy = true;
6101 /* Record blocks outside the region that are dominated by something
6102 inside. */
6103 if (update_dominance)
6105 doms.create (0);
6106 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6109 if (entry->dest->count)
6111 total_count = entry->dest->count;
6112 entry_count = entry->count;
6113 /* Fix up corner cases, to avoid division by zero or creation of negative
6114 frequencies. */
6115 if (entry_count > total_count)
6116 entry_count = total_count;
6118 else
6120 total_freq = entry->dest->frequency;
6121 entry_freq = EDGE_FREQUENCY (entry);
6122 /* Fix up corner cases, to avoid division by zero or creation of negative
6123 frequencies. */
6124 if (total_freq == 0)
6125 total_freq = 1;
6126 else if (entry_freq > total_freq)
6127 entry_freq = total_freq;
6130 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6131 split_edge_bb_loc (entry), update_dominance);
6132 if (total_count)
6134 scale_bbs_frequencies_gcov_type (region, n_region,
6135 total_count - entry_count,
6136 total_count);
6137 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6138 total_count);
6140 else
6142 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6143 total_freq);
6144 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6147 if (copying_header)
6149 loop->header = exit->dest;
6150 loop->latch = exit->src;
6153 /* Redirect the entry and add the phi node arguments. */
6154 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6155 gcc_assert (redirected != NULL);
6156 flush_pending_stmts (entry);
6158 /* Concerning updating of dominators: We must recount dominators
6159 for entry block and its copy. Anything that is outside of the
6160 region, but was dominated by something inside needs recounting as
6161 well. */
6162 if (update_dominance)
6164 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6165 doms.safe_push (get_bb_original (entry->dest));
6166 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6167 doms.release ();
6170 /* Add the other PHI node arguments. */
6171 add_phi_args_after_copy (region_copy, n_region, NULL);
6173 if (free_region_copy)
6174 free (region_copy);
6176 free_original_copy_tables ();
6177 return true;
6180 /* Checks if BB is part of the region defined by N_REGION BBS. */
6181 static bool
6182 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6184 unsigned int n;
6186 for (n = 0; n < n_region; n++)
6188 if (bb == bbs[n])
6189 return true;
6191 return false;
6194 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6195 are stored to REGION_COPY in the same order as they appear
6196 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6197 the region, EXIT an exit from it. The condition guarding EXIT
6198 is moved to ENTRY. Returns true if duplication succeeds, false
6199 otherwise.
6201 For example,
6203 some_code;
6204 if (cond)
6205 A;
6206 else
6207 B;
6209 is transformed to
6211 if (cond)
6212 {
6213 some_code;
6214 A;
6215 }
6216 else
6217 {
6218 some_code;
6219 B;
6220 }
6221 */
6223 bool
6224 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6225 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6226 basic_block *region_copy ATTRIBUTE_UNUSED)
6228 unsigned i;
6229 bool free_region_copy = false;
6230 struct loop *loop = exit->dest->loop_father;
6231 struct loop *orig_loop = entry->dest->loop_father;
6232 basic_block switch_bb, entry_bb, nentry_bb;
6233 vec<basic_block> doms;
6234 int total_freq = 0, exit_freq = 0;
6235 gcov_type total_count = 0, exit_count = 0;
6236 edge exits[2], nexits[2], e;
6237 gimple_stmt_iterator gsi;
6238 gimple cond_stmt;
6239 edge sorig, snew;
6240 basic_block exit_bb;
6241 gphi_iterator psi;
6242 gphi *phi;
6243 tree def;
6244 struct loop *target, *aloop, *cloop;
6246 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6247 exits[0] = exit;
6248 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6250 if (!can_copy_bbs_p (region, n_region))
6251 return false;
6253 initialize_original_copy_tables ();
6254 set_loop_copy (orig_loop, loop);
6256 target = loop;
6257 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6259 if (bb_part_of_region_p (aloop->header, region, n_region))
6261 cloop = duplicate_loop (aloop, target);
6262 duplicate_subloops (aloop, cloop);
6266 if (!region_copy)
6268 region_copy = XNEWVEC (basic_block, n_region);
6269 free_region_copy = true;
6272 gcc_assert (!need_ssa_update_p (cfun));
6274 /* Record blocks outside the region that are dominated by something
6275 inside. */
6276 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6278 if (exit->src->count)
6280 total_count = exit->src->count;
6281 exit_count = exit->count;
6282 /* Fix up corner cases, to avoid division by zero or creation of negative
6283 frequencies. */
6284 if (exit_count > total_count)
6285 exit_count = total_count;
6287 else
6289 total_freq = exit->src->frequency;
6290 exit_freq = EDGE_FREQUENCY (exit);
6291 /* Fix up corner cases, to avoid division by zero or creation of negative
6292 frequencies. */
6293 if (total_freq == 0)
6294 total_freq = 1;
6295 if (exit_freq > total_freq)
6296 exit_freq = total_freq;
6299 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6300 split_edge_bb_loc (exit), true);
6301 if (total_count)
6303 scale_bbs_frequencies_gcov_type (region, n_region,
6304 total_count - exit_count,
6305 total_count);
6306 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6307 total_count);
6309 else
6311 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6312 total_freq);
6313 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6316 /* Create the switch block, and put the exit condition to it. */
6317 entry_bb = entry->dest;
6318 nentry_bb = get_bb_copy (entry_bb);
6319 if (!last_stmt (entry->src)
6320 || !stmt_ends_bb_p (last_stmt (entry->src)))
6321 switch_bb = entry->src;
6322 else
6323 switch_bb = split_edge (entry);
6324 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6326 gsi = gsi_last_bb (switch_bb);
6327 cond_stmt = last_stmt (exit->src);
6328 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6329 cond_stmt = gimple_copy (cond_stmt);
6331 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6333 sorig = single_succ_edge (switch_bb);
6334 sorig->flags = exits[1]->flags;
6335 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6337 /* Register the new edge from SWITCH_BB in loop exit lists. */
6338 rescan_loop_exit (snew, true, false);
6340 /* Add the PHI node arguments. */
6341 add_phi_args_after_copy (region_copy, n_region, snew);
6343 /* Get rid of now superfluous conditions and associated edges (and phi node
6344 arguments). */
6345 exit_bb = exit->dest;
6347 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6348 PENDING_STMT (e) = NULL;
6350 /* The latch of ORIG_LOOP was copied, and so was the backedge
6351 to the original header. We redirect this backedge to EXIT_BB. */
6352 for (i = 0; i < n_region; i++)
6353 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6355 gcc_assert (single_succ_edge (region_copy[i]));
6356 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6357 PENDING_STMT (e) = NULL;
6358 for (psi = gsi_start_phis (exit_bb);
6359 !gsi_end_p (psi);
6360 gsi_next (&psi))
6362 phi = psi.phi ();
6363 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6364 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6367 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6368 PENDING_STMT (e) = NULL;
6370 /* Anything that is outside of the region, but was dominated by something
6371 inside needs to update dominance info. */
6372 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6373 doms.release ();
6374 /* Update the SSA web. */
6375 update_ssa (TODO_update_ssa);
6377 if (free_region_copy)
6378 free (region_copy);
6380 free_original_copy_tables ();
6381 return true;
6384 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6385 adding blocks when the dominator traversal reaches EXIT. This
6386 function silently assumes that ENTRY strictly dominates EXIT. */
6388 void
6389 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6390 vec<basic_block> *bbs_p)
6392 basic_block son;
6394 for (son = first_dom_son (CDI_DOMINATORS, entry);
6395 son;
6396 son = next_dom_son (CDI_DOMINATORS, son))
6398 bbs_p->safe_push (son);
6399 if (son != exit)
6400 gather_blocks_in_sese_region (son, exit, bbs_p);
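/* A minimal usage sketch (mirroring the caller further below in this
   file); ENTRY itself must be pushed manually since the walk does not
   add it:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   After the call, BBS holds every block of the SESE region.  */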
6404 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6405 The duplicates are recorded in VARS_MAP. */
6407 static void
6408 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6409 tree to_context)
6411 tree t = *tp, new_t;
6412 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6414 if (DECL_CONTEXT (t) == to_context)
6415 return;
6417 bool existed;
6418 tree &loc = vars_map->get_or_insert (t, &existed);
6420 if (!existed)
6422 if (SSA_VAR_P (t))
6424 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6425 add_local_decl (f, new_t);
6427 else
6429 gcc_assert (TREE_CODE (t) == CONST_DECL);
6430 new_t = copy_node (t);
6432 DECL_CONTEXT (new_t) = to_context;
6434 loc = new_t;
6436 else
6437 new_t = loc;
6439 *tp = new_t;
6443 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6444 VARS_MAP maps old ssa names and var_decls to the new ones. */
6446 static tree
6447 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6448 tree to_context)
6450 tree new_name;
6452 gcc_assert (!virtual_operand_p (name));
6454 tree *loc = vars_map->get (name);
6456 if (!loc)
6458 tree decl = SSA_NAME_VAR (name);
6459 if (decl)
6461 replace_by_duplicate_decl (&decl, vars_map, to_context);
6462 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6463 decl, SSA_NAME_DEF_STMT (name));
6464 if (SSA_NAME_IS_DEFAULT_DEF (name))
6465 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6466 decl, new_name);
6468 else
6469 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6470 name, SSA_NAME_DEF_STMT (name));
6472 vars_map->put (name, new_name);
6474 else
6475 new_name = *loc;
6477 return new_name;
6480 struct move_stmt_d
6482 tree orig_block;
6483 tree new_block;
6484 tree from_context;
6485 tree to_context;
6486 hash_map<tree, tree> *vars_map;
6487 htab_t new_label_map;
6488 hash_map<void *, void *> *eh_map;
6489 bool remap_decls_p;
6492 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6493 contained in *TP if it was previously ORIG_BLOCK, and change the
6494 DECL_CONTEXT of every local variable referenced in *TP. */
6496 static tree
6497 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6499 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6500 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6501 tree t = *tp;
6503 if (EXPR_P (t))
6505 tree block = TREE_BLOCK (t);
6506 if (block == p->orig_block
6507 || (p->orig_block == NULL_TREE
6508 && block != NULL_TREE))
6509 TREE_SET_BLOCK (t, p->new_block);
6510 #ifdef ENABLE_CHECKING
6511 else if (block != NULL_TREE)
6513 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6514 block = BLOCK_SUPERCONTEXT (block);
6515 gcc_assert (block == p->orig_block);
6517 #endif
6519 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6521 if (TREE_CODE (t) == SSA_NAME)
6522 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6523 else if (TREE_CODE (t) == LABEL_DECL)
6525 if (p->new_label_map)
6527 struct tree_map in, *out;
6528 in.base.from = t;
6529 out = (struct tree_map *)
6530 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6531 if (out)
6532 *tp = t = out->to;
6535 DECL_CONTEXT (t) = p->to_context;
6537 else if (p->remap_decls_p)
6539 /* Replace T with its duplicate. T should no longer appear in the
6540 parent function, so this looks wasteful; however, it may appear
6541 in referenced_vars, and more importantly, as virtual operands of
6542 statements, and in alias lists of other variables. It would be
6543 quite difficult to expunge it from all those places. ??? It might
6544 suffice to do this for addressable variables. */
6545 if ((TREE_CODE (t) == VAR_DECL
6546 && !is_global_var (t))
6547 || TREE_CODE (t) == CONST_DECL)
6548 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6550 *walk_subtrees = 0;
6552 else if (TYPE_P (t))
6553 *walk_subtrees = 0;
6555 return NULL_TREE;
6558 /* Helper for move_stmt_r. Given an EH region number for the source
6559 function, map that to the duplicate EH region number in the dest. */
6561 static int
6562 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6564 eh_region old_r, new_r;
6566 old_r = get_eh_region_from_number (old_nr);
6567 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6569 return new_r->index;
6572 /* Similar, but operate on INTEGER_CSTs. */
6574 static tree
6575 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6577 int old_nr, new_nr;
6579 old_nr = tree_to_shwi (old_t_nr);
6580 new_nr = move_stmt_eh_region_nr (old_nr, p);
6582 return build_int_cst (integer_type_node, new_nr);
6585 /* Like move_stmt_op, but for gimple statements.
6587 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6588 contained in the current statement in *GSI_P and change the
6589 DECL_CONTEXT of every local variable referenced in the current
6590 statement. */
6592 static tree
6593 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6594 struct walk_stmt_info *wi)
6596 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6597 gimple stmt = gsi_stmt (*gsi_p);
6598 tree block = gimple_block (stmt);
6600 if (block == p->orig_block
6601 || (p->orig_block == NULL_TREE
6602 && block != NULL_TREE))
6603 gimple_set_block (stmt, p->new_block);
6605 switch (gimple_code (stmt))
6607 case GIMPLE_CALL:
6608 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6610 tree r, fndecl = gimple_call_fndecl (stmt);
6611 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6612 switch (DECL_FUNCTION_CODE (fndecl))
6614 case BUILT_IN_EH_COPY_VALUES:
6615 r = gimple_call_arg (stmt, 1);
6616 r = move_stmt_eh_region_tree_nr (r, p);
6617 gimple_call_set_arg (stmt, 1, r);
6618 /* FALLTHRU */
6620 case BUILT_IN_EH_POINTER:
6621 case BUILT_IN_EH_FILTER:
6622 r = gimple_call_arg (stmt, 0);
6623 r = move_stmt_eh_region_tree_nr (r, p);
6624 gimple_call_set_arg (stmt, 0, r);
6625 break;
6627 default:
6628 break;
6631 break;
6633 case GIMPLE_RESX:
6635 gresx *resx_stmt = as_a <gresx *> (stmt);
6636 int r = gimple_resx_region (resx_stmt);
6637 r = move_stmt_eh_region_nr (r, p);
6638 gimple_resx_set_region (resx_stmt, r);
6640 break;
6642 case GIMPLE_EH_DISPATCH:
6644 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6645 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6646 r = move_stmt_eh_region_nr (r, p);
6647 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6649 break;
6651 case GIMPLE_OMP_RETURN:
6652 case GIMPLE_OMP_CONTINUE:
6653 break;
6654 default:
6655 if (is_gimple_omp (stmt))
6657 /* Do not remap variables inside OMP directives. Variables
6658 referenced in clauses and directive header belong to the
6659 parent function and should not be moved into the child
6660 function. */
6661 bool save_remap_decls_p = p->remap_decls_p;
6662 p->remap_decls_p = false;
6663 *handled_ops_p = true;
6665 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6666 move_stmt_op, wi);
6668 p->remap_decls_p = save_remap_decls_p;
6670 break;
6673 return NULL_TREE;
6676 /* Move basic block BB from function CFUN to function DEST_FN. The
6677 block is moved out of the original linked list and placed after
6678 block AFTER in the new list. Also, the block is removed from the
6679 original array of blocks and placed in DEST_FN's array of blocks.
6680 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6681 updated to reflect the moved edges.
6683 The local variables are remapped to new instances, VARS_MAP is used
6684 to record the mapping. */
6686 static void
6687 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6688 basic_block after, bool update_edge_count_p,
6689 struct move_stmt_d *d)
6691 struct control_flow_graph *cfg;
6692 edge_iterator ei;
6693 edge e;
6694 gimple_stmt_iterator si;
6695 unsigned old_len, new_len;
6697 /* Remove BB from dominance structures. */
6698 delete_from_dominance_info (CDI_DOMINATORS, bb);
6700 /* Move BB from its current loop to the copy in the new function. */
6701 if (current_loops)
6703 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6704 if (new_loop)
6705 bb->loop_father = new_loop;
6708 /* Link BB to the new linked list. */
6709 move_block_after (bb, after);
6711 /* Update the edge count in the corresponding flowgraphs. */
6712 if (update_edge_count_p)
6713 FOR_EACH_EDGE (e, ei, bb->succs)
6715 cfun->cfg->x_n_edges--;
6716 dest_cfun->cfg->x_n_edges++;
6719 /* Remove BB from the original basic block array. */
6720 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6721 cfun->cfg->x_n_basic_blocks--;
6723 /* Grow DEST_CFUN's basic block array if needed. */
6724 cfg = dest_cfun->cfg;
6725 cfg->x_n_basic_blocks++;
6726 if (bb->index >= cfg->x_last_basic_block)
6727 cfg->x_last_basic_block = bb->index + 1;
6729 old_len = vec_safe_length (cfg->x_basic_block_info);
6730 if ((unsigned) cfg->x_last_basic_block >= old_len)
6732 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6733 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6736 (*cfg->x_basic_block_info)[bb->index] = bb;
6738 /* Remap the variables in phi nodes. */
6739 for (gphi_iterator psi = gsi_start_phis (bb);
6740 !gsi_end_p (psi); )
6742 gphi *phi = psi.phi ();
6743 use_operand_p use;
6744 tree op = PHI_RESULT (phi);
6745 ssa_op_iter oi;
6746 unsigned i;
6748 if (virtual_operand_p (op))
6750 /* Remove the phi nodes for virtual operands (alias analysis will be
6751 run for the new function, anyway). */
6752 remove_phi_node (&psi, true);
6753 continue;
6756 SET_PHI_RESULT (phi,
6757 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6758 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6760 op = USE_FROM_PTR (use);
6761 if (TREE_CODE (op) == SSA_NAME)
6762 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6765 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6767 location_t locus = gimple_phi_arg_location (phi, i);
6768 tree block = LOCATION_BLOCK (locus);
6770 if (locus == UNKNOWN_LOCATION)
6771 continue;
6772 if (d->orig_block == NULL_TREE || block == d->orig_block)
6774 if (d->new_block == NULL_TREE)
6775 locus = LOCATION_LOCUS (locus);
6776 else
6777 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6778 gimple_phi_arg_set_location (phi, i, locus);
6782 gsi_next (&psi);
6785 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6787 gimple stmt = gsi_stmt (si);
6788 struct walk_stmt_info wi;
6790 memset (&wi, 0, sizeof (wi));
6791 wi.info = d;
6792 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6794 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6796 tree label = gimple_label_label (label_stmt);
6797 int uid = LABEL_DECL_UID (label);
6799 gcc_assert (uid > -1);
6801 old_len = vec_safe_length (cfg->x_label_to_block_map);
6802 if (old_len <= (unsigned) uid)
6804 new_len = 3 * uid / 2 + 1;
6805 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6808 (*cfg->x_label_to_block_map)[uid] = bb;
6809 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6811 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6813 if (uid >= dest_cfun->cfg->last_label_uid)
6814 dest_cfun->cfg->last_label_uid = uid + 1;
6817 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6818 remove_stmt_from_eh_lp_fn (cfun, stmt);
6820 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6821 gimple_remove_stmt_histograms (cfun, stmt);
6823 /* We cannot leave any operands allocated from the operand caches of
6824 the current function. */
6825 free_stmt_operands (cfun, stmt);
6826 push_cfun (dest_cfun);
6827 update_stmt (stmt);
6828 pop_cfun ();
6831 FOR_EACH_EDGE (e, ei, bb->succs)
6832 if (e->goto_locus != UNKNOWN_LOCATION)
6834 tree block = LOCATION_BLOCK (e->goto_locus);
6835 if (d->orig_block == NULL_TREE
6836 || block == d->orig_block)
6837 e->goto_locus = d->new_block ?
6838 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6839 LOCATION_LOCUS (e->goto_locus);
6843 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6844 the outermost EH region. Use REGION as the incoming base EH region. */
6846 static eh_region
6847 find_outermost_region_in_block (struct function *src_cfun,
6848 basic_block bb, eh_region region)
6850 gimple_stmt_iterator si;
6852 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6854 gimple stmt = gsi_stmt (si);
6855 eh_region stmt_region;
6856 int lp_nr;
6858 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6859 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6860 if (stmt_region)
6862 if (region == NULL)
6863 region = stmt_region;
6864 else if (stmt_region != region)
6866 region = eh_region_outermost (src_cfun, stmt_region, region);
6867 gcc_assert (region != NULL);
6872 return region;
6875 static tree
6876 new_label_mapper (tree decl, void *data)
6878 htab_t hash = (htab_t) data;
6879 struct tree_map *m;
6880 void **slot;
6882 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6884 m = XNEW (struct tree_map);
6885 m->hash = DECL_UID (decl);
6886 m->base.from = decl;
6887 m->to = create_artificial_label (UNKNOWN_LOCATION);
6888 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6889 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6890 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6892 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6893 gcc_assert (*slot == NULL);
6895 *slot = m;
6897 return m->to;
6900 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including its
6901 subblocks. */
6903 static void
6904 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6905 tree to_context)
6907 tree *tp, t;
6909 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6911 t = *tp;
6912 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6913 continue;
6914 replace_by_duplicate_decl (&t, vars_map, to_context);
6915 if (t != *tp)
6917 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6919 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6920 DECL_HAS_VALUE_EXPR_P (t) = 1;
6922 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6923 *tp = t;
6927 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6928 replace_block_vars_by_duplicates (block, vars_map, to_context);
6931 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6932 from FN1 to FN2. */
6934 static void
6935 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6936 struct loop *loop)
6938 /* Discard it from the old loop array. */
6939 (*get_loops (fn1))[loop->num] = NULL;
6941 /* Place it in the new loop array, assigning it a new number. */
6942 loop->num = number_of_loops (fn2);
6943 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6945 /* Recurse to children. */
6946 for (loop = loop->inner; loop; loop = loop->next)
6947 fixup_loop_arrays_after_move (fn1, fn2, loop);
6950 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6951 delimited by ENTRY and EXIT, possibly containing noreturn blocks. */
6953 DEBUG_FUNCTION void
6954 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6956 basic_block bb;
6957 edge_iterator ei;
6958 edge e;
6959 bitmap bbs = BITMAP_ALLOC (NULL);
6960 int i;
6962 gcc_assert (entry != NULL);
6963 gcc_assert (entry != exit);
6964 gcc_assert (bbs_p != NULL);
6966 gcc_assert (bbs_p->length () > 0);
6968 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6969 bitmap_set_bit (bbs, bb->index);
6971 gcc_assert (bitmap_bit_p (bbs, entry->index));
6972 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6974 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6976 if (bb == entry)
6978 gcc_assert (single_pred_p (entry));
6979 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6981 else
6982 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6984 e = ei_edge (ei);
6985 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6988 if (bb == exit)
6990 gcc_assert (single_succ_p (exit));
6991 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6993 else
6994 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6996 e = ei_edge (ei);
6997 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7001 BITMAP_FREE (bbs);
7005 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7006 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7007 single basic block in the original CFG and the new basic block is
7008 returned. DEST_CFUN must not have a CFG yet.
7010 Note that the region need not be a pure SESE region. Blocks inside
7011 the region may contain calls to abort/exit. The only restriction
7012 is that ENTRY_BB should be the only entry point and it must
7013 dominate EXIT_BB.
7015 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7016 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7017 to the new function.
7019 All local variables referenced in the region are assumed to be in
7020 the corresponding BLOCK_VARS and unexpanded variable lists
7021 associated with DEST_CFUN. */
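/* Hedged usage sketch: a caller such as the OMP expansion code, with a
   hypothetical FUNCTION_DECL child_fn whose struct function exists but
   has no CFG yet, would outline a region roughly as:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block merged_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, MERGED_BB stands in for the whole region in the original
   CFG, while the region's blocks now live in CHILD_CFUN.  */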
7023 basic_block
7024 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7025 basic_block exit_bb, tree orig_block)
7027 vec<basic_block> bbs, dom_bbs;
7028 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7029 basic_block after, bb, *entry_pred, *exit_succ, abb;
7030 struct function *saved_cfun = cfun;
7031 int *entry_flag, *exit_flag;
7032 unsigned *entry_prob, *exit_prob;
7033 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7034 edge e;
7035 edge_iterator ei;
7036 htab_t new_label_map;
7037 hash_map<void *, void *> *eh_map;
7038 struct loop *loop = entry_bb->loop_father;
7039 struct loop *loop0 = get_loop (saved_cfun, 0);
7040 struct move_stmt_d d;
7042 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7043 region. */
7044 gcc_assert (entry_bb != exit_bb
7045 && (!exit_bb
7046 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7048 /* Collect all the blocks in the region. Manually add ENTRY_BB
7049 because it won't be added by dfs_enumerate_from. */
7050 bbs.create (0);
7051 bbs.safe_push (entry_bb);
7052 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7053 #ifdef ENABLE_CHECKING
7054 verify_sese (entry_bb, exit_bb, &bbs);
7055 #endif
7057 /* The blocks that used to be dominated by something in BBS will now be
7058 dominated by the new block. */
7059 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7060 bbs.address (),
7061 bbs.length ());
7063 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7064 the predecessor edges to ENTRY_BB and the successor edges to
7065 EXIT_BB so that we can re-attach them to the new basic block that
7066 will replace the region. */
7067 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7068 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7069 entry_flag = XNEWVEC (int, num_entry_edges);
7070 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7071 i = 0;
7072 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7074 entry_prob[i] = e->probability;
7075 entry_flag[i] = e->flags;
7076 entry_pred[i++] = e->src;
7077 remove_edge (e);
7080 if (exit_bb)
7082 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7083 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7084 exit_flag = XNEWVEC (int, num_exit_edges);
7085 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7086 i = 0;
7087 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7089 exit_prob[i] = e->probability;
7090 exit_flag[i] = e->flags;
7091 exit_succ[i++] = e->dest;
7092 remove_edge (e);
7095 else
7097 num_exit_edges = 0;
7098 exit_succ = NULL;
7099 exit_flag = NULL;
7100 exit_prob = NULL;
7103 /* Switch context to the child function to initialize DEST_FN's CFG. */
7104 gcc_assert (dest_cfun->cfg == NULL);
7105 push_cfun (dest_cfun);
7107 init_empty_tree_cfg ();
7109 /* Initialize EH information for the new function. */
7110 eh_map = NULL;
7111 new_label_map = NULL;
7112 if (saved_cfun->eh)
7114 eh_region region = NULL;
7116 FOR_EACH_VEC_ELT (bbs, i, bb)
7117 region = find_outermost_region_in_block (saved_cfun, bb, region);
7119 init_eh_for_function ();
7120 if (region != NULL)
7122 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7123 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7124 new_label_mapper, new_label_map);
7128 /* Initialize an empty loop tree. */
7129 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7130 init_loops_structure (dest_cfun, loops, 1);
7131 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7132 set_loops_for_fn (dest_cfun, loops);
7134 /* Move the outlined loop tree part. */
7135 num_nodes = bbs.length ();
7136 FOR_EACH_VEC_ELT (bbs, i, bb)
7138 if (bb->loop_father->header == bb)
7140 struct loop *this_loop = bb->loop_father;
7141 struct loop *outer = loop_outer (this_loop);
7142 if (outer == loop
7143 /* If the SESE region contains some bbs ending with
7144 a noreturn call, those are considered to belong
7145 to the outermost loop in saved_cfun, rather than
7146 the entry_bb's loop_father. */
7147 || outer == loop0)
7149 if (outer != loop)
7150 num_nodes -= this_loop->num_nodes;
7151 flow_loop_tree_node_remove (bb->loop_father);
7152 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7153 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7156 else if (bb->loop_father == loop0 && loop0 != loop)
7157 num_nodes--;
7159 /* Remove loop exits from the outlined region. */
7160 if (loops_for_fn (saved_cfun)->exits)
7161 FOR_EACH_EDGE (e, ei, bb->succs)
7163 struct loops *l = loops_for_fn (saved_cfun);
7164 loop_exit **slot
7165 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7166 NO_INSERT);
7167 if (slot)
7168 l->exits->clear_slot (slot);
7173 /* Adjust the number of blocks in the tree root of the outlined part. */
7174 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7176 /* Setup a mapping to be used by move_block_to_fn. */
7177 loop->aux = current_loops->tree_root;
7178 loop0->aux = current_loops->tree_root;
7180 pop_cfun ();
7182 /* Move blocks from BBS into DEST_CFUN. */
7183 gcc_assert (bbs.length () >= 2);
7184 after = dest_cfun->cfg->x_entry_block_ptr;
7185 hash_map<tree, tree> vars_map;
7187 memset (&d, 0, sizeof (d));
7188 d.orig_block = orig_block;
7189 d.new_block = DECL_INITIAL (dest_cfun->decl);
7190 d.from_context = cfun->decl;
7191 d.to_context = dest_cfun->decl;
7192 d.vars_map = &vars_map;
7193 d.new_label_map = new_label_map;
7194 d.eh_map = eh_map;
7195 d.remap_decls_p = true;
7197 FOR_EACH_VEC_ELT (bbs, i, bb)
7199 /* No need to update edge counts on the last block. It has
7200 already been updated earlier when we detached the region from
7201 the original CFG. */
7202 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7203 after = bb;
7206 loop->aux = NULL;
7207 loop0->aux = NULL;
7208 /* Loop sizes are no longer correct, fix them up. */
7209 loop->num_nodes -= num_nodes;
7210 for (struct loop *outer = loop_outer (loop);
7211 outer; outer = loop_outer (outer))
7212 outer->num_nodes -= num_nodes;
7213 loop0->num_nodes -= bbs.length () - num_nodes;
7215 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7217 struct loop *aloop;
7218 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7219 if (aloop != NULL)
7221 if (aloop->simduid)
7223 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7224 d.to_context);
7225 dest_cfun->has_simduid_loops = true;
7227 if (aloop->force_vectorize)
7228 dest_cfun->has_force_vectorize_loops = true;
7232 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7233 if (orig_block)
7235 tree block;
7236 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7237 == NULL_TREE);
7238 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7239 = BLOCK_SUBBLOCKS (orig_block);
7240 for (block = BLOCK_SUBBLOCKS (orig_block);
7241 block; block = BLOCK_CHAIN (block))
7242 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7243 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7246 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7247 &vars_map, dest_cfun->decl);
7249 if (new_label_map)
7250 htab_delete (new_label_map);
7251 if (eh_map)
7252 delete eh_map;
7254 /* Rewire the entry and exit blocks. The successor to the entry
7255 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7256 the child function. Similarly, EXIT_BB becomes the predecessor of
7257 DEST_FN's EXIT_BLOCK_PTR. We
7258 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7259 various CFG manipulation functions get to the right CFG.
7261 FIXME, this is silly. The CFG ought to become a parameter to
7262 these helpers. */
7263 push_cfun (dest_cfun);
7264 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7265 if (exit_bb)
7266 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7267 pop_cfun ();
7269 /* Back in the original function, the SESE region has disappeared,
7270 create a new basic block in its place. */
7271 bb = create_empty_bb (entry_pred[0]);
7272 if (current_loops)
7273 add_bb_to_loop (bb, loop);
7274 for (i = 0; i < num_entry_edges; i++)
7276 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7277 e->probability = entry_prob[i];
7280 for (i = 0; i < num_exit_edges; i++)
7282 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7283 e->probability = exit_prob[i];
7286 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7287 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7288 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7289 dom_bbs.release ();
7291 if (exit_bb)
7293 free (exit_prob);
7294 free (exit_flag);
7295 free (exit_succ);
7297 free (entry_prob);
7298 free (entry_flag);
7299 free (entry_pred);
7300 bbs.release ();
7302 return bb;
7306 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7309 void
7310 dump_function_to_file (tree fndecl, FILE *file, int flags)
7312 tree arg, var, old_current_fndecl = current_function_decl;
7313 struct function *dsf;
7314 bool ignore_topmost_bind = false, any_var = false;
7315 basic_block bb;
7316 tree chain;
7317 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7318 && decl_is_tm_clone (fndecl));
7319 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7321 current_function_decl = fndecl;
7322 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7324 arg = DECL_ARGUMENTS (fndecl);
7325 while (arg)
7327 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7328 fprintf (file, " ");
7329 print_generic_expr (file, arg, dump_flags);
7330 if (flags & TDF_VERBOSE)
7331 print_node (file, "", arg, 4);
7332 if (DECL_CHAIN (arg))
7333 fprintf (file, ", ");
7334 arg = DECL_CHAIN (arg);
7336 fprintf (file, ")\n");
7338 if (flags & TDF_VERBOSE)
7339 print_node (file, "", fndecl, 2);
7341 dsf = DECL_STRUCT_FUNCTION (fndecl);
7342 if (dsf && (flags & TDF_EH))
7343 dump_eh_tree (file, dsf);
7345 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7347 dump_node (fndecl, TDF_SLIM | flags, file);
7348 current_function_decl = old_current_fndecl;
7349 return;
7352 /* When GIMPLE is lowered, the variables are no longer available in
7353 BIND_EXPRs, so display them separately. */
7354 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7356 unsigned ix;
7357 ignore_topmost_bind = true;
7359 fprintf (file, "{\n");
7360 if (!vec_safe_is_empty (fun->local_decls))
7361 FOR_EACH_LOCAL_DECL (fun, ix, var)
7363 print_generic_decl (file, var, flags);
7364 if (flags & TDF_VERBOSE)
7365 print_node (file, "", var, 4);
7366 fprintf (file, "\n");
7368 any_var = true;
7370 if (gimple_in_ssa_p (cfun))
7371 for (ix = 1; ix < num_ssa_names; ++ix)
7373 tree name = ssa_name (ix);
7374 if (name && !SSA_NAME_VAR (name))
7376 fprintf (file, " ");
7377 print_generic_expr (file, TREE_TYPE (name), flags);
7378 fprintf (file, " ");
7379 print_generic_expr (file, name, flags);
7380 fprintf (file, ";\n");
7382 any_var = true;
7387 if (fun && fun->decl == fndecl
7388 && fun->cfg
7389 && basic_block_info_for_fn (fun))
7391 /* If the CFG has been built, emit a CFG-based dump. */
7392 if (!ignore_topmost_bind)
7393 fprintf (file, "{\n");
7395 if (any_var && n_basic_blocks_for_fn (fun))
7396 fprintf (file, "\n");
7398 FOR_EACH_BB_FN (bb, fun)
7399 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7401 fprintf (file, "}\n");
7403 else if (DECL_SAVED_TREE (fndecl) == NULL)
7405 /* The function is now in GIMPLE form but the CFG has not been
7406 built yet. Emit the single sequence of GIMPLE statements
7407 that make up its body. */
7408 gimple_seq body = gimple_body (fndecl);
7410 if (gimple_seq_first_stmt (body)
7411 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7412 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7413 print_gimple_seq (file, body, 0, flags);
7414 else
7416 if (!ignore_topmost_bind)
7417 fprintf (file, "{\n");
7419 if (any_var)
7420 fprintf (file, "\n");
7422 print_gimple_seq (file, body, 2, flags);
7423 fprintf (file, "}\n");
7426 else
7428 int indent;
7430 /* Make a tree based dump. */
7431 chain = DECL_SAVED_TREE (fndecl);
7432 if (chain && TREE_CODE (chain) == BIND_EXPR)
7434 if (ignore_topmost_bind)
7436 chain = BIND_EXPR_BODY (chain);
7437 indent = 2;
7439 else
7440 indent = 0;
7442 else
7444 if (!ignore_topmost_bind)
7446 fprintf (file, "{\n");
7447 /* No topmost bind, pretend it's ignored for later. */
7448 ignore_topmost_bind = true;
7450 indent = 2;
7453 if (any_var)
7454 fprintf (file, "\n");
7456 print_generic_stmt_indented (file, chain, flags, indent);
7457 if (ignore_topmost_bind)
7458 fprintf (file, "}\n");
7461 if (flags & TDF_ENUMERATE_LOCALS)
7462 dump_enumerated_decls (file, flags);
7463 fprintf (file, "\n\n");
7465 current_function_decl = old_current_fndecl;
7468 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7470 DEBUG_FUNCTION void
7471 debug_function (tree fn, int flags)
7473 dump_function_to_file (fn, stderr, flags);
7477 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7479 static void
7480 print_pred_bbs (FILE *file, basic_block bb)
7482 edge e;
7483 edge_iterator ei;
7485 FOR_EACH_EDGE (e, ei, bb->preds)
7486 fprintf (file, "bb_%d ", e->src->index);
7490 /* Print on FILE the indexes for the successors of basic_block BB. */
7492 static void
7493 print_succ_bbs (FILE *file, basic_block bb)
7495 edge e;
7496 edge_iterator ei;
7498 FOR_EACH_EDGE (e, ei, bb->succs)
7499 fprintf (file, "bb_%d ", e->dest->index);
7502 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7504 void
7505 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7507 char *s_indent = (char *) alloca ((size_t) indent + 1);
7508 memset ((void *) s_indent, ' ', (size_t) indent);
7509 s_indent[indent] = '\0';
7511 /* Print basic_block's header. */
7512 if (verbosity >= 2)
7514 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7515 print_pred_bbs (file, bb);
7516 fprintf (file, "}, succs = {");
7517 print_succ_bbs (file, bb);
7518 fprintf (file, "})\n");
7521 /* Print basic_block's body. */
7522 if (verbosity >= 3)
7524 fprintf (file, "%s {\n", s_indent);
7525 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7526 fprintf (file, "%s }\n", s_indent);
7530 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7532 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7533 the VERBOSITY level, this outputs the contents of the loop, or just
7534 its structure. */
7536 static void
7537 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7539 char *s_indent;
7540 basic_block bb;
7542 if (loop == NULL)
7543 return;
7545 s_indent = (char *) alloca ((size_t) indent + 1);
7546 memset ((void *) s_indent, ' ', (size_t) indent);
7547 s_indent[indent] = '\0';
7549 /* Print loop's header. */
7550 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7551 if (loop->header)
7552 fprintf (file, "header = %d", loop->header->index);
7553 else
7555 fprintf (file, "deleted)\n");
7556 return;
7558 if (loop->latch)
7559 fprintf (file, ", latch = %d", loop->latch->index);
7560 else
7561 fprintf (file, ", multiple latches");
7562 fprintf (file, ", niter = ");
7563 print_generic_expr (file, loop->nb_iterations, 0);
7565 if (loop->any_upper_bound)
7567 fprintf (file, ", upper_bound = ");
7568 print_decu (loop->nb_iterations_upper_bound, file);
7571 if (loop->any_estimate)
7573 fprintf (file, ", estimate = ");
7574 print_decu (loop->nb_iterations_estimate, file);
7576 fprintf (file, ")\n");
7578 /* Print loop's body. */
7579 if (verbosity >= 1)
7581 fprintf (file, "%s{\n", s_indent);
7582 FOR_EACH_BB_FN (bb, cfun)
7583 if (bb->loop_father == loop)
7584 print_loops_bb (file, bb, indent, verbosity);
7586 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7587 fprintf (file, "%s}\n", s_indent);
7591 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7592 spaces. Depending on the VERBOSITY level, this outputs the contents
7593 of the loop, or just its structure. */
7595 static void
7596 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7597 int verbosity)
7599 if (loop == NULL)
7600 return;
7602 print_loop (file, loop, indent, verbosity);
7603 print_loop_and_siblings (file, loop->next, indent, verbosity);
7606 /* Follow a CFG edge from the entry point of the program, and on entry
7607 of a loop, pretty print the loop structure on FILE. */
7609 void
7610 print_loops (FILE *file, int verbosity)
7612 basic_block bb;
7614 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7615 if (bb && bb->loop_father)
7616 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7619 /* Dump a loop. */
7621 DEBUG_FUNCTION void
7622 debug (struct loop &ref)
7624 print_loop (stderr, &ref, 0, /*verbosity*/0);
7627 DEBUG_FUNCTION void
7628 debug (struct loop *ptr)
7630 if (ptr)
7631 debug (*ptr);
7632 else
7633 fprintf (stderr, "<nil>\n");
7636 /* Dump a loop verbosely. */
7638 DEBUG_FUNCTION void
7639 debug_verbose (struct loop &ref)
7641 print_loop (stderr, &ref, 0, /*verbosity*/3);
7644 DEBUG_FUNCTION void
7645 debug_verbose (struct loop *ptr)
7647 if (ptr)
7648 debug_verbose (*ptr);
7649 else
7650 fprintf (stderr, "<nil>\n");
7654 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
7656 DEBUG_FUNCTION void
7657 debug_loops (int verbosity)
7659 print_loops (stderr, verbosity);
7662 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7664 DEBUG_FUNCTION void
7665 debug_loop (struct loop *loop, int verbosity)
7667 print_loop (stderr, loop, 0, verbosity);
7670 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7671 level. */
7673 DEBUG_FUNCTION void
7674 debug_loop_num (unsigned num, int verbosity)
7676 debug_loop (get_loop (cfun, num), verbosity);
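/* The debug_* helpers above are intended for interactive use from the
   debugger; e.g. from gdb one might type (illustrative):

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)  */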
7679 /* Return true if BB ends with a call, possibly followed by some
7680 instructions that must stay with the call. Return false
7681 otherwise. */
7683 static bool
7684 gimple_block_ends_with_call_p (basic_block bb)
7686 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7687 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7691 /* Return true if BB ends with a conditional branch. Return false
7692 otherwise. */
7694 static bool
7695 gimple_block_ends_with_condjump_p (const_basic_block bb)
7697 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7698 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7702 /* Return true if we need to add fake edge to exit at statement T.
7703 Helper function for gimple_flow_call_edges_add. */
7705 static bool
7706 need_fake_edge_p (gimple t)
7708 tree fndecl = NULL_TREE;
7709 int call_flags = 0;
7711 /* NORETURN and LONGJMP calls already have an edge to exit.
7712 CONST and PURE calls do not need one.
7713 We don't currently check for CONST and PURE here, although
7714 it would be a good idea, because those attributes are
7715 figured out from the RTL in mark_constant_function, and
7716 the counter incrementation code from -fprofile-arcs
7717 leads to different results from -fbranch-probabilities. */
7718 if (is_gimple_call (t))
7720 fndecl = gimple_call_fndecl (t);
7721 call_flags = gimple_call_flags (t);
7724 if (is_gimple_call (t)
7725 && fndecl
7726 && DECL_BUILT_IN (fndecl)
7727 && (call_flags & ECF_NOTHROW)
7728 && !(call_flags & ECF_RETURNS_TWICE)
7729 /* fork() doesn't really return twice, but the effect of
7730 wrapping it in __gcov_fork() which calls __gcov_flush()
7731 and clears the counters before forking has the same
7732 effect as returning twice. Force a fake edge. */
7733 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7734 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7735 return false;
7737 if (is_gimple_call (t))
7739 edge_iterator ei;
7740 edge e;
7741 basic_block bb;
7743 if (!(call_flags & ECF_NORETURN))
7744 return true;
7746 bb = gimple_bb (t);
7747 FOR_EACH_EDGE (e, ei, bb->succs)
7748 if ((e->flags & EDGE_FAKE) == 0)
7749 return true;
7752 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7753 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7754 return true;
7756 return false;
7760 /* Add fake edges to the function exit for any non-constant and
7761 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7762 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7763 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7764 that were split.
7766 The goal is to expose cases in which entering a basic block does
7767 not imply that all subsequent instructions must be executed. */
7769 static int
7770 gimple_flow_call_edges_add (sbitmap blocks)
7772 int i;
7773 int blocks_split = 0;
7774 int last_bb = last_basic_block_for_fn (cfun);
7775 bool check_last_block = false;
7777 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7778 return 0;
7780 if (! blocks)
7781 check_last_block = true;
7782 else
7783 check_last_block = bitmap_bit_p (blocks,
7784 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7786 /* In the last basic block, before epilogue generation, there will be
7787 a fallthru edge to EXIT. Special care is required if the last insn
7788 of the last basic block is a call because make_edge folds duplicate
7789 edges, which would result in the fallthru edge also being marked
7790 fake, which would result in the fallthru edge being removed by
7791 remove_fake_edges, which would result in an invalid CFG.
7793 Moreover, we can't elide the outgoing fake edge, since the block
7794 profiler needs to take this into account in order to solve the minimal
7795 spanning tree in the case that the call doesn't return.
7797 Handle this by adding a dummy instruction in a new last basic block. */
7798 if (check_last_block)
7800 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7801 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7802 gimple t = NULL;
7804 if (!gsi_end_p (gsi))
7805 t = gsi_stmt (gsi);
7807 if (t && need_fake_edge_p (t))
7809 edge e;
7811 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7812 if (e)
7814 gsi_insert_on_edge (e, gimple_build_nop ());
7815 gsi_commit_edge_inserts ();
7820 /* Now add fake edges to the function exit for any non-constant
7821 calls since there is no way that we can determine if they will
7822 return or not... */
7823 for (i = 0; i < last_bb; i++)
7825 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7826 gimple_stmt_iterator gsi;
7827 gimple stmt, last_stmt;
7829 if (!bb)
7830 continue;
7832 if (blocks && !bitmap_bit_p (blocks, i))
7833 continue;
7835 gsi = gsi_last_nondebug_bb (bb);
7836 if (!gsi_end_p (gsi))
7838 last_stmt = gsi_stmt (gsi);
7841 stmt = gsi_stmt (gsi);
7842 if (need_fake_edge_p (stmt))
7844 edge e;
7846 /* The handling above of the final block before the
7847 epilogue should be enough to verify that there is
7848 no edge to the exit block in CFG already.
7849 Calling make_edge in such case would cause us to
7850 mark that edge as fake and remove it later. */
7851 #ifdef ENABLE_CHECKING
7852 if (stmt == last_stmt)
7854 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7855 gcc_assert (e == NULL);
7857 #endif
7859 /* Note that the following may create a new basic block
7860 and renumber the existing basic blocks. */
7861 if (stmt != last_stmt)
7863 e = split_block (bb, stmt);
7864 if (e)
7865 blocks_split++;
7867 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7869 gsi_prev (&gsi);
7871 while (!gsi_end_p (gsi));
7875 if (blocks_split)
7876 verify_flow_info ();
7878 return blocks_split;
7881 /* Removes edge E and all the blocks dominated by it, and updates dominance
7882 information. The IL in E->src needs to be updated separately.
7883 If dominance info is not available, only the edge E is removed. */
7885 void
7886 remove_edge_and_dominated_blocks (edge e)
7888 vec<basic_block> bbs_to_remove = vNULL;
7889 vec<basic_block> bbs_to_fix_dom = vNULL;
7890 bitmap df, df_idom;
7891 edge f;
7892 edge_iterator ei;
7893 bool none_removed = false;
7894 unsigned i;
7895 basic_block bb, dbb;
7896 bitmap_iterator bi;
7898 /* If we are removing a path inside a non-root loop, that may change
7899 loop ownership of blocks or remove loops; mark loops for fixup. */
7900 if (current_loops
7901 && loop_outer (e->src->loop_father) != NULL
7902 && e->src->loop_father == e->dest->loop_father)
7903 loops_state_set (LOOPS_NEED_FIXUP);
7905 if (!dom_info_available_p (CDI_DOMINATORS))
7907 remove_edge (e);
7908 return;
7911 /* No updating is needed for edges to exit. */
7912 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7914 if (cfgcleanup_altered_bbs)
7915 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7916 remove_edge (e);
7917 return;
7920 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7921 that is not dominated by E->dest, then this set is empty. Otherwise,
7922 all the basic blocks dominated by E->dest are removed.
7924 Also, to DF_IDOM we store the immediate dominators of the blocks in
7925 the dominance frontier of E (i.e., of the successors of the
7926 removed blocks, if there are any, and of E->dest otherwise). */
7927 FOR_EACH_EDGE (f, ei, e->dest->preds)
7929 if (f == e)
7930 continue;
7932 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7934 none_removed = true;
7935 break;
7939 df = BITMAP_ALLOC (NULL);
7940 df_idom = BITMAP_ALLOC (NULL);
7942 if (none_removed)
7943 bitmap_set_bit (df_idom,
7944 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7945 else
7947 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7948 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7950 FOR_EACH_EDGE (f, ei, bb->succs)
7952 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7953 bitmap_set_bit (df, f->dest->index);
7956 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7957 bitmap_clear_bit (df, bb->index);
7959 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7961 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7962 bitmap_set_bit (df_idom,
7963 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7967 if (cfgcleanup_altered_bbs)
7969 /* Record the set of the altered basic blocks. */
7970 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7971 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7974 /* Remove E and the cancelled blocks. */
7975 if (none_removed)
7976 remove_edge (e);
7977 else
7979 /* Walk backwards so as to get a chance to substitute all
7980 released DEFs into debug stmts. See
7981 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7982 details. */
7983 for (i = bbs_to_remove.length (); i-- > 0; )
7984 delete_basic_block (bbs_to_remove[i]);
7987 /* Update the dominance information. The immediate dominator may change only
7988 for blocks whose immediate dominator belongs to DF_IDOM:
7990 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7991 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7992 Z dominates X after the removal. Before removal, there exists a path P
7993 from Y to X that avoids Z. Let F be the last edge on P that is
7994 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7995 dominates W, and because of P, Z does not dominate W), and W belongs to
7996 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
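/* A concrete (hypothetical) instance: in a diamond A->B, A->C, B->D,
   C->D, removing edge A->B deletes B; D's old idom A lands in DF_IDOM,
   so D is pushed below and its idom is recomputed (to C) by the
   fixup.  */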
7997 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7999 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8000 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8001 dbb;
8002 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8003 bbs_to_fix_dom.safe_push (dbb);
8006 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8008 BITMAP_FREE (df);
8009 BITMAP_FREE (df_idom);
8010 bbs_to_remove.release ();
8011 bbs_to_fix_dom.release ();
8014 /* Purge dead EH edges from basic block BB. */
8016 bool
8017 gimple_purge_dead_eh_edges (basic_block bb)
8019 bool changed = false;
8020 edge e;
8021 edge_iterator ei;
8022 gimple stmt = last_stmt (bb);
8024 if (stmt && stmt_can_throw_internal (stmt))
8025 return false;
8027 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8029 if (e->flags & EDGE_EH)
8031 remove_edge_and_dominated_blocks (e);
8032 changed = true;
8034 else
8035 ei_next (&ei);
8038 return changed;
8041 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8043 bool
8044 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8046 bool changed = false;
8047 unsigned i;
8048 bitmap_iterator bi;
8050 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8052 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8054 /* Earlier gimple_purge_dead_eh_edges could have removed
8055 this basic block already. */
8056 gcc_assert (bb || changed);
8057 if (bb != NULL)
8058 changed |= gimple_purge_dead_eh_edges (bb);
8061 return changed;
8064 /* Purge dead abnormal call edges from basic block BB. */
8066 bool
8067 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8069 bool changed = false;
8070 edge e;
8071 edge_iterator ei;
8072 gimple stmt = last_stmt (bb);
8074 if (!cfun->has_nonlocal_label
8075 && !cfun->calls_setjmp)
8076 return false;
8078 if (stmt && stmt_can_make_abnormal_goto (stmt))
8079 return false;
8081 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8083 if (e->flags & EDGE_ABNORMAL)
8085 if (e->flags & EDGE_FALLTHRU)
8086 e->flags &= ~EDGE_ABNORMAL;
8087 else
8088 remove_edge_and_dominated_blocks (e);
8089 changed = true;
8091 else
8092 ei_next (&ei);
8095 return changed;
8098 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8100 bool
8101 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8103 bool changed = false;
8104 unsigned i;
8105 bitmap_iterator bi;
8107 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8109 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8111 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8112 this basic block already. */
8113 gcc_assert (bb || changed);
8114 if (bb != NULL)
8115 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8118 return changed;
8121 /* This function is called whenever a new edge is created or
8122 redirected. */
8124 static void
8125 gimple_execute_on_growing_pred (edge e)
8127 basic_block bb = e->dest;
8129 if (!gimple_seq_empty_p (phi_nodes (bb)))
8130 reserve_phi_args_for_new_edge (bb);
8133 /* This function is called immediately before edge E is removed from
8134 the edge vector E->dest->preds. */
8136 static void
8137 gimple_execute_on_shrinking_pred (edge e)
8139 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8140 remove_phi_args (e);
8143 /*---------------------------------------------------------------------------
8144 Helper functions for Loop versioning
8145 ---------------------------------------------------------------------------*/
8147 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8148 of 'first'. Both of them are dominated by 'new_head' basic block. When
8149 'new_head' was created by 'second's incoming edge it received phi arguments
8150 on the edge by split_edge(). Later, additional edge 'e' was created to
8151 connect 'new_head' and 'first'. This routine now adds, on this
8152 additional edge 'e', the phi args that the new_head-to-second edge
8153 received as part of edge splitting. */
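/* Shape of the CFG this helper expects (illustrative sketch):

            new_head
           e /    \ e2  (e2 made by split_edge)
            v      v
         first   second

   The phi args that SECOND received along E2 are replayed on E so that
   FIRST sees the same values.  */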
8155 static void
8156 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8157 basic_block new_head, edge e)
8159 gphi *phi1, *phi2;
8160 gphi_iterator psi1, psi2;
8161 tree def;
8162 edge e2 = find_edge (new_head, second);
8164 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8165 edge, we should always have an edge from NEW_HEAD to SECOND. */
8166 gcc_assert (e2 != NULL);
8168 /* Browse all 'second' basic block phi nodes and add phi args to
8169 edge 'e' for 'first' head. PHI args are always in correct order. */
8171 for (psi2 = gsi_start_phis (second),
8172 psi1 = gsi_start_phis (first);
8173 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8174 gsi_next (&psi2), gsi_next (&psi1))
8176 phi1 = psi1.phi ();
8177 phi2 = psi2.phi ();
8178 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8179 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8184 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8185 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8186 the destination of the ELSE part. */
8188 static void
8189 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8190 basic_block second_head ATTRIBUTE_UNUSED,
8191 basic_block cond_bb, void *cond_e)
8193 gimple_stmt_iterator gsi;
8194 gimple new_cond_expr;
8195 tree cond_expr = (tree) cond_e;
8196 edge e0;
8198 /* Build new conditional expr */
8199 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8200 NULL_TREE, NULL_TREE);
8202 /* Add new cond in cond_bb. */
8203 gsi = gsi_last_bb (cond_bb);
8204 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8206 /* Adjust edges appropriately to connect new head with first head
8207 as well as second head. */
8208 e0 = single_succ_edge (cond_bb);
8209 e0->flags &= ~EDGE_FALLTHRU;
8210 e0->flags |= EDGE_FALSE_VALUE;
8214 /* Do book-keeping of basic block BB for the profile consistency checker.
8215 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8216 do post-pass accounting. Store the counts in RECORD. */
8217 static void
8218 gimple_account_profile_record (basic_block bb, int after_pass,
8219 struct profile_record *record)
8221 gimple_stmt_iterator i;
8222 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8224 record->size[after_pass]
8225 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8226 if (profile_status_for_fn (cfun) == PROFILE_READ)
8227 record->time[after_pass]
8228 += estimate_num_insns (gsi_stmt (i),
8229 &eni_time_weights) * bb->count;
8230 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8231 record->time[after_pass]
8232 += estimate_num_insns (gsi_stmt (i),
8233 &eni_time_weights) * bb->frequency;
8237 struct cfg_hooks gimple_cfg_hooks = {
8238 "gimple",
8239 gimple_verify_flow_info,
8240 gimple_dump_bb, /* dump_bb */
8241 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8242 create_bb, /* create_basic_block */
8243 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8244 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8245 gimple_can_remove_branch_p, /* can_remove_branch_p */
8246 remove_bb, /* delete_basic_block */
8247 gimple_split_block, /* split_block */
8248 gimple_move_block_after, /* move_block_after */
8249 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8250 gimple_merge_blocks, /* merge_blocks */
8251 gimple_predict_edge, /* predict_edge */
8252 gimple_predicted_by_p, /* predicted_by_p */
8253 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8254 gimple_duplicate_bb, /* duplicate_block */
8255 gimple_split_edge, /* split_edge */
8256 gimple_make_forwarder_block, /* make_forwarder_block */
8257 NULL, /* tidy_fallthru_edge */
8258 NULL, /* force_nonfallthru */
8259 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8260 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8261 gimple_flow_call_edges_add, /* flow_call_edges_add */
8262 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8263 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8264 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8265 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8266 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8267 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8268 flush_pending_stmts, /* flush_pending_stmts */
8269 gimple_empty_block_p, /* block_empty_p */
8270 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8271 gimple_account_profile_record,
8275 /* Split all critical edges. */
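/* An edge is critical when its source has more than one successor and
   its destination has more than one predecessor (see EDGE_CRITICAL_P);
   code cannot be inserted on such an edge without a new block, which
   split_edge interposes.  */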
8277 unsigned int
8278 split_critical_edges (void)
8280 basic_block bb;
8281 edge e;
8282 edge_iterator ei;
8284 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8285 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8286 mappings around the calls to split_edge. */
8287 start_recording_case_labels ();
8288 FOR_ALL_BB_FN (bb, cfun)
8290 FOR_EACH_EDGE (e, ei, bb->succs)
8292 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8293 split_edge (e);
8294 /* PRE inserts statements on edges and expects that,
8295 since split_critical_edges was run beforehand, committing edge
8296 insertions will not split more edges. In addition to critical
8297 edges we must split edges that have multiple successors and
8298 end by control flow statements, such as RESX.
8299 Go ahead and split them too. This matches the logic in
8300 gimple_find_edge_insert_loc. */
8301 else if ((!single_pred_p (e->dest)
8302 || !gimple_seq_empty_p (phi_nodes (e->dest))
8303 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8304 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8305 && !(e->flags & EDGE_ABNORMAL))
8307 gimple_stmt_iterator gsi;
8309 gsi = gsi_last_bb (e->src);
8310 if (!gsi_end_p (gsi)
8311 && stmt_ends_bb_p (gsi_stmt (gsi))
8312 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8313 && !gimple_call_builtin_p (gsi_stmt (gsi),
8314 BUILT_IN_RETURN)))
8315 split_edge (e);
8319 end_recording_case_labels ();
8320 return 0;
8323 namespace {
8325 const pass_data pass_data_split_crit_edges =
8327 GIMPLE_PASS, /* type */
8328 "crited", /* name */
8329 OPTGROUP_NONE, /* optinfo_flags */
8330 TV_TREE_SPLIT_EDGES, /* tv_id */
8331 PROP_cfg, /* properties_required */
8332 PROP_no_crit_edges, /* properties_provided */
8333 0, /* properties_destroyed */
8334 0, /* todo_flags_start */
8335 0, /* todo_flags_finish */
8338 class pass_split_crit_edges : public gimple_opt_pass
8340 public:
8341 pass_split_crit_edges (gcc::context *ctxt)
8342 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8345 /* opt_pass methods: */
8346 virtual unsigned int execute (function *) { return split_critical_edges (); }
8348 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8349 }; // class pass_split_crit_edges
8351 } // anon namespace
8353 gimple_opt_pass *
8354 make_pass_split_crit_edges (gcc::context *ctxt)
8356 return new pass_split_crit_edges (ctxt);
8360 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8361 in basic block BB, splitting the block as needed and creating a
8362 new conditionally executed basic block. Return the created basic
8363 block. */
8364 basic_block
8365 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8367 edge fall = split_block (bb, stmt);
8368 gimple_stmt_iterator iter = gsi_last_bb (bb);
8369 basic_block new_bb;
8371 /* Insert cond statement. */
8372 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8373 if (gsi_end_p (iter))
8374 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8375 else
8376 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8378 /* Create conditionally executed block. */
8379 new_bb = create_empty_bb (bb);
8380 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8381 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8383 /* Fix edge for split bb. */
8384 fall->flags = EDGE_FALSE_VALUE;
8386 /* Update dominance info. */
8387 if (dom_info_available_p (CDI_DOMINATORS))
8389 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8390 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8393 /* Update loop info. */
8394 if (current_loops)
8395 add_bb_to_loop (new_bb, bb->loop_father);
8397 return new_bb;
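/* Usage sketch (hypothetical instrumentation pass; FLAG_VAL is an
   assumed SSA name holding a runtime flag):

     gimple check = gimple_build_cond (NE_EXPR, flag_val,
                                       integer_zero_node,
                                       NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, check);

   Statements later added to THEN_BB execute only when the condition
   holds; the original fall-through becomes the false path.  */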

/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
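
/* For instance, to materialize a select "p ? a : b" as a gimple value
   before GSI (a sketch; P, A, B and TYPE are assumed):

     tree sel = gimplify_build3 (&gsi, COND_EXPR, type, p, a, b);

   Any statements needed to compute SEL are inserted before the statement
   GSI points at, and SEL is a valid gimple operand afterwards.  */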

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
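
/* The three builders compose naturally.  For example, to compute
   ABS (a - b) as a gimple value before GSI (a sketch; A, B and TYPE
   are assumed):

     tree diff = gimplify_build2 (&gsi, MINUS_EXPR, type, a, b);
     tree dist = gimplify_build1 (&gsi, ABS_EXPR, type, diff);

   Each call folds first, so trivial operands may produce no new
   statements at all.  */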

/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
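
/* Typical use (a sketch; BB is assumed to end in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   Exactly one successor edge carries EDGE_TRUE_VALUE, so the pair is
   always fully determined; no NULL check is needed on the results.  */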

/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (fun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          gimple last = last_stmt (e->src);
          greturn *return_stmt = dyn_cast <greturn *> (last);
          if (return_stmt
              && gimple_return_retval (return_stmt) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = fun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (fun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}
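
/* Source-level examples of what this pass diagnoses (illustrative only,
   not part of GCC):

     __attribute__ ((noreturn)) void f (void)
     { }                      // warning: 'noreturn' function does return

     int g (int x)
     { if (x) return 1; }     // warning: control reaches end of
                              // non-void function (-Wreturn-type)

   In the second case gimplification adds an implicit "return;" at the
   end of G; that value-less GIMPLE_RETURN is what the loop above looks
   for on the exit edges.  */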

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we do not have to worry about inlined call sites.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
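
/* For example (illustrative only, not part of GCC):

     __attribute__ ((warn_unused_result)) int get (void);

     void f (void)
     {
       get ();    // warning: ignoring return value of 'get', declared
                  // with attribute warn_unused_result
     }

   The call is a GIMPLE_CALL with no LHS, so the walk above flags it.
   Note that "(void) get ();" warns as well: the cast to void does not
   survive gimplification, so the call still has no LHS here.  */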

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass cannot be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
    = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
                          ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
                 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep the access if the store has a side effect, i.e. when the
             source is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (TREE_CODE (lhs) == VAR_DECL
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }

          /* For calls we can simply remove the LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (TREE_CODE (lhs) == VAR_DECL
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              stmt = gimple_build_call
                  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
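
/* As an illustration of the last fixup above (not part of GCC): after
   inlining a noreturn callee whose body in fact returns, a block can be
   left with zero successor edges but only a plain statement at its end.
   The loop above then appends

     __builtin_unreachable ();

   so the block once again ends in a recognized non-returning construct
   and verify_flow_info is satisfied.  */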

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}