/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
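/* Note that both hash and equal above deliberately look only at
   LOCATION_LINE, so every location on one source line maps to a single
   entry; different columns on the same line do not get separate
   discriminator counters.  */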
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
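/* E.g. the GNU C extension "goto *p;" produces a GIMPLE_GOTO whose
   destination is a pointer value rather than a LABEL_DECL, so it is a
   computed goto; a plain "goto lab;" is not.  */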
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);

  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
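/* As an illustration of the LHS replacement above: a returns-twice call
   such as "x = setjmp (env);" is rewritten to roughly

     tmp = setjmp (env);
     x = tmp;

   so that the abnormal edge out of the call still sees the old value
   of X.  */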
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
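/* The growth formula above rounds the array size up by a quarter, so a
   full array of 20 entries grows to 25, then to 32, and so on, which
   amortizes the cost of the reallocations.  */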
/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (stmt);
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (cond_stmt),
                                  gimple_cond_rhs (cond_stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (cond_stmt);
          else if (onep)
            gimple_cond_make_true (cond_stmt);
        }
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
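/* Sketch of the factoring done above for computed gotos (not literal
   dump output):

     goto *p1;                 gotovar = p1; goto <factored>;
     ...                 =>    ...
     goto *p2;                 gotovar = p2; goto <factored>;
                               <factored>: goto *gotovar;

   Only the factored block then needs an edge to every potential target,
   instead of every computed goto needing one.  */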
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              if (make_goto_expr_edges (bb))
                ab_edge_goto.safe_push (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              {
                edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                e->goto_locus = gimple_location (last);
                fallthru = false;
              }
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (as_a <gswitch *> (last), bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                ab_edge_call.safe_push (bb);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region,
                                                &cur_omp_region_idx);
              if (cur_region && bb_to_omp_idx == NULL)
                bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label
                  = gimple_transaction_label (as_a <gtransaction *> (last));
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
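/* I.e. the first block that needs to be distinguished on a given source
   line receives discriminator 1, the next one 2, and so on; 0 is left
   for blocks that never needed a discriminator.  */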
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
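/* Discriminators matter when several blocks stem from one source line;
   e.g. for "while (p) p = p->next;" the condition and the body share a
   line, and distinct discriminators let sample-based profilers attribute
   samples to each block separately.  */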
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
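/* Thus "goto lab;" is deleted here and replaced by a fallthru edge to
   the block holding "lab:", while "goto *p;" stays in place; make_edges
   then routes it through the dispatcher built by handle_abnormal_edges.  */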
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
            tree label = gimple_transaction_label (trans_stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (trans_stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
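/* For example, a block starting with an artificial label <D.1234> followed
   by a user label "retry:" gets "retry" as its leading label; branches to
   <D.1234> are redirected to "retry", and the artificial label, now
   unreferenced, is removed in step 3.  */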
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
          || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
          && stmt_ends_bb_p (stmt))
        bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
                                       PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL
                  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
        {
          tree label = gimple_label_label (label_stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels keep around in a form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
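/* E.g. a single-valued PHI "x_1 = PHI <x_2(A)>" at the start of B is
   eliminated above either by propagating x_2 into all uses of x_1 or,
   when propagation is not possible, by appending the copy "x_1 = x_2"
   to A before the sequences are joined.  */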
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}
2021 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
2023 void
2024 notice_special_calls (gcall *call)
2026 int flags = gimple_call_flags (call);
2028 if (flags & ECF_MAY_BE_ALLOCA)
2029 cfun->calls_alloca = true;
2030 if (flags & ECF_RETURNS_TWICE)
2031 cfun->calls_setjmp = true;
2035 /* Clear flags set by notice_special_calls. Used by dead code removal
2036 to update the flags. */
2038 void
2039 clear_special_calls (void)
2041 cfun->calls_alloca = false;
2042 cfun->calls_setjmp = false;
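/* A hedged usage sketch (the helper below is hypothetical, not part of
   GCC proper): after deleting statements, a pass could recompute these
   flags by clearing them and rescanning every remaining call, which is
   roughly what dead code removal does with the two functions above.  */

static void
recompute_special_call_flags_sketch (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* Start from a clean slate, then re-notice every surviving call.  */
  clear_special_calls ();
  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      if (gcall *call = dyn_cast <gcall *> (gsi_stmt (gsi)))
	notice_special_calls (call);
}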
2045 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2047 static void
2048 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2050 /* Since this block is no longer reachable, we can just delete all
2051 of its PHI nodes. */
2052 remove_phi_nodes (bb);
2054 /* Remove edges to BB's successors. */
2055 while (EDGE_COUNT (bb->succs) > 0)
2056 remove_edge (EDGE_SUCC (bb, 0));
2060 /* Remove statements of basic block BB. */
2062 static void
2063 remove_bb (basic_block bb)
2065 gimple_stmt_iterator i;
2067 if (dump_file)
2069 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2070 if (dump_flags & TDF_DETAILS)
2072 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2073 fprintf (dump_file, "\n");
2077 if (current_loops)
2079 struct loop *loop = bb->loop_father;
2081 /* If a loop gets removed, clean up the information associated
2082 with it. */
2083 if (loop->latch == bb
2084 || loop->header == bb)
2085 free_numbers_of_iterations_estimates_loop (loop);
2088 /* Remove all the instructions in the block. */
2089 if (bb_seq (bb) != NULL)
2091 /* Walk backwards so as to get a chance to substitute all
2092 released DEFs into debug stmts. See
2093 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2094 details. */
2095 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2097 gimple stmt = gsi_stmt (i);
2098 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2099 if (label_stmt
2100 && (FORCED_LABEL (gimple_label_label (label_stmt))
2101 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2103 basic_block new_bb;
2104 gimple_stmt_iterator new_gsi;
2106 /* A non-reachable non-local label may still be referenced.
2107 But it no longer needs to carry the extra semantics of
2108 non-locality. */
2109 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2111 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2112 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2115 new_bb = bb->prev_bb;
2116 new_gsi = gsi_start_bb (new_bb);
2117 gsi_remove (&i, false);
2118 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2120 else
2122 /* Release SSA definitions if we are in SSA. Note that we
2123 may be called when not in SSA. For example,
2124 final_cleanup calls this function via
2125 cleanup_tree_cfg. */
2126 if (gimple_in_ssa_p (cfun))
2127 release_defs (stmt);
2129 gsi_remove (&i, true);
2132 if (gsi_end_p (i))
2133 i = gsi_last_bb (bb);
2134 else
2135 gsi_prev (&i);
2139 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2140 bb->il.gimple.seq = NULL;
2141 bb->il.gimple.phi_nodes = NULL;
2145 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2146 predicate VAL, return the edge that will be taken out of the block.
2147 If VAL does not match a unique edge, NULL is returned. */
2149 edge
2150 find_taken_edge (basic_block bb, tree val)
2152 gimple stmt;
2154 stmt = last_stmt (bb);
2156 gcc_assert (stmt);
2157 gcc_assert (is_ctrl_stmt (stmt));
2159 if (val == NULL)
2160 return NULL;
2162 if (!is_gimple_min_invariant (val))
2163 return NULL;
2165 if (gimple_code (stmt) == GIMPLE_COND)
2166 return find_taken_edge_cond_expr (bb, val);
2168 if (gimple_code (stmt) == GIMPLE_SWITCH)
2169 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2171 if (computed_goto_p (stmt))
2173 /* Only optimize if the argument is a label; if the argument is
2174 not a label, then we cannot construct a proper CFG.
2176 It may be the case that we only need to allow the LABEL_REF to
2177 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2178 appear inside a LABEL_EXPR just to be safe. */
2179 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2180 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2181 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2182 return NULL;
2185 gcc_unreachable ();
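/* A hedged usage sketch (the helper name is hypothetical): when a block
   ends in a GIMPLE_COND whose operands fold to a constant, fold the
   comparison and ask find_taken_edge which successor survives.  */

static edge
taken_edge_for_constant_cond_sketch (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return NULL;
  gcond *cond = as_a <gcond *> (stmt);
  /* fold_binary returns NULL_TREE when the comparison does not fold
     to a constant, in which case no unique edge can be determined.  */
  tree val = fold_binary (gimple_cond_code (cond), boolean_type_node,
			  gimple_cond_lhs (cond), gimple_cond_rhs (cond));
  return val ? find_taken_edge (bb, val) : NULL;
}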
2188 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2189 statement, determine which of the outgoing edges will be taken out of the
2190 block. Return NULL if any edge may be taken. */
2192 static edge
2193 find_taken_edge_computed_goto (basic_block bb, tree val)
2195 basic_block dest;
2196 edge e = NULL;
2198 dest = label_to_block (val);
2199 if (dest)
2201 e = find_edge (bb, dest);
2202 gcc_assert (e != NULL);
2205 return e;
2208 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2209 statement, determine which of the two edges will be taken out of the
2210 block. Return NULL if either edge may be taken. */
2212 static edge
2213 find_taken_edge_cond_expr (basic_block bb, tree val)
2215 edge true_edge, false_edge;
2217 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2219 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2220 return (integer_zerop (val) ? false_edge : true_edge);
2223 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2224 statement, determine which edge will be taken out of the block. Return
2225 NULL if any edge may be taken. */
2227 static edge
2228 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2229 tree val)
2231 basic_block dest_bb;
2232 edge e;
2233 tree taken_case;
2235 taken_case = find_case_label_for_value (switch_stmt, val);
2236 dest_bb = label_to_block (CASE_LABEL (taken_case));
2238 e = find_edge (bb, dest_bb);
2239 gcc_assert (e);
2240 return e;
2244 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2245 We can make optimal use here of the fact that the case labels are
2246 sorted: We can do a binary search for a case matching VAL. */
2248 static tree
2249 find_case_label_for_value (gswitch *switch_stmt, tree val)
2251 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2252 tree default_case = gimple_switch_default_label (switch_stmt);
2254 for (low = 0, high = n; high - low > 1; )
2256 size_t i = (high + low) / 2;
2257 tree t = gimple_switch_label (switch_stmt, i);
2258 int cmp;
2260 /* Cache the result of comparing CASE_LOW and val. */
2261 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2263 if (cmp > 0)
2264 high = i;
2265 else
2266 low = i;
2268 if (CASE_HIGH (t) == NULL)
2270 /* A single-valued case label. */
2271 if (cmp == 0)
2272 return t;
2274 else
2276 /* A case range. We can only handle integer ranges. */
2277 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2278 return t;
2282 return default_case;
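/* A plain-C sketch of the same search, assuming an array of [lo, hi]
   ranges sorted by LO with a sentinel entry at index 0 standing in for
   the default label.  Illustrative only; not used by GCC.  */

struct case_range_sketch { long lo, hi; };   /* hi == lo for single values.  */

static size_t
find_case_index_sketch (const struct case_range_sketch *ranges,
			size_t n, long val)
{
  size_t low = 0, high = n;
  while (high - low > 1)
    {
      size_t i = (high + low) / 2;
      if (ranges[i].lo > val)
	high = i;
      else
	low = i;
    }
  /* LOW now indexes the last range whose LO is <= VAL, or the sentinel.
     Accept it only if VAL actually falls inside the range.  */
  if (low > 0 && val >= ranges[low].lo && val <= ranges[low].hi)
    return low;
  return 0;   /* The "default" entry.  */
}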
2286 /* Dump a basic block on stderr. */
2288 void
2289 gimple_debug_bb (basic_block bb)
2291 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2295 /* Dump basic block with index N on stderr. */
2297 basic_block
2298 gimple_debug_bb_n (int n)
2300 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2301 return BASIC_BLOCK_FOR_FN (cfun, n);
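/* An illustrative debugger usage (hypothetical session):

     (gdb) call gimple_debug_bb_n (3)

   dumps basic block 3 of the current function to stderr and returns
   the block, which is why this helper takes a plain int.  */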
2305 /* Dump the CFG on stderr.
2307 FLAGS are the same as those used by the tree dumping functions
2308 (see TDF_* in dumpfile.h). */
2310 void
2311 gimple_debug_cfg (int flags)
2313 gimple_dump_cfg (stderr, flags);
2317 /* Dump the program showing basic block boundaries on the given FILE.
2319 FLAGS are the same as those used by the tree dumping functions (see TDF_*
2320 in dumpfile.h). */
2322 void
2323 gimple_dump_cfg (FILE *file, int flags)
2325 if (flags & TDF_DETAILS)
2327 dump_function_header (file, current_function_decl, flags);
2328 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2329 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2330 last_basic_block_for_fn (cfun));
2332 brief_dump_cfg (file, flags | TDF_COMMENT);
2333 fprintf (file, "\n");
2336 if (flags & TDF_STATS)
2337 dump_cfg_stats (file);
2339 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2343 /* Dump CFG statistics on FILE. */
2345 void
2346 dump_cfg_stats (FILE *file)
2348 static long max_num_merged_labels = 0;
2349 unsigned long size, total = 0;
2350 long num_edges;
2351 basic_block bb;
2352 const char * const fmt_str = "%-30s%-13s%12s\n";
2353 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2354 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2355 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2356 const char *funcname = current_function_name ();
2358 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2360 fprintf (file, "---------------------------------------------------------\n");
2361 fprintf (file, fmt_str, "", " Number of ", "Memory");
2362 fprintf (file, fmt_str, "", " instances ", "used ");
2363 fprintf (file, "---------------------------------------------------------\n");
2365 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2366 total += size;
2367 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2368 SCALE (size), LABEL (size));
2370 num_edges = 0;
2371 FOR_EACH_BB_FN (bb, cfun)
2372 num_edges += EDGE_COUNT (bb->succs);
2373 size = num_edges * sizeof (struct edge_def);
2374 total += size;
2375 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2377 fprintf (file, "---------------------------------------------------------\n");
2378 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2379 LABEL (total));
2380 fprintf (file, "---------------------------------------------------------\n");
2381 fprintf (file, "\n");
2383 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2384 max_num_merged_labels = cfg_stats.num_merged_labels;
2386 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2387 cfg_stats.num_merged_labels, max_num_merged_labels);
2389 fprintf (file, "\n");
2393 /* Dump CFG statistics on stderr. Keep extern so that it's always
2394 linked in the final executable. */
2396 DEBUG_FUNCTION void
2397 debug_cfg_stats (void)
2399 dump_cfg_stats (stderr);
2402 /*---------------------------------------------------------------------------
2403 Miscellaneous helpers
2404 ---------------------------------------------------------------------------*/
2406 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2407 flow. Transfers of control flow associated with EH are excluded. */
2409 static bool
2410 call_can_make_abnormal_goto (gimple t)
2412 /* If the function has no non-local labels, then a call cannot make an
2413 abnormal transfer of control. */
2414 if (!cfun->has_nonlocal_label
2415 && !cfun->calls_setjmp)
2416 return false;
2418 /* Likewise if the call has no side effects. */
2419 if (!gimple_has_side_effects (t))
2420 return false;
2422 /* Likewise if the called function is leaf. */
2423 if (gimple_call_flags (t) & ECF_LEAF)
2424 return false;
2426 return true;
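/* For instance (an illustrative scenario, not taken from this file): in
   a function that has already called setjmp, a later call with side
   effects that is not known to be leaf may longjmp back into the
   function, so it can transfer control abnormally and must end its
   basic block; a leaf or side-effect-free call need not.  */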
2430 /* Return true if T can make an abnormal transfer of control flow.
2431 Transfers of control flow associated with EH are excluded. */
2433 bool
2434 stmt_can_make_abnormal_goto (gimple t)
2436 if (computed_goto_p (t))
2437 return true;
2438 if (is_gimple_call (t))
2439 return call_can_make_abnormal_goto (t);
2440 return false;
2444 /* Return true if T represents a stmt that always transfers control. */
2446 bool
2447 is_ctrl_stmt (gimple t)
2449 switch (gimple_code (t))
2451 case GIMPLE_COND:
2452 case GIMPLE_SWITCH:
2453 case GIMPLE_GOTO:
2454 case GIMPLE_RETURN:
2455 case GIMPLE_RESX:
2456 return true;
2457 default:
2458 return false;
2463 /* Return true if T is a statement that may alter the flow of control
2464 (e.g., a call to a non-returning function). */
2466 bool
2467 is_ctrl_altering_stmt (gimple t)
2469 gcc_assert (t);
2471 switch (gimple_code (t))
2473 case GIMPLE_CALL:
2474 /* The per-stmt call flag indicates whether the call could alter
2475 control flow. */
2476 if (gimple_call_ctrl_altering_p (t))
2477 return true;
2478 break;
2480 case GIMPLE_EH_DISPATCH:
2481 /* EH_DISPATCH branches to the individual catch handlers at
2482 this level of a try or allowed-exceptions region. It can
2483 fallthru to the next statement as well. */
2484 return true;
2486 case GIMPLE_ASM:
2487 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2488 return true;
2489 break;
2491 CASE_GIMPLE_OMP:
2492 /* OpenMP directives alter control flow. */
2493 return true;
2495 case GIMPLE_TRANSACTION:
2496 /* A transaction start alters control flow. */
2497 return true;
2499 default:
2500 break;
2503 /* If a statement can throw, it alters control flow. */
2504 return stmt_can_throw_internal (t);
2508 /* Return true if T is a simple local goto. */
2510 bool
2511 simple_goto_p (gimple t)
2513 return (gimple_code (t) == GIMPLE_GOTO
2514 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2518 /* Return true if STMT should start a new basic block. PREV_STMT is
2519 the statement preceding STMT. It is used when STMT is a label or a
2520 case label. Labels should only start a new basic block if their
2521 previous statement wasn't a label. Otherwise, a sequence of labels
2522 would generate unnecessary basic blocks that only contain a single
2523 label. */
2525 static inline bool
2526 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2528 if (stmt == NULL)
2529 return false;
2531 /* Labels start a new basic block only if the preceding statement
2532 wasn't a label of the same type. This prevents the creation of
2533 consecutive blocks that have nothing but a single label. */
2534 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2536 /* Nonlocal and computed GOTO targets always start a new block. */
2537 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2538 || FORCED_LABEL (gimple_label_label (label_stmt)))
2539 return true;
2541 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2543 if (DECL_NONLOCAL (gimple_label_label (
2544 as_a <glabel *> (prev_stmt))))
2545 return true;
2547 cfg_stats.num_merged_labels++;
2548 return false;
2550 else
2551 return true;
2553 else if (gimple_code (stmt) == GIMPLE_CALL
2554 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2555 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2556 start a new block. */
2557 return true;
2559 return false;
2563 /* Return true if T should end a basic block. */
2565 bool
2566 stmt_ends_bb_p (gimple t)
2568 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
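/* A hedged sketch (hypothetical helper): the CFG builder conceptually
   cuts the statement sequence before every statement for which
   stmt_starts_bb_p is true and after every statement for which
   stmt_ends_bb_p is true.  Combining the two predicates gives a single
   boundary test.  */

static bool
stmt_is_block_boundary_p_sketch (gimple stmt, gimple prev_stmt)
{
  return (stmt_starts_bb_p (stmt, prev_stmt)
	  || (prev_stmt && stmt_ends_bb_p (prev_stmt)));
}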
2571 /* Remove block annotations and other data structures. */
2573 void
2574 delete_tree_cfg_annotations (void)
2576 vec_free (label_to_block_map_for_fn (cfun));
2580 /* Return the first statement in basic block BB. */
2582 gimple
2583 first_stmt (basic_block bb)
2585 gimple_stmt_iterator i = gsi_start_bb (bb);
2586 gimple stmt = NULL;
2588 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2590 gsi_next (&i);
2591 stmt = NULL;
2593 return stmt;
2596 /* Return the first non-label statement in basic block BB. */
2598 static gimple
2599 first_non_label_stmt (basic_block bb)
2601 gimple_stmt_iterator i = gsi_start_bb (bb);
2602 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2603 gsi_next (&i);
2604 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2607 /* Return the last statement in basic block BB. */
2609 gimple
2610 last_stmt (basic_block bb)
2612 gimple_stmt_iterator i = gsi_last_bb (bb);
2613 gimple stmt = NULL;
2615 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2617 gsi_prev (&i);
2618 stmt = NULL;
2620 return stmt;
2623 /* Return the last statement of an otherwise empty block. Return NULL
2624 if the block is totally empty, or if it contains more than one
2625 statement. */
2627 gimple
2628 last_and_only_stmt (basic_block bb)
2630 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2631 gimple last, prev;
2633 if (gsi_end_p (i))
2634 return NULL;
2636 last = gsi_stmt (i);
2637 gsi_prev_nondebug (&i);
2638 if (gsi_end_p (i))
2639 return last;
2641 /* Empty statements should no longer appear in the instruction stream.
2642 Everything that might have appeared before should be deleted by
2643 remove_useless_stmts, and the optimizers should just gsi_remove
2644 instead of smashing with build_empty_stmt.
2646 Thus the only thing that should appear here in a block containing
2647 one executable statement is a label. */
2648 prev = gsi_stmt (i);
2649 if (gimple_code (prev) == GIMPLE_LABEL)
2650 return last;
2651 else
2652 return NULL;
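/* A hedged usage sketch (hypothetical helper, not GCC API): a block
   whose only real statement is an unconditional goto is a forwarder
   candidate, since last_and_only_stmt skips labels and debug stmts.  */

static bool
block_is_just_goto_p_sketch (basic_block bb)
{
  gimple stmt = last_and_only_stmt (bb);
  return stmt != NULL && simple_goto_p (stmt);
}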
2655 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2657 static void
2658 reinstall_phi_args (edge new_edge, edge old_edge)
2660 edge_var_map *vm;
2661 int i;
2662 gphi_iterator phis;
2664 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2665 if (!v)
2666 return;
2668 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2669 v->iterate (i, &vm) && !gsi_end_p (phis);
2670 i++, gsi_next (&phis))
2672 gphi *phi = phis.phi ();
2673 tree result = redirect_edge_var_map_result (vm);
2674 tree arg = redirect_edge_var_map_def (vm);
2676 gcc_assert (result == gimple_phi_result (phi));
2678 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2681 redirect_edge_var_map_clear (old_edge);
2684 /* Returns the basic block after which the new basic block created
2685 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2686 near its "logical" location. This is of most help to humans looking
2687 at debugging dumps. */
2689 basic_block
2690 split_edge_bb_loc (edge edge_in)
2692 basic_block dest = edge_in->dest;
2693 basic_block dest_prev = dest->prev_bb;
2695 if (dest_prev)
2697 edge e = find_edge (dest_prev, dest);
2698 if (e && !(e->flags & EDGE_COMPLEX))
2699 return edge_in->src;
2701 return dest_prev;
2704 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2705 Abort on abnormal edges. */
2707 static basic_block
2708 gimple_split_edge (edge edge_in)
2710 basic_block new_bb, after_bb, dest;
2711 edge new_edge, e;
2713 /* Abnormal edges cannot be split. */
2714 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2716 dest = edge_in->dest;
2718 after_bb = split_edge_bb_loc (edge_in);
2720 new_bb = create_empty_bb (after_bb);
2721 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2722 new_bb->count = edge_in->count;
2723 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2724 new_edge->probability = REG_BR_PROB_BASE;
2725 new_edge->count = edge_in->count;
2727 e = redirect_edge_and_branch (edge_in, new_bb);
2728 gcc_assert (e == edge_in);
2729 reinstall_phi_args (new_edge, e);
2731 return new_bb;
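/* A minimal sketch, assuming only the helpers in this file: split every
   splittable critical edge by calling gimple_split_edge directly.
   GCC's real critical-edge splitting pass does additional bookkeeping;
   this is illustrative only.  */

static void
split_critical_edges_sketch (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_ALL_BB_FN (bb, cfun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      /* Abnormal edges cannot be split; gimple_split_edge asserts so.  */
      if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	gimple_split_edge (e);
}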
2735 /* Verify properties of the address expression T with base object BASE. */
2737 static tree
2738 verify_address (tree t, tree base)
2740 bool old_constant;
2741 bool old_side_effects;
2742 bool new_constant;
2743 bool new_side_effects;
2745 old_constant = TREE_CONSTANT (t);
2746 old_side_effects = TREE_SIDE_EFFECTS (t);
2748 recompute_tree_invariant_for_addr_expr (t);
2749 new_side_effects = TREE_SIDE_EFFECTS (t);
2750 new_constant = TREE_CONSTANT (t);
2752 if (old_constant != new_constant)
2754 error ("constant not recomputed when ADDR_EXPR changed");
2755 return t;
2757 if (old_side_effects != new_side_effects)
2759 error ("side effects not recomputed when ADDR_EXPR changed");
2760 return t;
2763 if (!(TREE_CODE (base) == VAR_DECL
2764 || TREE_CODE (base) == PARM_DECL
2765 || TREE_CODE (base) == RESULT_DECL))
2766 return NULL_TREE;
2768 if (DECL_GIMPLE_REG_P (base))
2770 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2771 return base;
2774 return NULL_TREE;
2777 /* Callback for walk_tree: check that all elements with address taken are
2778 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2779 inside a PHI node. */
2781 static tree
2782 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2784 tree t = *tp, x;
2786 if (TYPE_P (t))
2787 *walk_subtrees = 0;
2789 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2790 #define CHECK_OP(N, MSG) \
2791 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2792 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2794 switch (TREE_CODE (t))
2796 case SSA_NAME:
2797 if (SSA_NAME_IN_FREE_LIST (t))
2799 error ("SSA name in freelist but still referenced");
2800 return *tp;
2802 break;
2804 case INDIRECT_REF:
2805 error ("INDIRECT_REF in gimple IL");
2806 return t;
2808 case MEM_REF:
2809 x = TREE_OPERAND (t, 0);
2810 if (!POINTER_TYPE_P (TREE_TYPE (x))
2811 || !is_gimple_mem_ref_addr (x))
2813 error ("invalid first operand of MEM_REF");
2814 return x;
2816 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2817 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2819 error ("invalid offset operand of MEM_REF");
2820 return TREE_OPERAND (t, 1);
2822 if (TREE_CODE (x) == ADDR_EXPR
2823 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2824 return x;
2825 *walk_subtrees = 0;
2826 break;
2828 case ASSERT_EXPR:
2829 x = fold (ASSERT_EXPR_COND (t));
2830 if (x == boolean_false_node)
2832 error ("ASSERT_EXPR with an always-false condition");
2833 return *tp;
2835 break;
2837 case MODIFY_EXPR:
2838 error ("MODIFY_EXPR not expected while having tuples");
2839 return *tp;
2841 case ADDR_EXPR:
2843 tree tem;
2845 gcc_assert (is_gimple_address (t));
2847 /* Skip any references (they will be checked when we recurse down the
2848 tree) and ensure that any variable used as a prefix is marked
2849 addressable. */
2850 for (x = TREE_OPERAND (t, 0);
2851 handled_component_p (x);
2852 x = TREE_OPERAND (x, 0))
2855 if ((tem = verify_address (t, x)))
2856 return tem;
2858 if (!(TREE_CODE (x) == VAR_DECL
2859 || TREE_CODE (x) == PARM_DECL
2860 || TREE_CODE (x) == RESULT_DECL))
2861 return NULL;
2863 if (!TREE_ADDRESSABLE (x))
2865 error ("address taken, but ADDRESSABLE bit not set");
2866 return x;
2869 break;
2872 case COND_EXPR:
2873 x = COND_EXPR_COND (t);
2874 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2876 error ("non-integral used in condition");
2877 return x;
2879 if (!is_gimple_condexpr (x))
2881 error ("invalid conditional operand");
2882 return x;
2884 break;
2886 case NON_LVALUE_EXPR:
2887 case TRUTH_NOT_EXPR:
2888 gcc_unreachable ();
2890 CASE_CONVERT:
2891 case FIX_TRUNC_EXPR:
2892 case FLOAT_EXPR:
2893 case NEGATE_EXPR:
2894 case ABS_EXPR:
2895 case BIT_NOT_EXPR:
2896 CHECK_OP (0, "invalid operand to unary operator");
2897 break;
2899 case REALPART_EXPR:
2900 case IMAGPART_EXPR:
2901 case BIT_FIELD_REF:
2902 if (!is_gimple_reg_type (TREE_TYPE (t)))
2904 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2905 return t;
2908 if (TREE_CODE (t) == BIT_FIELD_REF)
2910 tree t0 = TREE_OPERAND (t, 0);
2911 tree t1 = TREE_OPERAND (t, 1);
2912 tree t2 = TREE_OPERAND (t, 2);
2913 if (!tree_fits_uhwi_p (t1)
2914 || !tree_fits_uhwi_p (t2))
2916 error ("invalid position or size operand to BIT_FIELD_REF");
2917 return t;
2919 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2920 && (TYPE_PRECISION (TREE_TYPE (t))
2921 != tree_to_uhwi (t1)))
2923 error ("integral result type precision does not match "
2924 "field size of BIT_FIELD_REF");
2925 return t;
2927 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2928 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2929 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2930 != tree_to_uhwi (t1)))
2932 error ("mode precision of non-integral result does not "
2933 "match field size of BIT_FIELD_REF");
2934 return t;
2936 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2937 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2938 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2940 error ("position plus size exceeds size of referenced object in "
2941 "BIT_FIELD_REF");
2942 return t;
2945 t = TREE_OPERAND (t, 0);
2947 /* Fall-through. */
2948 case COMPONENT_REF:
2949 case ARRAY_REF:
2950 case ARRAY_RANGE_REF:
2951 case VIEW_CONVERT_EXPR:
2952 /* We have a nest of references. Verify that each of the operands
2953 that determine where to reference is either a constant or a variable,
2954 verify that the base is valid, and then show we've already checked
2955 the subtrees. */
2956 while (handled_component_p (t))
2958 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2959 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2960 else if (TREE_CODE (t) == ARRAY_REF
2961 || TREE_CODE (t) == ARRAY_RANGE_REF)
2963 CHECK_OP (1, "invalid array index");
2964 if (TREE_OPERAND (t, 2))
2965 CHECK_OP (2, "invalid array lower bound");
2966 if (TREE_OPERAND (t, 3))
2967 CHECK_OP (3, "invalid array stride");
2969 else if (TREE_CODE (t) == BIT_FIELD_REF
2970 || TREE_CODE (t) == REALPART_EXPR
2971 || TREE_CODE (t) == IMAGPART_EXPR)
2973 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2974 "REALPART_EXPR");
2975 return t;
2978 t = TREE_OPERAND (t, 0);
2981 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2983 error ("invalid reference prefix");
2984 return t;
2986 *walk_subtrees = 0;
2987 break;
2988 case PLUS_EXPR:
2989 case MINUS_EXPR:
2990 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2991 should be done using POINTER_PLUS_EXPR. */
2992 if (POINTER_TYPE_P (TREE_TYPE (t)))
2994 error ("invalid operand to plus/minus, type is a pointer");
2995 return t;
2997 CHECK_OP (0, "invalid operand to binary operator");
2998 CHECK_OP (1, "invalid operand to binary operator");
2999 break;
3001 case POINTER_PLUS_EXPR:
3002 /* Check to make sure the first operand is a pointer or reference type. */
3003 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3005 error ("invalid operand to pointer plus, first operand is not a pointer");
3006 return t;
3008 /* Check to make sure the second operand is a ptrofftype. */
3009 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3011 error ("invalid operand to pointer plus, second operand is not an "
3012 "integer type of appropriate width");
3013 return t;
3015 /* FALLTHROUGH */
3016 case LT_EXPR:
3017 case LE_EXPR:
3018 case GT_EXPR:
3019 case GE_EXPR:
3020 case EQ_EXPR:
3021 case NE_EXPR:
3022 case UNORDERED_EXPR:
3023 case ORDERED_EXPR:
3024 case UNLT_EXPR:
3025 case UNLE_EXPR:
3026 case UNGT_EXPR:
3027 case UNGE_EXPR:
3028 case UNEQ_EXPR:
3029 case LTGT_EXPR:
3030 case MULT_EXPR:
3031 case TRUNC_DIV_EXPR:
3032 case CEIL_DIV_EXPR:
3033 case FLOOR_DIV_EXPR:
3034 case ROUND_DIV_EXPR:
3035 case TRUNC_MOD_EXPR:
3036 case CEIL_MOD_EXPR:
3037 case FLOOR_MOD_EXPR:
3038 case ROUND_MOD_EXPR:
3039 case RDIV_EXPR:
3040 case EXACT_DIV_EXPR:
3041 case MIN_EXPR:
3042 case MAX_EXPR:
3043 case LSHIFT_EXPR:
3044 case RSHIFT_EXPR:
3045 case LROTATE_EXPR:
3046 case RROTATE_EXPR:
3047 case BIT_IOR_EXPR:
3048 case BIT_XOR_EXPR:
3049 case BIT_AND_EXPR:
3050 CHECK_OP (0, "invalid operand to binary operator");
3051 CHECK_OP (1, "invalid operand to binary operator");
3052 break;
3054 case CONSTRUCTOR:
3055 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3056 *walk_subtrees = 0;
3057 break;
3059 case CASE_LABEL_EXPR:
3060 if (CASE_CHAIN (t))
3062 error ("invalid CASE_CHAIN");
3063 return t;
3065 break;
3067 default:
3068 break;
3070 return NULL;
3072 #undef CHECK_OP
3076 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3077 Returns true if there is an error, otherwise false. */
3079 static bool
3080 verify_types_in_gimple_min_lval (tree expr)
3082 tree op;
3084 if (is_gimple_id (expr))
3085 return false;
3087 if (TREE_CODE (expr) != TARGET_MEM_REF
3088 && TREE_CODE (expr) != MEM_REF)
3090 error ("invalid expression for min lvalue");
3091 return true;
3094 /* TARGET_MEM_REFs are strange beasts. */
3095 if (TREE_CODE (expr) == TARGET_MEM_REF)
3096 return false;
3098 op = TREE_OPERAND (expr, 0);
3099 if (!is_gimple_val (op))
3101 error ("invalid operand in indirect reference");
3102 debug_generic_stmt (op);
3103 return true;
3105 /* Memory references now generally can involve a value conversion. */
3107 return false;
3110 /* Verify if EXPR is a valid GIMPLE reference expression. If
3111 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3112 if there is an error, otherwise false. */
3114 static bool
3115 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3117 while (handled_component_p (expr))
3119 tree op = TREE_OPERAND (expr, 0);
3121 if (TREE_CODE (expr) == ARRAY_REF
3122 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3124 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3125 || (TREE_OPERAND (expr, 2)
3126 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3127 || (TREE_OPERAND (expr, 3)
3128 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3130 error ("invalid operands to array reference");
3131 debug_generic_stmt (expr);
3132 return true;
3136 /* Verify if the reference array element types are compatible. */
3137 if (TREE_CODE (expr) == ARRAY_REF
3138 && !useless_type_conversion_p (TREE_TYPE (expr),
3139 TREE_TYPE (TREE_TYPE (op))))
3141 error ("type mismatch in array reference");
3142 debug_generic_stmt (TREE_TYPE (expr));
3143 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3144 return true;
3146 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3147 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3148 TREE_TYPE (TREE_TYPE (op))))
3150 error ("type mismatch in array range reference");
3151 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3152 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3153 return true;
3156 if ((TREE_CODE (expr) == REALPART_EXPR
3157 || TREE_CODE (expr) == IMAGPART_EXPR)
3158 && !useless_type_conversion_p (TREE_TYPE (expr),
3159 TREE_TYPE (TREE_TYPE (op))))
3161 error ("type mismatch in real/imagpart reference");
3162 debug_generic_stmt (TREE_TYPE (expr));
3163 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3164 return true;
3167 if (TREE_CODE (expr) == COMPONENT_REF
3168 && !useless_type_conversion_p (TREE_TYPE (expr),
3169 TREE_TYPE (TREE_OPERAND (expr, 1))))
3171 error ("type mismatch in component reference");
3172 debug_generic_stmt (TREE_TYPE (expr));
3173 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3174 return true;
3177 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3179 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3180 that their operand is not an SSA name or an invariant when
3181 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3182 bug). Otherwise there is nothing to verify, gross mismatches at
3183 most invoke undefined behavior. */
3184 if (require_lvalue
3185 && (TREE_CODE (op) == SSA_NAME
3186 || is_gimple_min_invariant (op)))
3188 error ("conversion of an SSA_NAME on the left hand side");
3189 debug_generic_stmt (expr);
3190 return true;
3192 else if (TREE_CODE (op) == SSA_NAME
3193 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3195 error ("conversion of register to a different size");
3196 debug_generic_stmt (expr);
3197 return true;
3199 else if (!handled_component_p (op))
3200 return false;
3203 expr = op;
3206 if (TREE_CODE (expr) == MEM_REF)
3208 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3210 error ("invalid address operand in MEM_REF");
3211 debug_generic_stmt (expr);
3212 return true;
3214 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3215 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3217 error ("invalid offset operand in MEM_REF");
3218 debug_generic_stmt (expr);
3219 return true;
3222 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3224 if (!TMR_BASE (expr)
3225 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3227 error ("invalid address operand in TARGET_MEM_REF");
3228 return true;
3230 if (!TMR_OFFSET (expr)
3231 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3232 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3234 error ("invalid offset operand in TARGET_MEM_REF");
3235 debug_generic_stmt (expr);
3236 return true;
3240 return ((require_lvalue || !is_gimple_min_invariant (expr))
3241 && verify_types_in_gimple_min_lval (expr));
3244 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3245 list of pointer-to types that is trivially convertible to DEST. */
3247 static bool
3248 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3250 tree src;
3252 if (!TYPE_POINTER_TO (src_obj))
3253 return true;
3255 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3256 if (useless_type_conversion_p (dest, src))
3257 return true;
3259 return false;
3262 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3263 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3265 static bool
3266 valid_fixed_convert_types_p (tree type1, tree type2)
3268 return (FIXED_POINT_TYPE_P (type1)
3269 && (INTEGRAL_TYPE_P (type2)
3270 || SCALAR_FLOAT_TYPE_P (type2)
3271 || FIXED_POINT_TYPE_P (type2)));
3274 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3275 is a problem, otherwise false. */
3277 static bool
3278 verify_gimple_call (gcall *stmt)
3280 tree fn = gimple_call_fn (stmt);
3281 tree fntype, fndecl;
3282 unsigned i;
3284 if (gimple_call_internal_p (stmt))
3286 if (fn)
3288 error ("gimple call has two targets");
3289 debug_generic_stmt (fn);
3290 return true;
3293 else
3295 if (!fn)
3297 error ("gimple call has no target");
3298 return true;
3302 if (fn && !is_gimple_call_addr (fn))
3304 error ("invalid function in gimple call");
3305 debug_generic_stmt (fn);
3306 return true;
3309 if (fn
3310 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3311 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3312 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3314 error ("non-function in gimple call");
3315 return true;
3318 fndecl = gimple_call_fndecl (stmt);
3319 if (fndecl
3320 && TREE_CODE (fndecl) == FUNCTION_DECL
3321 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3322 && !DECL_PURE_P (fndecl)
3323 && !TREE_READONLY (fndecl))
3325 error ("invalid pure const state for function");
3326 return true;
3329 if (gimple_call_lhs (stmt)
3330 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3331 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3333 error ("invalid LHS in gimple call");
3334 return true;
3337 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3339 error ("LHS in noreturn call");
3340 return true;
3343 fntype = gimple_call_fntype (stmt);
3344 if (fntype
3345 && gimple_call_lhs (stmt)
3346 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3347 TREE_TYPE (fntype))
3348 /* ??? At least C++ misses conversions at assignments from
3349 void * call results.
3350 ??? Java is completely off. Especially with functions
3351 returning java.lang.Object.
3352 For now simply allow arbitrary pointer type conversions. */
3353 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3354 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3356 error ("invalid conversion in gimple call");
3357 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3358 debug_generic_stmt (TREE_TYPE (fntype));
3359 return true;
3362 if (gimple_call_chain (stmt)
3363 && !is_gimple_val (gimple_call_chain (stmt)))
3365 error ("invalid static chain in gimple call");
3366 debug_generic_stmt (gimple_call_chain (stmt));
3367 return true;
3370 /* If there is a static chain argument, the call should either be
3371 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3372 if (gimple_call_chain (stmt)
3373 && fndecl
3374 && !DECL_STATIC_CHAIN (fndecl))
3376 error ("static chain with function that doesn%'t use one");
3377 return true;
3380 /* ??? The C frontend passes unpromoted arguments in case it
3381 didn't see a function declaration before the call. So for now
3382 leave the call arguments mostly unverified. Once we gimplify
3383 unit-at-a-time we have a chance to fix this. */
3385 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3387 tree arg = gimple_call_arg (stmt, i);
3388 if ((is_gimple_reg_type (TREE_TYPE (arg))
3389 && !is_gimple_val (arg))
3390 || (!is_gimple_reg_type (TREE_TYPE (arg))
3391 && !is_gimple_lvalue (arg)))
3393 error ("invalid argument to gimple call");
3394 debug_generic_expr (arg);
3395 return true;
3399 return false;
3402 /* Verifies the gimple comparison with the result type TYPE and
3403 the operands OP0 and OP1. */
3405 static bool
3406 verify_gimple_comparison (tree type, tree op0, tree op1)
3408 tree op0_type = TREE_TYPE (op0);
3409 tree op1_type = TREE_TYPE (op1);
3411 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3413 error ("invalid operands in gimple comparison");
3414 return true;
3417 /* For comparisons we do not have the operation's type as the
3418 effective type the comparison is carried out in. Instead
3419 we require that either the first operand is trivially
3420 convertible into the second, or the other way around.
3421 Because we special-case pointers to void we allow
3422 comparisons of pointers with the same mode as well. */
3423 if (!useless_type_conversion_p (op0_type, op1_type)
3424 && !useless_type_conversion_p (op1_type, op0_type)
3425 && (!POINTER_TYPE_P (op0_type)
3426 || !POINTER_TYPE_P (op1_type)
3427 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3429 error ("mismatching comparison operand types");
3430 debug_generic_expr (op0_type);
3431 debug_generic_expr (op1_type);
3432 return true;
3435 /* The resulting type of a comparison may be an effective boolean type. */
3436 if (INTEGRAL_TYPE_P (type)
3437 && (TREE_CODE (type) == BOOLEAN_TYPE
3438 || TYPE_PRECISION (type) == 1))
3440 if (TREE_CODE (op0_type) == VECTOR_TYPE
3441 || TREE_CODE (op1_type) == VECTOR_TYPE)
3443 error ("vector comparison returning a boolean");
3444 debug_generic_expr (op0_type);
3445 debug_generic_expr (op1_type);
3446 return true;
3449 /* Or an integer vector type with the same size and element count
3450 as the comparison operand types. */
3451 else if (TREE_CODE (type) == VECTOR_TYPE
3452 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3454 if (TREE_CODE (op0_type) != VECTOR_TYPE
3455 || TREE_CODE (op1_type) != VECTOR_TYPE)
3457 error ("non-vector operands in vector comparison");
3458 debug_generic_expr (op0_type);
3459 debug_generic_expr (op1_type);
3460 return true;
3463 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3464 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3465 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3466 /* The result of a vector comparison is of signed
3467 integral type. */
3468 || TYPE_UNSIGNED (TREE_TYPE (type)))
3470 error ("invalid vector comparison resulting type");
3471 debug_generic_expr (type);
3472 return true;
3475 else
3477 error ("bogus comparison result type");
3478 debug_generic_expr (type);
3479 return true;
3482 return false;
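/* For illustration (hand-written GIMPLE, not verifier output): with
   scalar ints, a statement such as

     _1 = a_2 < b_3;

   may give _1 a BOOLEAN_TYPE or a 1-bit integral result, while a
   comparison of two V4SI vector operands must produce a signed integer
   vector result with the same element count and element size as the
   operands.  */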
3485 /* Verify a gimple assignment statement STMT with a unary rhs.
3486 Returns true if anything is wrong. */
3488 static bool
3489 verify_gimple_assign_unary (gassign *stmt)
3491 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3492 tree lhs = gimple_assign_lhs (stmt);
3493 tree lhs_type = TREE_TYPE (lhs);
3494 tree rhs1 = gimple_assign_rhs1 (stmt);
3495 tree rhs1_type = TREE_TYPE (rhs1);
3497 if (!is_gimple_reg (lhs))
3499 error ("non-register as LHS of unary operation");
3500 return true;
3503 if (!is_gimple_val (rhs1))
3505 error ("invalid operand in unary operation");
3506 return true;
3509 /* First handle conversions. */
3510 switch (rhs_code)
3512 CASE_CONVERT:
3514 /* Allow conversions from pointer type to integral type only if
3515 there is no sign or zero extension involved.
3516 For targets where the precision of ptrofftype doesn't match that
3517 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3518 if ((POINTER_TYPE_P (lhs_type)
3519 && INTEGRAL_TYPE_P (rhs1_type))
3520 || (POINTER_TYPE_P (rhs1_type)
3521 && INTEGRAL_TYPE_P (lhs_type)
3522 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3523 || ptrofftype_p (sizetype))))
3524 return false;
3526 /* Allow conversion from integral to offset type and vice versa. */
3527 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3528 && INTEGRAL_TYPE_P (rhs1_type))
3529 || (INTEGRAL_TYPE_P (lhs_type)
3530 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3531 return false;
3533 /* Otherwise assert we are converting between types of the
3534 same kind. */
3535 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3537 error ("invalid types in nop conversion");
3538 debug_generic_expr (lhs_type);
3539 debug_generic_expr (rhs1_type);
3540 return true;
3543 return false;
3546 case ADDR_SPACE_CONVERT_EXPR:
3548 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3549 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3550 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3552 error ("invalid types in address space conversion");
3553 debug_generic_expr (lhs_type);
3554 debug_generic_expr (rhs1_type);
3555 return true;
3558 return false;
3561 case FIXED_CONVERT_EXPR:
3563 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3564 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3566 error ("invalid types in fixed-point conversion");
3567 debug_generic_expr (lhs_type);
3568 debug_generic_expr (rhs1_type);
3569 return true;
3572 return false;
3575 case FLOAT_EXPR:
3577 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3578 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3579 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3581 error ("invalid types in conversion to floating point");
3582 debug_generic_expr (lhs_type);
3583 debug_generic_expr (rhs1_type);
3584 return true;
3587 return false;
3590 case FIX_TRUNC_EXPR:
3592 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3593 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3594 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3596 error ("invalid types in conversion to integer");
3597 debug_generic_expr (lhs_type);
3598 debug_generic_expr (rhs1_type);
3599 return true;
3602 return false;
3604 case REDUC_MAX_EXPR:
3605 case REDUC_MIN_EXPR:
3606 case REDUC_PLUS_EXPR:
3607 if (!VECTOR_TYPE_P (rhs1_type)
3608 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3610 error ("reduction should convert from vector to element type");
3611 debug_generic_expr (lhs_type);
3612 debug_generic_expr (rhs1_type);
3613 return true;
3615 return false;
3617 case VEC_UNPACK_HI_EXPR:
3618 case VEC_UNPACK_LO_EXPR:
3619 case VEC_UNPACK_FLOAT_HI_EXPR:
3620 case VEC_UNPACK_FLOAT_LO_EXPR:
3621 /* FIXME. */
3622 return false;
3624 case NEGATE_EXPR:
3625 case ABS_EXPR:
3626 case BIT_NOT_EXPR:
3627 case PAREN_EXPR:
3628 case CONJ_EXPR:
3629 break;
3631 default:
3632 gcc_unreachable ();
3635 /* For the remaining codes assert there is no conversion involved. */
3636 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3638 error ("non-trivial conversion in unary operation");
3639 debug_generic_expr (lhs_type);
3640 debug_generic_expr (rhs1_type);
3641 return true;
3644 return false;
3647 /* Verify a gimple assignment statement STMT with a binary rhs.
3648 Returns true if anything is wrong. */
3650 static bool
3651 verify_gimple_assign_binary (gassign *stmt)
3653 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3654 tree lhs = gimple_assign_lhs (stmt);
3655 tree lhs_type = TREE_TYPE (lhs);
3656 tree rhs1 = gimple_assign_rhs1 (stmt);
3657 tree rhs1_type = TREE_TYPE (rhs1);
3658 tree rhs2 = gimple_assign_rhs2 (stmt);
3659 tree rhs2_type = TREE_TYPE (rhs2);
3661 if (!is_gimple_reg (lhs))
3663 error ("non-register as LHS of binary operation");
3664 return true;
3667 if (!is_gimple_val (rhs1)
3668 || !is_gimple_val (rhs2))
3670 error ("invalid operands in binary operation");
3671 return true;
3674 /* First handle operations that involve different types. */
3675 switch (rhs_code)
3677 case COMPLEX_EXPR:
3679 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3680 || !(INTEGRAL_TYPE_P (rhs1_type)
3681 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3682 || !(INTEGRAL_TYPE_P (rhs2_type)
3683 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3685 error ("type mismatch in complex expression");
3686 debug_generic_expr (lhs_type);
3687 debug_generic_expr (rhs1_type);
3688 debug_generic_expr (rhs2_type);
3689 return true;
3692 return false;
3695 case LSHIFT_EXPR:
3696 case RSHIFT_EXPR:
3697 case LROTATE_EXPR:
3698 case RROTATE_EXPR:
3700 /* Shifts and rotates are ok on integral types, fixed-point
3701 types and integer vector types.
3702 if ((!INTEGRAL_TYPE_P (rhs1_type)
3703 && !FIXED_POINT_TYPE_P (rhs1_type)
3704 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3705 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3706 || (!INTEGRAL_TYPE_P (rhs2_type)
3707 /* Vector shifts of vectors are also ok. */
3708 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3709 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3710 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3711 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3712 || !useless_type_conversion_p (lhs_type, rhs1_type))
3714 error ("type mismatch in shift expression");
3715 debug_generic_expr (lhs_type);
3716 debug_generic_expr (rhs1_type);
3717 debug_generic_expr (rhs2_type);
3718 return true;
3721 return false;
3724 case WIDEN_LSHIFT_EXPR:
3726 if (!INTEGRAL_TYPE_P (lhs_type)
3727 || !INTEGRAL_TYPE_P (rhs1_type)
3728 || TREE_CODE (rhs2) != INTEGER_CST
3729 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3731 error ("type mismatch in widening vector shift expression");
3732 debug_generic_expr (lhs_type);
3733 debug_generic_expr (rhs1_type);
3734 debug_generic_expr (rhs2_type);
3735 return true;
3738 return false;
3741 case VEC_WIDEN_LSHIFT_HI_EXPR:
3742 case VEC_WIDEN_LSHIFT_LO_EXPR:
3744 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3745 || TREE_CODE (lhs_type) != VECTOR_TYPE
3746 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3747 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3748 || TREE_CODE (rhs2) != INTEGER_CST
3749 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3750 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3752 error ("type mismatch in widening vector shift expression");
3753 debug_generic_expr (lhs_type);
3754 debug_generic_expr (rhs1_type);
3755 debug_generic_expr (rhs2_type);
3756 return true;
3759 return false;
3762 case PLUS_EXPR:
3763 case MINUS_EXPR:
3765 tree lhs_etype = lhs_type;
3766 tree rhs1_etype = rhs1_type;
3767 tree rhs2_etype = rhs2_type;
3768 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3770 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3771 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3773 error ("invalid non-vector operands to vector valued plus");
3774 return true;
3776 lhs_etype = TREE_TYPE (lhs_type);
3777 rhs1_etype = TREE_TYPE (rhs1_type);
3778 rhs2_etype = TREE_TYPE (rhs2_type);
3780 if (POINTER_TYPE_P (lhs_etype)
3781 || POINTER_TYPE_P (rhs1_etype)
3782 || POINTER_TYPE_P (rhs2_etype))
3784 error ("invalid (pointer) operands to plus/minus");
3785 return true;
3788 /* Continue with generic binary expression handling. */
3789 break;
3792 case POINTER_PLUS_EXPR:
3794 if (!POINTER_TYPE_P (rhs1_type)
3795 || !useless_type_conversion_p (lhs_type, rhs1_type)
3796 || !ptrofftype_p (rhs2_type))
3798 error ("type mismatch in pointer plus expression");
3799 debug_generic_stmt (lhs_type);
3800 debug_generic_stmt (rhs1_type);
3801 debug_generic_stmt (rhs2_type);
3802 return true;
3805 return false;
3808 case TRUTH_ANDIF_EXPR:
3809 case TRUTH_ORIF_EXPR:
3810 case TRUTH_AND_EXPR:
3811 case TRUTH_OR_EXPR:
3812 case TRUTH_XOR_EXPR:
3814 gcc_unreachable ();
3816 case LT_EXPR:
3817 case LE_EXPR:
3818 case GT_EXPR:
3819 case GE_EXPR:
3820 case EQ_EXPR:
3821 case NE_EXPR:
3822 case UNORDERED_EXPR:
3823 case ORDERED_EXPR:
3824 case UNLT_EXPR:
3825 case UNLE_EXPR:
3826 case UNGT_EXPR:
3827 case UNGE_EXPR:
3828 case UNEQ_EXPR:
3829 case LTGT_EXPR:
3830 /* Comparisons are also binary, but the result type is not
3831 connected to the operand types. */
3832 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3834 case WIDEN_MULT_EXPR:
3835 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3836 return true;
3837 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3838 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3840 case WIDEN_SUM_EXPR:
3841 case VEC_WIDEN_MULT_HI_EXPR:
3842 case VEC_WIDEN_MULT_LO_EXPR:
3843 case VEC_WIDEN_MULT_EVEN_EXPR:
3844 case VEC_WIDEN_MULT_ODD_EXPR:
3845 case VEC_PACK_TRUNC_EXPR:
3846 case VEC_PACK_SAT_EXPR:
3847 case VEC_PACK_FIX_TRUNC_EXPR:
3848 /* FIXME. */
3849 return false;
3851 case MULT_EXPR:
3852 case MULT_HIGHPART_EXPR:
3853 case TRUNC_DIV_EXPR:
3854 case CEIL_DIV_EXPR:
3855 case FLOOR_DIV_EXPR:
3856 case ROUND_DIV_EXPR:
3857 case TRUNC_MOD_EXPR:
3858 case CEIL_MOD_EXPR:
3859 case FLOOR_MOD_EXPR:
3860 case ROUND_MOD_EXPR:
3861 case RDIV_EXPR:
3862 case EXACT_DIV_EXPR:
3863 case MIN_EXPR:
3864 case MAX_EXPR:
3865 case BIT_IOR_EXPR:
3866 case BIT_XOR_EXPR:
3867 case BIT_AND_EXPR:
3868 /* Continue with generic binary expression handling. */
3869 break;
3871 default:
3872 gcc_unreachable ();
3875 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3876 || !useless_type_conversion_p (lhs_type, rhs2_type))
3878 error ("type mismatch in binary expression");
3879 debug_generic_stmt (lhs_type);
3880 debug_generic_stmt (rhs1_type);
3881 debug_generic_stmt (rhs2_type);
3882 return true;
3885 return false;
3888 /* Verify a gimple assignment statement STMT with a ternary rhs.
3889 Returns true if anything is wrong. */
3891 static bool
3892 verify_gimple_assign_ternary (gassign *stmt)
3894 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3895 tree lhs = gimple_assign_lhs (stmt);
3896 tree lhs_type = TREE_TYPE (lhs);
3897 tree rhs1 = gimple_assign_rhs1 (stmt);
3898 tree rhs1_type = TREE_TYPE (rhs1);
3899 tree rhs2 = gimple_assign_rhs2 (stmt);
3900 tree rhs2_type = TREE_TYPE (rhs2);
3901 tree rhs3 = gimple_assign_rhs3 (stmt);
3902 tree rhs3_type = TREE_TYPE (rhs3);
3904 if (!is_gimple_reg (lhs))
3906 error ("non-register as LHS of ternary operation");
3907 return true;
3910 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3911 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3912 || !is_gimple_val (rhs2)
3913 || !is_gimple_val (rhs3))
3915 error ("invalid operands in ternary operation");
3916 return true;
3919 /* First handle operations that involve different types. */
3920 switch (rhs_code)
3922 case WIDEN_MULT_PLUS_EXPR:
3923 case WIDEN_MULT_MINUS_EXPR:
3924 if ((!INTEGRAL_TYPE_P (rhs1_type)
3925 && !FIXED_POINT_TYPE_P (rhs1_type))
3926 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3927 || !useless_type_conversion_p (lhs_type, rhs3_type)
3928 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3929 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3931 error ("type mismatch in widening multiply-accumulate expression");
3932 debug_generic_expr (lhs_type);
3933 debug_generic_expr (rhs1_type);
3934 debug_generic_expr (rhs2_type);
3935 debug_generic_expr (rhs3_type);
3936 return true;
3938 break;
3940 case FMA_EXPR:
3941 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3942 || !useless_type_conversion_p (lhs_type, rhs2_type)
3943 || !useless_type_conversion_p (lhs_type, rhs3_type))
3945 error ("type mismatch in fused multiply-add expression");
3946 debug_generic_expr (lhs_type);
3947 debug_generic_expr (rhs1_type);
3948 debug_generic_expr (rhs2_type);
3949 debug_generic_expr (rhs3_type);
3950 return true;
3952 break;
3954 case COND_EXPR:
3955 case VEC_COND_EXPR:
3956 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3957 || !useless_type_conversion_p (lhs_type, rhs3_type))
3959 error ("type mismatch in conditional expression");
3960 debug_generic_expr (lhs_type);
3961 debug_generic_expr (rhs2_type);
3962 debug_generic_expr (rhs3_type);
3963 return true;
3965 break;
3967 case VEC_PERM_EXPR:
3968 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3969 || !useless_type_conversion_p (lhs_type, rhs2_type))
3971 error ("type mismatch in vector permute expression");
3972 debug_generic_expr (lhs_type);
3973 debug_generic_expr (rhs1_type);
3974 debug_generic_expr (rhs2_type);
3975 debug_generic_expr (rhs3_type);
3976 return true;
3979 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3980 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3981 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3983 error ("vector types expected in vector permute expression");
3984 debug_generic_expr (lhs_type);
3985 debug_generic_expr (rhs1_type);
3986 debug_generic_expr (rhs2_type);
3987 debug_generic_expr (rhs3_type);
3988 return true;
3991 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3992 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3993 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3994 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3995 != TYPE_VECTOR_SUBPARTS (lhs_type))
3997 error ("vectors with different element number found "
3998 "in vector permute expression");
3999 debug_generic_expr (lhs_type);
4000 debug_generic_expr (rhs1_type);
4001 debug_generic_expr (rhs2_type);
4002 debug_generic_expr (rhs3_type);
4003 return true;
4006 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4007 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4008 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4010 error ("invalid mask type in vector permute expression");
4011 debug_generic_expr (lhs_type);
4012 debug_generic_expr (rhs1_type);
4013 debug_generic_expr (rhs2_type);
4014 debug_generic_expr (rhs3_type);
4015 return true;
4018 return false;
4020 case SAD_EXPR:
4021 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4022 || !useless_type_conversion_p (lhs_type, rhs3_type)
4023 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4024 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4025 > GET_MODE_BITSIZE (GET_MODE_INNER
4026 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4028 error ("type mismatch in sad expression");
4029 debug_generic_expr (lhs_type);
4030 debug_generic_expr (rhs1_type);
4031 debug_generic_expr (rhs2_type);
4032 debug_generic_expr (rhs3_type);
4033 return true;
4036 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4037 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4038 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4040 error ("vector types expected in sad expression");
4041 debug_generic_expr (lhs_type);
4042 debug_generic_expr (rhs1_type);
4043 debug_generic_expr (rhs2_type);
4044 debug_generic_expr (rhs3_type);
4045 return true;
4048 return false;
4050 case DOT_PROD_EXPR:
4051 case REALIGN_LOAD_EXPR:
4052 /* FIXME. */
4053 return false;
4055 default:
4056 gcc_unreachable ();
4058 return false;
4061 /* Verify a gimple assignment statement STMT with a single rhs.
4062 Returns true if anything is wrong. */
4064 static bool
4065 verify_gimple_assign_single (gassign *stmt)
4067 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4068 tree lhs = gimple_assign_lhs (stmt);
4069 tree lhs_type = TREE_TYPE (lhs);
4070 tree rhs1 = gimple_assign_rhs1 (stmt);
4071 tree rhs1_type = TREE_TYPE (rhs1);
4072 bool res = false;
4074 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4076 error ("non-trivial conversion at assignment");
4077 debug_generic_expr (lhs_type);
4078 debug_generic_expr (rhs1_type);
4079 return true;
4082 if (gimple_clobber_p (stmt)
4083 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4085 error ("non-decl/MEM_REF LHS in clobber statement");
4086 debug_generic_expr (lhs);
4087 return true;
4090 if (handled_component_p (lhs)
4091 || TREE_CODE (lhs) == MEM_REF
4092 || TREE_CODE (lhs) == TARGET_MEM_REF)
4093 res |= verify_types_in_gimple_reference (lhs, true);
4095 /* Special codes we cannot handle via their class. */
4096 switch (rhs_code)
4098 case ADDR_EXPR:
4100 tree op = TREE_OPERAND (rhs1, 0);
4101 if (!is_gimple_addressable (op))
4103 error ("invalid operand in unary expression");
4104 return true;
4107 /* Technically there is no longer a need for matching types, but
4108 gimple hygiene asks for this check. In LTO we can end up
4109 combining incompatible units and thus end up with addresses
4110 of globals that change their type to a common one. */
4111 if (!in_lto_p
4112 && !types_compatible_p (TREE_TYPE (op),
4113 TREE_TYPE (TREE_TYPE (rhs1)))
4114 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4115 TREE_TYPE (op)))
4117 error ("type mismatch in address expression");
4118 debug_generic_stmt (TREE_TYPE (rhs1));
4119 debug_generic_stmt (TREE_TYPE (op));
4120 return true;
4123 return verify_types_in_gimple_reference (op, true);
4126 /* tcc_reference */
4127 case INDIRECT_REF:
4128 error ("INDIRECT_REF in gimple IL");
4129 return true;
4131 case COMPONENT_REF:
4132 case BIT_FIELD_REF:
4133 case ARRAY_REF:
4134 case ARRAY_RANGE_REF:
4135 case VIEW_CONVERT_EXPR:
4136 case REALPART_EXPR:
4137 case IMAGPART_EXPR:
4138 case TARGET_MEM_REF:
4139 case MEM_REF:
4140 if (!is_gimple_reg (lhs)
4141 && is_gimple_reg_type (TREE_TYPE (lhs)))
4143 error ("invalid rhs for gimple memory store");
4144 debug_generic_stmt (lhs);
4145 debug_generic_stmt (rhs1);
4146 return true;
4148 return res || verify_types_in_gimple_reference (rhs1, false);
4150 /* tcc_constant */
4151 case SSA_NAME:
4152 case INTEGER_CST:
4153 case REAL_CST:
4154 case FIXED_CST:
4155 case COMPLEX_CST:
4156 case VECTOR_CST:
4157 case STRING_CST:
4158 return res;
4160 /* tcc_declaration */
4161 case CONST_DECL:
4162 return res;
4163 case VAR_DECL:
4164 case PARM_DECL:
4165 if (!is_gimple_reg (lhs)
4166 && !is_gimple_reg (rhs1)
4167 && is_gimple_reg_type (TREE_TYPE (lhs)))
4169 error ("invalid rhs for gimple memory store");
4170 debug_generic_stmt (lhs);
4171 debug_generic_stmt (rhs1);
4172 return true;
4174 return res;
4176 case CONSTRUCTOR:
4177 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4179 unsigned int i;
4180 tree elt_i, elt_v, elt_t = NULL_TREE;
4182 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4183 return res;
4184 /* For vector CONSTRUCTORs we require that either it is an empty
4185 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4186 (then the element count must be correct to cover the whole
4187 outer vector and the index must be NULL on all elements), or it is
4188 a CONSTRUCTOR of scalar elements, where as an exception we allow
4189 a smaller number of elements (assuming zero filling) and
4190 consecutive indexes instead of NULL indexes (such
4191 CONSTRUCTORs can appear in the IL from front ends). */
4192 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4194 if (elt_t == NULL_TREE)
4196 elt_t = TREE_TYPE (elt_v);
4197 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4199 tree elt_t = TREE_TYPE (elt_v);
4200 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4201 TREE_TYPE (elt_t)))
4203 error ("incorrect type of vector CONSTRUCTOR"
4204 " elements");
4205 debug_generic_stmt (rhs1);
4206 return true;
4208 else if (CONSTRUCTOR_NELTS (rhs1)
4209 * TYPE_VECTOR_SUBPARTS (elt_t)
4210 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4212 error ("incorrect number of vector CONSTRUCTOR"
4213 " elements");
4214 debug_generic_stmt (rhs1);
4215 return true;
4218 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4219 elt_t))
4221 error ("incorrect type of vector CONSTRUCTOR elements");
4222 debug_generic_stmt (rhs1);
4223 return true;
4225 else if (CONSTRUCTOR_NELTS (rhs1)
4226 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4228 error ("incorrect number of vector CONSTRUCTOR elements");
4229 debug_generic_stmt (rhs1);
4230 return true;
4233 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4235 error ("incorrect type of vector CONSTRUCTOR elements");
4236 debug_generic_stmt (rhs1);
4237 return true;
4239 if (elt_i != NULL_TREE
4240 && (TREE_CODE (elt_t) == VECTOR_TYPE
4241 || TREE_CODE (elt_i) != INTEGER_CST
4242 || compare_tree_int (elt_i, i) != 0))
4244 error ("vector CONSTRUCTOR with non-NULL element index");
4245 debug_generic_stmt (rhs1);
4246 return true;
4248 if (!is_gimple_val (elt_v))
4250 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4251 debug_generic_stmt (rhs1);
4252 return true;
4256 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4258 error ("non-vector CONSTRUCTOR with elements");
4259 debug_generic_stmt (rhs1);
4260 return true;
4262 return res;
4263 case OBJ_TYPE_REF:
4264 case ASSERT_EXPR:
4265 case WITH_SIZE_EXPR:
4266 /* FIXME. */
4267 return res;
4269 default:;
4272 return res;
4275 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4276 is a problem, otherwise false. */
4278 static bool
4279 verify_gimple_assign (gassign *stmt)
4281 switch (gimple_assign_rhs_class (stmt))
4283 case GIMPLE_SINGLE_RHS:
4284 return verify_gimple_assign_single (stmt);
4286 case GIMPLE_UNARY_RHS:
4287 return verify_gimple_assign_unary (stmt);
4289 case GIMPLE_BINARY_RHS:
4290 return verify_gimple_assign_binary (stmt);
4292 case GIMPLE_TERNARY_RHS:
4293 return verify_gimple_assign_ternary (stmt);
4295 default:
4296 gcc_unreachable ();
4300 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4301 is a problem, otherwise false. */
4303 static bool
4304 verify_gimple_return (greturn *stmt)
4306 tree op = gimple_return_retval (stmt);
4307 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4309 /* We cannot test for present return values as we do not fix up missing
4310 return values from the original source. */
4311 if (op == NULL)
4312 return false;
4314 if (!is_gimple_val (op)
4315 && TREE_CODE (op) != RESULT_DECL)
4317 error ("invalid operand in return statement");
4318 debug_generic_stmt (op);
4319 return true;
4322 if ((TREE_CODE (op) == RESULT_DECL
4323 && DECL_BY_REFERENCE (op))
4324 || (TREE_CODE (op) == SSA_NAME
4325 && SSA_NAME_VAR (op)
4326 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4327 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4328 op = TREE_TYPE (op);
4330 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4332 error ("invalid conversion in return statement");
4333 debug_generic_stmt (restype);
4334 debug_generic_stmt (TREE_TYPE (op));
4335 return true;
4338 return false;
4342 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4343 is a problem, otherwise false. */
4345 static bool
4346 verify_gimple_goto (ggoto *stmt)
4348 tree dest = gimple_goto_dest (stmt);
4350 /* ??? We have two canonical forms of direct goto destinations, a
4351 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4352 if (TREE_CODE (dest) != LABEL_DECL
4353 && (!is_gimple_val (dest)
4354 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4356 error ("goto destination is neither a label nor a pointer");
4357 return true;
4360 return false;
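/* Illustration (not in the upstream source): both canonical forms
   come from the two kinds of gotos.  In GNU C,

     void f (int i)
     {
       void *targets[] = { &&a, &&b };
       goto *targets[i & 1];
      a: return;
      b: return;
     }

   the computed goto reaches this verifier with DEST being a GIMPLE
   value of pointer type, which the POINTER_TYPE_P test accepts,
   while a plain "goto a;" arrives as a bare LABEL_DECL.  */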
4363 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4364 is a problem, otherwise false. */
4366 static bool
4367 verify_gimple_switch (gswitch *stmt)
4369 unsigned int i, n;
4370 tree elt, prev_upper_bound = NULL_TREE;
4371 tree index_type, elt_type = NULL_TREE;
4373 if (!is_gimple_val (gimple_switch_index (stmt)))
4375 error ("invalid operand to switch statement");
4376 debug_generic_stmt (gimple_switch_index (stmt));
4377 return true;
4380 index_type = TREE_TYPE (gimple_switch_index (stmt));
4381 if (! INTEGRAL_TYPE_P (index_type))
4383 error ("non-integral type switch statement");
4384 debug_generic_expr (index_type);
4385 return true;
4388 elt = gimple_switch_label (stmt, 0);
4389 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4391 error ("invalid default case label in switch statement");
4392 debug_generic_expr (elt);
4393 return true;
4396 n = gimple_switch_num_labels (stmt);
4397 for (i = 1; i < n; i++)
4399 elt = gimple_switch_label (stmt, i);
4401 if (! CASE_LOW (elt))
4403 error ("invalid case label in switch statement");
4404 debug_generic_expr (elt);
4405 return true;
4407 if (CASE_HIGH (elt)
4408 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4410 error ("invalid case range in switch statement");
4411 debug_generic_expr (elt);
4412 return true;
4415 if (elt_type)
4417 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4418 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4420 error ("type mismatch for case label in switch statement");
4421 debug_generic_expr (elt);
4422 return true;
4425 else
4427 elt_type = TREE_TYPE (CASE_LOW (elt));
4428 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4430 error ("type precision mismatch in switch statement");
4431 return true;
4435 if (prev_upper_bound)
4437 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4439 error ("case labels not sorted in switch statement");
4440 return true;
4444 prev_upper_bound = CASE_HIGH (elt);
4445 if (! prev_upper_bound)
4446 prev_upper_bound = CASE_LOW (elt);
4449 return false;
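/* Illustration (not in the upstream source): for the GNU C switch

     switch (x)
       {
       default: break;
       case 1 ... 4: break;
       case 7: break;
       }

   label 0 is the default (CASE_LOW and CASE_HIGH both NULL), the
   range gives CASE_LOW 1 and CASE_HIGH 4 with 1 < 4 as required,
   and the single case 7 has a NULL CASE_HIGH.  The loop above also
   checks that all labels share one type whose precision does not
   exceed that of x, and that CASE_LOW 7 sorts after the previous
   upper bound 4.  */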
4452 /* Verify a gimple debug statement STMT.
4453 Returns true if anything is wrong. */
4455 static bool
4456 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4458 /* There isn't much that could be wrong in a gimple debug stmt. A
4459 gimple debug bind stmt, for example, maps a tree (usually a
4460 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4461 or member of an aggregate type) to another tree that can be an
4462 arbitrary expression. These stmts expand into debug insns and
4463 are converted to debug notes by var-tracking.c. */
4464 return false;
4467 /* Verify a gimple label statement STMT.
4468 Returns true if anything is wrong. */
4470 static bool
4471 verify_gimple_label (glabel *stmt)
4473 tree decl = gimple_label_label (stmt);
4474 int uid;
4475 bool err = false;
4477 if (TREE_CODE (decl) != LABEL_DECL)
4478 return true;
4479 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4480 && DECL_CONTEXT (decl) != current_function_decl)
4482 error ("label's context is not the current function decl");
4483 err |= true;
4486 uid = LABEL_DECL_UID (decl);
4487 if (cfun->cfg
4488 && (uid == -1
4489 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4491 error ("incorrect entry in label_to_block_map");
4492 err |= true;
4495 uid = EH_LANDING_PAD_NR (decl);
4496 if (uid)
4498 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4499 if (decl != lp->post_landing_pad)
4501 error ("incorrect setting of landing pad number");
4502 err |= true;
4506 return err;
4509 /* Verify a gimple cond statement STMT.
4510 Returns true if anything is wrong. */
4512 static bool
4513 verify_gimple_cond (gcond *stmt)
4515 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4517 error ("invalid comparison code in gimple cond");
4518 return true;
4520 if (!(!gimple_cond_true_label (stmt)
4521 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4522 || !(!gimple_cond_false_label (stmt)
4523 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4525 error ("invalid labels in gimple cond");
4526 return true;
4529 return verify_gimple_comparison (boolean_type_node,
4530 gimple_cond_lhs (stmt),
4531 gimple_cond_rhs (stmt));
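/* Illustration (not in the upstream source): a conditional such as

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   passes these checks because LT_EXPR is tcc_comparison, the labels
   are NULL once the CFG owns the edges, and the comparison of a_1
   with b_2 must be valid when evaluated at boolean_type_node.  */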
4534 /* Verify the GIMPLE statement STMT. Returns true if there is an
4535 error, otherwise false. */
4537 static bool
4538 verify_gimple_stmt (gimple stmt)
4540 switch (gimple_code (stmt))
4542 case GIMPLE_ASSIGN:
4543 return verify_gimple_assign (as_a <gassign *> (stmt));
4545 case GIMPLE_LABEL:
4546 return verify_gimple_label (as_a <glabel *> (stmt));
4548 case GIMPLE_CALL:
4549 return verify_gimple_call (as_a <gcall *> (stmt));
4551 case GIMPLE_COND:
4552 return verify_gimple_cond (as_a <gcond *> (stmt));
4554 case GIMPLE_GOTO:
4555 return verify_gimple_goto (as_a <ggoto *> (stmt));
4557 case GIMPLE_SWITCH:
4558 return verify_gimple_switch (as_a <gswitch *> (stmt));
4560 case GIMPLE_RETURN:
4561 return verify_gimple_return (as_a <greturn *> (stmt));
4563 case GIMPLE_ASM:
4564 return false;
4566 case GIMPLE_TRANSACTION:
4567 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4569 /* Tuples that do not have tree operands. */
4570 case GIMPLE_NOP:
4571 case GIMPLE_PREDICT:
4572 case GIMPLE_RESX:
4573 case GIMPLE_EH_DISPATCH:
4574 case GIMPLE_EH_MUST_NOT_THROW:
4575 return false;
4577 CASE_GIMPLE_OMP:
4578 /* OpenMP directives are validated by the FE and never operated
4579 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4580 non-gimple expressions when the main index variable has had
4581 its address taken. This does not affect the loop itself
4582 because the header of a GIMPLE_OMP_FOR is merely used to determine
4583 how to set up the parallel iteration. */
4584 return false;
4586 case GIMPLE_DEBUG:
4587 return verify_gimple_debug (stmt);
4589 default:
4590 gcc_unreachable ();
4594 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4595 and false otherwise. */
4597 static bool
4598 verify_gimple_phi (gimple phi)
4600 bool err = false;
4601 unsigned i;
4602 tree phi_result = gimple_phi_result (phi);
4603 bool virtual_p;
4605 if (!phi_result)
4607 error ("invalid PHI result");
4608 return true;
4611 virtual_p = virtual_operand_p (phi_result);
4612 if (TREE_CODE (phi_result) != SSA_NAME
4613 || (virtual_p
4614 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4616 error ("invalid PHI result");
4617 err = true;
4620 for (i = 0; i < gimple_phi_num_args (phi); i++)
4622 tree t = gimple_phi_arg_def (phi, i);
4624 if (!t)
4626 error ("missing PHI def");
4627 err |= true;
4628 continue;
4630 /* Addressable variables do have SSA_NAMEs but they
4631 are not considered gimple values. */
4632 else if ((TREE_CODE (t) == SSA_NAME
4633 && virtual_p != virtual_operand_p (t))
4634 || (virtual_p
4635 && (TREE_CODE (t) != SSA_NAME
4636 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4637 || (!virtual_p
4638 && !is_gimple_val (t)))
4640 error ("invalid PHI argument");
4641 debug_generic_expr (t);
4642 err |= true;
4644 #ifdef ENABLE_TYPES_CHECKING
4645 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4647 error ("incompatible types in PHI argument %u", i);
4648 debug_generic_stmt (TREE_TYPE (phi_result));
4649 debug_generic_stmt (TREE_TYPE (t));
4650 err |= true;
4652 #endif
4655 return err;
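/* Illustration (not in the upstream source): for a scalar PHI

     # x_3 = PHI <x_1(2), x_2(4)>

   the result and all arguments are non-virtual, so every argument
   must be a GIMPLE value trivially convertible to the type of x_3.
   A virtual PHI

     # .MEM_5 = PHI <.MEM_1(2), .MEM_4(4)>

   must instead use the function's single virtual operand, checked
   here via gimple_vop (cfun), for the result and every argument.  */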
4658 /* Verify the GIMPLE statements inside the sequence STMTS. */
4660 static bool
4661 verify_gimple_in_seq_2 (gimple_seq stmts)
4663 gimple_stmt_iterator ittr;
4664 bool err = false;
4666 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4668 gimple stmt = gsi_stmt (ittr);
4670 switch (gimple_code (stmt))
4672 case GIMPLE_BIND:
4673 err |= verify_gimple_in_seq_2 (
4674 gimple_bind_body (as_a <gbind *> (stmt)));
4675 break;
4677 case GIMPLE_TRY:
4678 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4679 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4680 break;
4682 case GIMPLE_EH_FILTER:
4683 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4684 break;
4686 case GIMPLE_EH_ELSE:
4688 geh_else *eh_else = as_a <geh_else *> (stmt);
4689 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4690 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4692 break;
4694 case GIMPLE_CATCH:
4695 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4696 as_a <gcatch *> (stmt)));
4697 break;
4699 case GIMPLE_TRANSACTION:
4700 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4701 break;
4703 default:
4705 bool err2 = verify_gimple_stmt (stmt);
4706 if (err2)
4707 debug_gimple_stmt (stmt);
4708 err |= err2;
4713 return err;
4716 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4717 is a problem, otherwise false. */
4719 static bool
4720 verify_gimple_transaction (gtransaction *stmt)
4722 tree lab = gimple_transaction_label (stmt);
4723 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4724 return true;
4725 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4729 /* Verify the GIMPLE statements inside the statement list STMTS. */
4731 DEBUG_FUNCTION void
4732 verify_gimple_in_seq (gimple_seq stmts)
4734 timevar_push (TV_TREE_STMT_VERIFY);
4735 if (verify_gimple_in_seq_2 (stmts))
4736 internal_error ("verify_gimple failed");
4737 timevar_pop (TV_TREE_STMT_VERIFY);
4740 /* Return true when T can be shared. */
4742 static bool
4743 tree_node_can_be_shared (tree t)
4745 if (IS_TYPE_OR_DECL_P (t)
4746 || is_gimple_min_invariant (t)
4747 || TREE_CODE (t) == SSA_NAME
4748 || t == error_mark_node
4749 || TREE_CODE (t) == IDENTIFIER_NODE)
4750 return true;
4752 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4753 return true;
4755 if (DECL_P (t))
4756 return true;
4758 return false;
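/* Illustration (not in the upstream source): in a statement built
   from "a[i_1] + a[i_1]" the VAR_DECL for "a", the SSA name i_1 and
   any integer constants may be referenced from both operands, but
   each operand needs its own ARRAY_REF node; reusing one ARRAY_REF
   node twice is the kind of invalid sharing the walkers below
   detect.  */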
4761 /* Called via walk_tree. Verify tree sharing. */
4763 static tree
4764 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4766 hash_set<void *> *visited = (hash_set<void *> *) data;
4768 if (tree_node_can_be_shared (*tp))
4770 *walk_subtrees = false;
4771 return NULL;
4774 if (visited->add (*tp))
4775 return *tp;
4777 return NULL;
4780 /* Called via walk_gimple_stmt. Verify tree sharing. */
4782 static tree
4783 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4785 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4786 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4789 static bool eh_error_found;
4790 bool
4791 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4792 hash_set<gimple> *visited)
4794 if (!visited->contains (stmt))
4796 error ("dead STMT in EH table");
4797 debug_gimple_stmt (stmt);
4798 eh_error_found = true;
4800 return true;
4803 /* Verify that the block of location LOC is in BLOCKS. */
4805 static bool
4806 verify_location (hash_set<tree> *blocks, location_t loc)
4808 tree block = LOCATION_BLOCK (loc);
4809 if (block != NULL_TREE
4810 && !blocks->contains (block))
4812 error ("location references block not in block tree");
4813 return true;
4815 if (block != NULL_TREE)
4816 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4817 return false;
4820 /* Called via walk_tree. Verify that expressions have no blocks. */
4822 static tree
4823 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4825 if (!EXPR_P (*tp))
4827 *walk_subtrees = false;
4828 return NULL;
4831 location_t loc = EXPR_LOCATION (*tp);
4832 if (LOCATION_BLOCK (loc) != NULL)
4833 return *tp;
4835 return NULL;
4838 /* Called via walk_tree. Verify locations of expressions. */
4840 static tree
4841 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4843 hash_set<tree> *blocks = (hash_set<tree> *) data;
4845 if (TREE_CODE (*tp) == VAR_DECL
4846 && DECL_HAS_DEBUG_EXPR_P (*tp))
4848 tree t = DECL_DEBUG_EXPR (*tp);
4849 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4850 if (addr)
4851 return addr;
4853 if ((TREE_CODE (*tp) == VAR_DECL
4854 || TREE_CODE (*tp) == PARM_DECL
4855 || TREE_CODE (*tp) == RESULT_DECL)
4856 && DECL_HAS_VALUE_EXPR_P (*tp))
4858 tree t = DECL_VALUE_EXPR (*tp);
4859 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4860 if (addr)
4861 return addr;
4864 if (!EXPR_P (*tp))
4866 *walk_subtrees = false;
4867 return NULL;
4870 location_t loc = EXPR_LOCATION (*tp);
4871 if (verify_location (blocks, loc))
4872 return *tp;
4874 return NULL;
4877 /* Called via walk_gimple_op. Verify locations of expressions. */
4879 static tree
4880 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4882 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4883 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4886 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4888 static void
4889 collect_subblocks (hash_set<tree> *blocks, tree block)
4891 tree t;
4892 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4894 blocks->add (t);
4895 collect_subblocks (blocks, t);
4899 /* Verify the GIMPLE statements in the CFG of FN. */
4901 DEBUG_FUNCTION void
4902 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4904 basic_block bb;
4905 bool err = false;
4907 timevar_push (TV_TREE_STMT_VERIFY);
4908 hash_set<void *> visited;
4909 hash_set<gimple> visited_stmts;
4911 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4912 hash_set<tree> blocks;
4913 if (DECL_INITIAL (fn->decl))
4915 blocks.add (DECL_INITIAL (fn->decl));
4916 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4919 FOR_EACH_BB_FN (bb, fn)
4921 gimple_stmt_iterator gsi;
4923 for (gphi_iterator gpi = gsi_start_phis (bb);
4924 !gsi_end_p (gpi);
4925 gsi_next (&gpi))
4927 gphi *phi = gpi.phi ();
4928 bool err2 = false;
4929 unsigned i;
4931 visited_stmts.add (phi);
4933 if (gimple_bb (phi) != bb)
4935 error ("gimple_bb (phi) is set to a wrong basic block");
4936 err2 = true;
4939 err2 |= verify_gimple_phi (phi);
4941 /* Only PHI arguments have locations. */
4942 if (gimple_location (phi) != UNKNOWN_LOCATION)
4944 error ("PHI node with location");
4945 err2 = true;
4948 for (i = 0; i < gimple_phi_num_args (phi); i++)
4950 tree arg = gimple_phi_arg_def (phi, i);
4951 tree addr = walk_tree (&arg, verify_node_sharing_1,
4952 &visited, NULL);
4953 if (addr)
4955 error ("incorrect sharing of tree nodes");
4956 debug_generic_expr (addr);
4957 err2 |= true;
4959 location_t loc = gimple_phi_arg_location (phi, i);
4960 if (virtual_operand_p (gimple_phi_result (phi))
4961 && loc != UNKNOWN_LOCATION)
4963 error ("virtual PHI with argument locations");
4964 err2 = true;
4966 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4967 if (addr)
4969 debug_generic_expr (addr);
4970 err2 = true;
4972 err2 |= verify_location (&blocks, loc);
4975 if (err2)
4976 debug_gimple_stmt (phi);
4977 err |= err2;
4980 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4982 gimple stmt = gsi_stmt (gsi);
4983 bool err2 = false;
4984 struct walk_stmt_info wi;
4985 tree addr;
4986 int lp_nr;
4988 visited_stmts.add (stmt);
4990 if (gimple_bb (stmt) != bb)
4992 error ("gimple_bb (stmt) is set to a wrong basic block");
4993 err2 = true;
4996 err2 |= verify_gimple_stmt (stmt);
4997 err2 |= verify_location (&blocks, gimple_location (stmt));
4999 memset (&wi, 0, sizeof (wi));
5000 wi.info = (void *) &visited;
5001 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5002 if (addr)
5004 error ("incorrect sharing of tree nodes");
5005 debug_generic_expr (addr);
5006 err2 |= true;
5009 memset (&wi, 0, sizeof (wi));
5010 wi.info = (void *) &blocks;
5011 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5012 if (addr)
5014 debug_generic_expr (addr);
5015 err2 |= true;
5018 /* ??? Instead of not checking these stmts at all the walker
5019 should know its context via wi. */
5020 if (!is_gimple_debug (stmt)
5021 && !is_gimple_omp (stmt))
5023 memset (&wi, 0, sizeof (wi));
5024 addr = walk_gimple_op (stmt, verify_expr, &wi);
5025 if (addr)
5027 debug_generic_expr (addr);
5028 inform (gimple_location (stmt), "in statement");
5029 err2 |= true;
5033 /* If the statement is marked as part of an EH region, then it is
5034 expected that the statement could throw. Verify that when an
5035 optimization simplifies a statement such that we can prove it
5036 cannot throw, we also update the other data structures to
5037 match. */
5038 lp_nr = lookup_stmt_eh_lp (stmt);
5039 if (lp_nr > 0)
5041 if (!stmt_could_throw_p (stmt))
5043 if (verify_nothrow)
5045 error ("statement marked for throw, but doesn%'t");
5046 err2 |= true;
5049 else if (!gsi_one_before_end_p (gsi))
5051 error ("statement marked for throw in middle of block");
5052 err2 |= true;
5056 if (err2)
5057 debug_gimple_stmt (stmt);
5058 err |= err2;
5062 eh_error_found = false;
5063 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5064 if (eh_table)
5065 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5066 (&visited_stmts);
5068 if (err || eh_error_found)
5069 internal_error ("verify_gimple failed");
5071 verify_histograms ();
5072 timevar_pop (TV_TREE_STMT_VERIFY);
5076 /* Verifies that the flow information is OK. */
5078 static int
5079 gimple_verify_flow_info (void)
5081 int err = 0;
5082 basic_block bb;
5083 gimple_stmt_iterator gsi;
5084 gimple stmt;
5085 edge e;
5086 edge_iterator ei;
5088 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5089 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5091 error ("ENTRY_BLOCK has IL associated with it");
5092 err = 1;
5095 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5096 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5098 error ("EXIT_BLOCK has IL associated with it");
5099 err = 1;
5102 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5103 if (e->flags & EDGE_FALLTHRU)
5105 error ("fallthru to exit from bb %d", e->src->index);
5106 err = 1;
5109 FOR_EACH_BB_FN (bb, cfun)
5111 bool found_ctrl_stmt = false;
5113 stmt = NULL;
5115 /* Skip labels on the start of basic block. */
5116 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5118 tree label;
5119 gimple prev_stmt = stmt;
5121 stmt = gsi_stmt (gsi);
5123 if (gimple_code (stmt) != GIMPLE_LABEL)
5124 break;
5126 label = gimple_label_label (as_a <glabel *> (stmt));
5127 if (prev_stmt && DECL_NONLOCAL (label))
5129 error ("nonlocal label ");
5130 print_generic_expr (stderr, label, 0);
5131 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5132 bb->index);
5133 err = 1;
5136 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5138 error ("EH landing pad label ");
5139 print_generic_expr (stderr, label, 0);
5140 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5141 bb->index);
5142 err = 1;
5145 if (label_to_block (label) != bb)
5147 error ("label ");
5148 print_generic_expr (stderr, label, 0);
5149 fprintf (stderr, " to block does not match in bb %d",
5150 bb->index);
5151 err = 1;
5154 if (decl_function_context (label) != current_function_decl)
5156 error ("label ");
5157 print_generic_expr (stderr, label, 0);
5158 fprintf (stderr, " has incorrect context in bb %d",
5159 bb->index);
5160 err = 1;
5164 /* Verify that the body of basic block BB is free of control flow. */
5165 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5167 gimple stmt = gsi_stmt (gsi);
5169 if (found_ctrl_stmt)
5171 error ("control flow in the middle of basic block %d",
5172 bb->index);
5173 err = 1;
5176 if (stmt_ends_bb_p (stmt))
5177 found_ctrl_stmt = true;
5179 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5181 error ("label ");
5182 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5183 fprintf (stderr, " in the middle of basic block %d", bb->index);
5184 err = 1;
5188 gsi = gsi_last_bb (bb);
5189 if (gsi_end_p (gsi))
5190 continue;
5192 stmt = gsi_stmt (gsi);
5194 if (gimple_code (stmt) == GIMPLE_LABEL)
5195 continue;
5197 err |= verify_eh_edges (stmt);
5199 if (is_ctrl_stmt (stmt))
5201 FOR_EACH_EDGE (e, ei, bb->succs)
5202 if (e->flags & EDGE_FALLTHRU)
5204 error ("fallthru edge after a control statement in bb %d",
5205 bb->index);
5206 err = 1;
5210 if (gimple_code (stmt) != GIMPLE_COND)
5212 /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
5213 after anything other than a GIMPLE_COND. */
5214 FOR_EACH_EDGE (e, ei, bb->succs)
5215 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5217 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5218 bb->index);
5219 err = 1;
5223 switch (gimple_code (stmt))
5225 case GIMPLE_COND:
5227 edge true_edge;
5228 edge false_edge;
5230 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5232 if (!true_edge
5233 || !false_edge
5234 || !(true_edge->flags & EDGE_TRUE_VALUE)
5235 || !(false_edge->flags & EDGE_FALSE_VALUE)
5236 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5237 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5238 || EDGE_COUNT (bb->succs) >= 3)
5240 error ("wrong outgoing edge flags at end of bb %d",
5241 bb->index);
5242 err = 1;
5245 break;
5247 case GIMPLE_GOTO:
5248 if (simple_goto_p (stmt))
5250 error ("explicit goto at end of bb %d", bb->index);
5251 err = 1;
5253 else
5255 /* FIXME. We should double check that the labels in the
5256 destination blocks have their address taken. */
5257 FOR_EACH_EDGE (e, ei, bb->succs)
5258 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5259 | EDGE_FALSE_VALUE))
5260 || !(e->flags & EDGE_ABNORMAL))
5262 error ("wrong outgoing edge flags at end of bb %d",
5263 bb->index);
5264 err = 1;
5267 break;
5269 case GIMPLE_CALL:
5270 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5271 break;
5272 /* ... fallthru ... */
5273 case GIMPLE_RETURN:
5274 if (!single_succ_p (bb)
5275 || (single_succ_edge (bb)->flags
5276 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5277 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5279 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5280 err = 1;
5282 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5284 error ("return edge does not point to exit in bb %d",
5285 bb->index);
5286 err = 1;
5288 break;
5290 case GIMPLE_SWITCH:
5292 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5293 tree prev;
5294 edge e;
5295 size_t i, n;
5297 n = gimple_switch_num_labels (switch_stmt);
5299 /* Mark all the destination basic blocks. */
5300 for (i = 0; i < n; ++i)
5302 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5303 basic_block label_bb = label_to_block (lab);
5304 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5305 label_bb->aux = (void *)1;
5308 /* Verify that the case labels are sorted. */
5309 prev = gimple_switch_label (switch_stmt, 0);
5310 for (i = 1; i < n; ++i)
5312 tree c = gimple_switch_label (switch_stmt, i);
5313 if (!CASE_LOW (c))
5315 error ("found default case not at the start of "
5316 "case vector");
5317 err = 1;
5318 continue;
5320 if (CASE_LOW (prev)
5321 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5323 error ("case labels not sorted: ");
5324 print_generic_expr (stderr, prev, 0);
5325 fprintf (stderr, " is greater than ");
5326 print_generic_expr (stderr, c, 0);
5327 fprintf (stderr, " but comes before it.\n");
5328 err = 1;
5330 prev = c;
5332 /* VRP will remove the default case if it can prove it will
5333 never be executed. So do not verify there always exists
5334 a default case here. */
5336 FOR_EACH_EDGE (e, ei, bb->succs)
5338 if (!e->dest->aux)
5340 error ("extra outgoing edge %d->%d",
5341 bb->index, e->dest->index);
5342 err = 1;
5345 e->dest->aux = (void *)2;
5346 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5347 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5349 error ("wrong outgoing edge flags at end of bb %d",
5350 bb->index);
5351 err = 1;
5355 /* Check that we have all of them. */
5356 for (i = 0; i < n; ++i)
5358 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5359 basic_block label_bb = label_to_block (lab);
5361 if (label_bb->aux != (void *)2)
5363 error ("missing edge %i->%i", bb->index, label_bb->index);
5364 err = 1;
5368 FOR_EACH_EDGE (e, ei, bb->succs)
5369 e->dest->aux = (void *)0;
5371 break;
5373 case GIMPLE_EH_DISPATCH:
5374 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5375 break;
5377 default:
5378 break;
5382 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5383 verify_dominators (CDI_DOMINATORS);
5385 return err;
5389 /* Updates phi nodes after creating a forwarder block joined
5390 by edge FALLTHRU. */
5392 static void
5393 gimple_make_forwarder_block (edge fallthru)
5395 edge e;
5396 edge_iterator ei;
5397 basic_block dummy, bb;
5398 tree var;
5399 gphi_iterator gsi;
5401 dummy = fallthru->src;
5402 bb = fallthru->dest;
5404 if (single_pred_p (bb))
5405 return;
5407 /* If we redirected a branch we must create new PHI nodes at the
5408 start of BB. */
5409 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5411 gphi *phi, *new_phi;
5413 phi = gsi.phi ();
5414 var = gimple_phi_result (phi);
5415 new_phi = create_phi_node (var, bb);
5416 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5417 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5418 UNKNOWN_LOCATION);
5421 /* Add the arguments we have stored on edges. */
5422 FOR_EACH_EDGE (e, ei, bb->preds)
5424 if (e == fallthru)
5425 continue;
5427 flush_pending_stmts (e);
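/* Illustration (not in the upstream source): after a forwarder DUMMY
   is split off in front of BB, the original PHIs still sit in DUMMY
   while some predecessors enter BB directly.  The loop above rewrites

     # x_3 = PHI <x_1(2), x_2(4)>            (in DUMMY)

   into

     # x_4 = PHI <x_1(2), x_2(4)>            (in DUMMY, result renamed)
     # x_3 = PHI <x_4(fallthru)>             (in BB)

   so x_3 still dominates its uses, and flush_pending_stmts then adds
   the arguments queued on each predecessor edge that was redirected
   straight to BB.  */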
5432 /* Return a non-special label at the head of basic block BB.
5433 Create one if it doesn't exist. */
5435 tree
5436 gimple_block_label (basic_block bb)
5438 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5439 bool first = true;
5440 tree label;
5441 glabel *stmt;
5443 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5445 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5446 if (!stmt)
5447 break;
5448 label = gimple_label_label (stmt);
5449 if (!DECL_NONLOCAL (label))
5451 if (!first)
5452 gsi_move_before (&i, &s);
5453 return label;
5457 label = create_artificial_label (UNKNOWN_LOCATION);
5458 stmt = gimple_build_label (label);
5459 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5460 return label;
5464 /* Attempt to perform edge redirection by replacing a possibly complex
5465 jump instruction by a goto or by removing the jump completely.
5466 This can apply only if all edges now point to the same block. The
5467 parameters and return values are equivalent to
5468 redirect_edge_and_branch. */
5470 static edge
5471 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5473 basic_block src = e->src;
5474 gimple_stmt_iterator i;
5475 gimple stmt;
5477 /* We can replace or remove a complex jump only when we have exactly
5478 two edges. */
5479 if (EDGE_COUNT (src->succs) != 2
5480 /* Verify that all targets will be TARGET. Specifically, the
5481 edge that is not E must also go to TARGET. */
5482 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5483 return NULL;
5485 i = gsi_last_bb (src);
5486 if (gsi_end_p (i))
5487 return NULL;
5489 stmt = gsi_stmt (i);
5491 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5493 gsi_remove (&i, true);
5494 e = ssa_redirect_edge (e, target);
5495 e->flags = EDGE_FALLTHRU;
5496 return e;
5499 return NULL;
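/* Illustration (not in the upstream source): the index expression
   EDGE_SUCC (src, EDGE_SUCC (src, 0) == e) above selects the
   successor edge other than E; the comparison is 0 when edge 0 is
   already the other edge and 1 when edge 0 is E itself.  When both
   successors reach TARGET, the GIMPLE_COND or GIMPLE_SWITCH ending
   SRC decides nothing and is replaced by a single fallthru edge.  */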
5503 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5504 edge representing the redirected branch. */
5506 static edge
5507 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5509 basic_block bb = e->src;
5510 gimple_stmt_iterator gsi;
5511 edge ret;
5512 gimple stmt;
5514 if (e->flags & EDGE_ABNORMAL)
5515 return NULL;
5517 if (e->dest == dest)
5518 return NULL;
5520 if (e->flags & EDGE_EH)
5521 return redirect_eh_edge (e, dest);
5523 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5525 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5526 if (ret)
5527 return ret;
5530 gsi = gsi_last_bb (bb);
5531 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5533 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5535 case GIMPLE_COND:
5536 /* For COND_EXPR, we only need to redirect the edge. */
5537 break;
5539 case GIMPLE_GOTO:
5540 /* No non-abnormal edges should lead from a non-simple goto, and
5541 simple ones should be represented implicitly. */
5542 gcc_unreachable ();
5544 case GIMPLE_SWITCH:
5546 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5547 tree label = gimple_block_label (dest);
5548 tree cases = get_cases_for_edge (e, switch_stmt);
5550 /* If we have a list of cases associated with E, then use it
5551 as it's a lot faster than walking the entire case vector. */
5552 if (cases)
5554 edge e2 = find_edge (e->src, dest);
5555 tree last, first;
5557 first = cases;
5558 while (cases)
5560 last = cases;
5561 CASE_LABEL (cases) = label;
5562 cases = CASE_CHAIN (cases);
5565 /* If there was already an edge in the CFG, then we need
5566 to move all the cases associated with E to E2. */
5567 if (e2)
5569 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5571 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5572 CASE_CHAIN (cases2) = first;
5574 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5576 else
5578 size_t i, n = gimple_switch_num_labels (switch_stmt);
5580 for (i = 0; i < n; i++)
5582 tree elt = gimple_switch_label (switch_stmt, i);
5583 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5584 CASE_LABEL (elt) = label;
5588 break;
5590 case GIMPLE_ASM:
5592 gasm *asm_stmt = as_a <gasm *> (stmt);
5593 int i, n = gimple_asm_nlabels (asm_stmt);
5594 tree label = NULL;
5596 for (i = 0; i < n; ++i)
5598 tree cons = gimple_asm_label_op (asm_stmt, i);
5599 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5601 if (!label)
5602 label = gimple_block_label (dest);
5603 TREE_VALUE (cons) = label;
5607 /* If we didn't find any label matching the former edge in the
5608 asm labels, we must be redirecting the fallthrough
5609 edge. */
5610 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5612 break;
5614 case GIMPLE_RETURN:
5615 gsi_remove (&gsi, true);
5616 e->flags |= EDGE_FALLTHRU;
5617 break;
5619 case GIMPLE_OMP_RETURN:
5620 case GIMPLE_OMP_CONTINUE:
5621 case GIMPLE_OMP_SECTIONS_SWITCH:
5622 case GIMPLE_OMP_FOR:
5623 /* The edges from OMP constructs can be simply redirected. */
5624 break;
5626 case GIMPLE_EH_DISPATCH:
5627 if (!(e->flags & EDGE_FALLTHRU))
5628 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5629 break;
5631 case GIMPLE_TRANSACTION:
5632 /* The ABORT edge has a stored label associated with it, otherwise
5633 the edges are simply redirectable. */
5634 if (e->flags == 0)
5635 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5636 gimple_block_label (dest));
5637 break;
5639 default:
5640 /* Otherwise it must be a fallthru edge, and we don't need to
5641 do anything besides redirecting it. */
5642 gcc_assert (e->flags & EDGE_FALLTHRU);
5643 break;
5646 /* Update/insert PHI nodes as necessary. */
5648 /* Now update the edges in the CFG. */
5649 e = ssa_redirect_edge (e, dest);
5651 return e;
5654 /* Returns true if it is possible to remove edge E by redirecting
5655 it to the destination of the other edge from E->src. */
5657 static bool
5658 gimple_can_remove_branch_p (const_edge e)
5660 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5661 return false;
5663 return true;
5666 /* Simple wrapper, as we can always redirect fallthru edges. */
5668 static basic_block
5669 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5671 e = gimple_redirect_edge_and_branch (e, dest);
5672 gcc_assert (e);
5674 return NULL;
5678 /* Splits basic block BB after statement STMT (but at least after the
5679 labels). If STMT is NULL, BB is split just after the labels. */
5681 static basic_block
5682 gimple_split_block (basic_block bb, void *stmt)
5684 gimple_stmt_iterator gsi;
5685 gimple_stmt_iterator gsi_tgt;
5686 gimple act;
5687 gimple_seq list;
5688 basic_block new_bb;
5689 edge e;
5690 edge_iterator ei;
5692 new_bb = create_empty_bb (bb);
5694 /* Redirect the outgoing edges. */
5695 new_bb->succs = bb->succs;
5696 bb->succs = NULL;
5697 FOR_EACH_EDGE (e, ei, new_bb->succs)
5698 e->src = new_bb;
5700 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5701 stmt = NULL;
5703 /* Move everything from GSI to the new basic block. */
5704 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5706 act = gsi_stmt (gsi);
5707 if (gimple_code (act) == GIMPLE_LABEL)
5708 continue;
5710 if (!stmt)
5711 break;
5713 if (stmt == act)
5715 gsi_next (&gsi);
5716 break;
5720 if (gsi_end_p (gsi))
5721 return new_bb;
5723 /* Split the statement list - avoid re-creating new containers as this
5724 brings ugly quadratic memory consumption in the inliner.
5725 (We are still quadratic since we need to update stmt BB pointers,
5726 sadly.) */
5727 gsi_split_seq_before (&gsi, &list);
5728 set_bb_seq (new_bb, list);
5729 for (gsi_tgt = gsi_start (list);
5730 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5731 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5733 return new_bb;
5737 /* Moves basic block BB after block AFTER. */
5739 static bool
5740 gimple_move_block_after (basic_block bb, basic_block after)
5742 if (bb->prev_bb == after)
5743 return true;
5745 unlink_block (bb);
5746 link_block (bb, after);
5748 return true;
5752 /* Return TRUE if block BB has no executable statements, otherwise return
5753 FALSE. */
5755 static bool
5756 gimple_empty_block_p (basic_block bb)
5758 /* BB must have no executable statements. */
5759 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5760 if (phi_nodes (bb))
5761 return false;
5762 if (gsi_end_p (gsi))
5763 return true;
5764 if (is_gimple_debug (gsi_stmt (gsi)))
5765 gsi_next_nondebug (&gsi);
5766 return gsi_end_p (gsi);
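/* Illustration (not in the upstream source): a block containing only

     <L2>:
     # DEBUG x => x_1

   counts as empty: gsi_after_labels skips the label, the debug bind
   is stepped over by gsi_next_nondebug, and there are no PHIs.  */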
5770 /* Split a basic block if it ends with a conditional branch and if the
5771 other part of the block is not empty. */
5773 static basic_block
5774 gimple_split_block_before_cond_jump (basic_block bb)
5776 gimple last, split_point;
5777 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5778 if (gsi_end_p (gsi))
5779 return NULL;
5780 last = gsi_stmt (gsi);
5781 if (gimple_code (last) != GIMPLE_COND
5782 && gimple_code (last) != GIMPLE_SWITCH)
5783 return NULL;
5784 gsi_prev_nondebug (&gsi);
5785 split_point = gsi_stmt (gsi);
5786 return split_block (bb, split_point)->dest;
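/* Illustration (not in the upstream source): for a block ending in

     a_1 = b_2 + 1;
     if (a_1 > 10) goto <bb 3>; else goto <bb 4>;

   the split point is the assignment, so the block is divided into
   one part ending with "a_1 = b_2 + 1;" and a successor that holds
   only the conditional jump.  */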
5790 /* Return true if basic_block can be duplicated. */
5792 static bool
5793 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5795 return true;
5798 /* Create a duplicate of the basic block BB. NOTE: This does not
5799 preserve SSA form. */
5801 static basic_block
5802 gimple_duplicate_bb (basic_block bb)
5804 basic_block new_bb;
5805 gimple_stmt_iterator gsi_tgt;
5807 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5809 /* Copy the PHI nodes. We ignore PHI node arguments here because
5810 the incoming edges have not been setup yet. */
5811 for (gphi_iterator gpi = gsi_start_phis (bb);
5812 !gsi_end_p (gpi);
5813 gsi_next (&gpi))
5815 gphi *phi, *copy;
5816 phi = gpi.phi ();
5817 copy = create_phi_node (NULL_TREE, new_bb);
5818 create_new_def_for (gimple_phi_result (phi), copy,
5819 gimple_phi_result_ptr (copy));
5820 gimple_set_uid (copy, gimple_uid (phi));
5823 gsi_tgt = gsi_start_bb (new_bb);
5824 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5825 !gsi_end_p (gsi);
5826 gsi_next (&gsi))
5828 def_operand_p def_p;
5829 ssa_op_iter op_iter;
5830 tree lhs;
5831 gimple stmt, copy;
5833 stmt = gsi_stmt (gsi);
5834 if (gimple_code (stmt) == GIMPLE_LABEL)
5835 continue;
5837 /* Don't duplicate label debug stmts. */
5838 if (gimple_debug_bind_p (stmt)
5839 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5840 == LABEL_DECL)
5841 continue;
5843 /* Create a new copy of STMT and duplicate STMT's virtual
5844 operands. */
5845 copy = gimple_copy (stmt);
5846 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5848 maybe_duplicate_eh_stmt (copy, stmt);
5849 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5851 /* When copying around a stmt writing into a local non-user
5852 aggregate, make sure it won't share stack slot with other
5853 vars. */
5854 lhs = gimple_get_lhs (stmt);
5855 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5857 tree base = get_base_address (lhs);
5858 if (base
5859 && (TREE_CODE (base) == VAR_DECL
5860 || TREE_CODE (base) == RESULT_DECL)
5861 && DECL_IGNORED_P (base)
5862 && !TREE_STATIC (base)
5863 && !DECL_EXTERNAL (base)
5864 && (TREE_CODE (base) != VAR_DECL
5865 || !DECL_HAS_VALUE_EXPR_P (base)))
5866 DECL_NONSHAREABLE (base) = 1;
5869 /* Create new names for all the definitions created by COPY and
5870 add replacement mappings for each new name. */
5871 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5872 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5875 return new_bb;
5878 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5880 static void
5881 add_phi_args_after_copy_edge (edge e_copy)
5883 basic_block bb, bb_copy = e_copy->src, dest;
5884 edge e;
5885 edge_iterator ei;
5886 gphi *phi, *phi_copy;
5887 tree def;
5888 gphi_iterator psi, psi_copy;
5890 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5891 return;
5893 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5895 if (e_copy->dest->flags & BB_DUPLICATED)
5896 dest = get_bb_original (e_copy->dest);
5897 else
5898 dest = e_copy->dest;
5900 e = find_edge (bb, dest);
5901 if (!e)
5903 /* During loop unrolling the target of the latch edge is copied.
5904 In this case we are not looking for the edge to DEST, but for
5905 the edge to the duplicated block whose original was DEST. */
5906 FOR_EACH_EDGE (e, ei, bb->succs)
5908 if ((e->dest->flags & BB_DUPLICATED)
5909 && get_bb_original (e->dest) == dest)
5910 break;
5913 gcc_assert (e != NULL);
5916 for (psi = gsi_start_phis (e->dest),
5917 psi_copy = gsi_start_phis (e_copy->dest);
5918 !gsi_end_p (psi);
5919 gsi_next (&psi), gsi_next (&psi_copy))
5921 phi = psi.phi ();
5922 phi_copy = psi_copy.phi ();
5923 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5924 add_phi_arg (phi_copy, def, e_copy,
5925 gimple_phi_arg_location_from_edge (phi, e));
5930 /* Basic block BB_COPY was created by code duplication. Add phi node
5931 arguments for edges going out of BB_COPY. The blocks that were
5932 duplicated have BB_DUPLICATED set. */
5934 void
5935 add_phi_args_after_copy_bb (basic_block bb_copy)
5937 edge e_copy;
5938 edge_iterator ei;
5940 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5942 add_phi_args_after_copy_edge (e_copy);
5946 /* Blocks in REGION_COPY array of length N_REGION were created by
5947 duplication of basic blocks. Add phi node arguments for edges
5948 going from these blocks. If E_COPY is not NULL, also add
5949 phi node arguments for its destination. */
5951 void
5952 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5953 edge e_copy)
5955 unsigned i;
5957 for (i = 0; i < n_region; i++)
5958 region_copy[i]->flags |= BB_DUPLICATED;
5960 for (i = 0; i < n_region; i++)
5961 add_phi_args_after_copy_bb (region_copy[i]);
5962 if (e_copy)
5963 add_phi_args_after_copy_edge (e_copy);
5965 for (i = 0; i < n_region; i++)
5966 region_copy[i]->flags &= ~BB_DUPLICATED;
5969 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5970 important exit edge EXIT. By important we mean that no SSA name defined
5971 inside the region is live over the other exit edges of the region. All entry
5972 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5973 to the duplicate of the region. Dominance and loop information is
5974 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5975 UPDATE_DOMINANCE is false then we assume that the caller will update the
5976 dominance information after calling this function. The new basic
5977 blocks are stored to REGION_COPY in the same order as they had in REGION,
5978 provided that REGION_COPY is not NULL.
5979 The function returns false if it is unable to copy the region,
5980 true otherwise. */
5982 bool
5983 gimple_duplicate_sese_region (edge entry, edge exit,
5984 basic_block *region, unsigned n_region,
5985 basic_block *region_copy,
5986 bool update_dominance)
5988 unsigned i;
5989 bool free_region_copy = false, copying_header = false;
5990 struct loop *loop = entry->dest->loop_father;
5991 edge exit_copy;
5992 vec<basic_block> doms;
5993 edge redirected;
5994 int total_freq = 0, entry_freq = 0;
5995 gcov_type total_count = 0, entry_count = 0;
5997 if (!can_copy_bbs_p (region, n_region))
5998 return false;
6000 /* Some sanity checking. Note that we do not check for all possible
6001 misuses of the functions. I.e. if you ask to copy something weird,
6002 it will work, but the state of structures probably will not be
6003 correct. */
6004 for (i = 0; i < n_region; i++)
6006 /* We do not handle subloops, i.e. all the blocks must belong to the
6007 same loop. */
6008 if (region[i]->loop_father != loop)
6009 return false;
6011 if (region[i] != entry->dest
6012 && region[i] == loop->header)
6013 return false;
6016 /* In case the function is used for loop header copying (which is the primary
6017 use), ensure that EXIT and its copy will be new latch and entry edges. */
6018 if (loop->header == entry->dest)
6020 copying_header = true;
6022 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6023 return false;
6025 for (i = 0; i < n_region; i++)
6026 if (region[i] != exit->src
6027 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6028 return false;
6031 initialize_original_copy_tables ();
6033 if (copying_header)
6034 set_loop_copy (loop, loop_outer (loop));
6035 else
6036 set_loop_copy (loop, loop);
6038 if (!region_copy)
6040 region_copy = XNEWVEC (basic_block, n_region);
6041 free_region_copy = true;
6044 /* Record blocks outside the region that are dominated by something
6045 inside. */
6046 if (update_dominance)
6048 doms.create (0);
6049 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6052 if (entry->dest->count)
6054 total_count = entry->dest->count;
6055 entry_count = entry->count;
6056 /* Fix up corner cases, to avoid division by zero or creation of negative
6057 frequencies. */
6058 if (entry_count > total_count)
6059 entry_count = total_count;
6061 else
6063 total_freq = entry->dest->frequency;
6064 entry_freq = EDGE_FREQUENCY (entry);
6065 /* Fix up corner cases, to avoid division by zero or creation of negative
6066 frequencies. */
6067 if (total_freq == 0)
6068 total_freq = 1;
6069 else if (entry_freq > total_freq)
6070 entry_freq = total_freq;
6073 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6074 split_edge_bb_loc (entry), update_dominance);
6075 if (total_count)
6077 scale_bbs_frequencies_gcov_type (region, n_region,
6078 total_count - entry_count,
6079 total_count);
6080 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6081 total_count);
6083 else
6085 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6086 total_freq);
6087 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6090 if (copying_header)
6092 loop->header = exit->dest;
6093 loop->latch = exit->src;
6096 /* Redirect the entry and add the phi node arguments. */
6097 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6098 gcc_assert (redirected != NULL);
6099 flush_pending_stmts (entry);
6101 /* Concerning updating of dominators: We must recount dominators
6102 for entry block and its copy. Anything that is outside of the
6103 region, but was dominated by something inside needs recounting as
6104 well. */
6105 if (update_dominance)
6107 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6108 doms.safe_push (get_bb_original (entry->dest));
6109 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6110 doms.release ();
6113 /* Add the other PHI node arguments. */
6114 add_phi_args_after_copy (region_copy, n_region, NULL);
6116 if (free_region_copy)
6117 free (region_copy);
6119 free_original_copy_tables ();
6120 return true;
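/* Illustration (not in the upstream source): with profile counts,
   if ENTRY->dest->count is 1000 and ENTRY->count is 250, the calls
   above scale the original region by (1000 - 250) / 1000 and the
   copy by 250 / 1000; a block that executed 800 times is split into
   600 executions in the original and 200 in the copy, so the sum is
   preserved.  */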
6123 /* Checks if BB is part of the region defined by N_REGION BBS. */
6124 static bool
6125 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6127 unsigned int n;
6129 for (n = 0; n < n_region; n++)
6131 if (bb == bbs[n])
6132 return true;
6134 return false;
6137 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6138 are stored to REGION_COPY in the same order in which they appear
6139 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6140 the region, EXIT an exit from it. The condition guarding EXIT
6141 is moved to ENTRY. Returns true if duplication succeeds, false
6142 otherwise.
6144 For example,
6146 some_code;
6147 if (cond)
6148 {
6149 }
6150 else
6151 {
6152 }
6154 is transformed to
6156 if (cond)
6157 {
6158 some_code;
6159 }
6160 else
6161 {
6162 some_code;
6163 }
6165 */
6166 bool
6167 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6168 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6169 basic_block *region_copy ATTRIBUTE_UNUSED)
6171 unsigned i;
6172 bool free_region_copy = false;
6173 struct loop *loop = exit->dest->loop_father;
6174 struct loop *orig_loop = entry->dest->loop_father;
6175 basic_block switch_bb, entry_bb, nentry_bb;
6176 vec<basic_block> doms;
6177 int total_freq = 0, exit_freq = 0;
6178 gcov_type total_count = 0, exit_count = 0;
6179 edge exits[2], nexits[2], e;
6180 gimple_stmt_iterator gsi;
6181 gimple cond_stmt;
6182 edge sorig, snew;
6183 basic_block exit_bb;
6184 gphi_iterator psi;
6185 gphi *phi;
6186 tree def;
6187 struct loop *target, *aloop, *cloop;
6189 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6190 exits[0] = exit;
6191 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6193 if (!can_copy_bbs_p (region, n_region))
6194 return false;
6196 initialize_original_copy_tables ();
6197 set_loop_copy (orig_loop, loop);
6199 target = loop;
6200 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6202 if (bb_part_of_region_p (aloop->header, region, n_region))
6204 cloop = duplicate_loop (aloop, target);
6205 duplicate_subloops (aloop, cloop);
6209 if (!region_copy)
6211 region_copy = XNEWVEC (basic_block, n_region);
6212 free_region_copy = true;
6215 gcc_assert (!need_ssa_update_p (cfun));
6217 /* Record blocks outside the region that are dominated by something
6218 inside. */
6219 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6221 if (exit->src->count)
6223 total_count = exit->src->count;
6224 exit_count = exit->count;
6225 /* Fix up corner cases, to avoid division by zero or creation of negative
6226 frequencies. */
6227 if (exit_count > total_count)
6228 exit_count = total_count;
6230 else
6232 total_freq = exit->src->frequency;
6233 exit_freq = EDGE_FREQUENCY (exit);
6234 /* Fix up corner cases, to avoid division by zero or creation of negative
6235 frequencies. */
6236 if (total_freq == 0)
6237 total_freq = 1;
6238 if (exit_freq > total_freq)
6239 exit_freq = total_freq;
6242 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6243 split_edge_bb_loc (exit), true);
6244 if (total_count)
6246 scale_bbs_frequencies_gcov_type (region, n_region,
6247 total_count - exit_count,
6248 total_count);
6249 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6250 total_count);
6252 else
6254 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6255 total_freq);
6256 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6259 /* Create the switch block, and put the exit condition to it. */
6260 entry_bb = entry->dest;
6261 nentry_bb = get_bb_copy (entry_bb);
6262 if (!last_stmt (entry->src)
6263 || !stmt_ends_bb_p (last_stmt (entry->src)))
6264 switch_bb = entry->src;
6265 else
6266 switch_bb = split_edge (entry);
6267 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6269 gsi = gsi_last_bb (switch_bb);
6270 cond_stmt = last_stmt (exit->src);
6271 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6272 cond_stmt = gimple_copy (cond_stmt);
6274 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6276 sorig = single_succ_edge (switch_bb);
6277 sorig->flags = exits[1]->flags;
6278 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6280 /* Register the new edge from SWITCH_BB in loop exit lists. */
6281 rescan_loop_exit (snew, true, false);
6283 /* Add the PHI node arguments. */
6284 add_phi_args_after_copy (region_copy, n_region, snew);
6286 /* Get rid of now superfluous conditions and associated edges (and phi node
6287 arguments). */
6288 exit_bb = exit->dest;
6290 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6291 PENDING_STMT (e) = NULL;
6293 /* The latch of ORIG_LOOP was copied, and so was the backedge
6294 to the original header. We redirect this backedge to EXIT_BB. */
6295 for (i = 0; i < n_region; i++)
6296 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6298 gcc_assert (single_succ_edge (region_copy[i]));
6299 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6300 PENDING_STMT (e) = NULL;
6301 for (psi = gsi_start_phis (exit_bb);
6302 !gsi_end_p (psi);
6303 gsi_next (&psi))
6305 phi = psi.phi ();
6306 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6307 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6310 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6311 PENDING_STMT (e) = NULL;
6313 /* Anything that is outside of the region, but was dominated by something
6314 inside needs to update dominance info. */
6315 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6316 doms.release ();
6317 /* Update the SSA web. */
6318 update_ssa (TODO_update_ssa);
6320 if (free_region_copy)
6321 free (region_copy);
6323 free_original_copy_tables ();
6324 return true;
6327 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6328 adding blocks when the dominator traversal reaches EXIT. This
6329 function silently assumes that ENTRY strictly dominates EXIT. */
6331 void
6332 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6333 vec<basic_block> *bbs_p)
6335 basic_block son;
6337 for (son = first_dom_son (CDI_DOMINATORS, entry);
6338 son;
6339 son = next_dom_son (CDI_DOMINATORS, son))
6341 bbs_p->safe_push (son);
6342 if (son != exit)
6343 gather_blocks_in_sese_region (son, exit, bbs_p);
6347 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6348 The duplicates are recorded in VARS_MAP. */
6350 static void
6351 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6352 tree to_context)
6354 tree t = *tp, new_t;
6355 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6357 if (DECL_CONTEXT (t) == to_context)
6358 return;
6360 bool existed;
6361 tree &loc = vars_map->get_or_insert (t, &existed);
6363 if (!existed)
6365 if (SSA_VAR_P (t))
6367 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6368 add_local_decl (f, new_t);
6370 else
6372 gcc_assert (TREE_CODE (t) == CONST_DECL);
6373 new_t = copy_node (t);
6375 DECL_CONTEXT (new_t) = to_context;
6377 loc = new_t;
6379 else
6380 new_t = loc;
6382 *tp = new_t;
6386 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6387 VARS_MAP maps old ssa names and var_decls to the new ones. */
6389 static tree
6390 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6391 tree to_context)
6393 tree new_name;
6395 gcc_assert (!virtual_operand_p (name));
6397 tree *loc = vars_map->get (name);
6399 if (!loc)
6401 tree decl = SSA_NAME_VAR (name);
6402 if (decl)
6404 replace_by_duplicate_decl (&decl, vars_map, to_context);
6405 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6406 decl, SSA_NAME_DEF_STMT (name));
6407 if (SSA_NAME_IS_DEFAULT_DEF (name))
6408 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6409 decl, new_name);
6411 else
6412 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6413 name, SSA_NAME_DEF_STMT (name));
6415 vars_map->put (name, new_name);
6417 else
6418 new_name = *loc;
6420 return new_name;
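/* Illustrative sketch (an addition): this is exactly how move_block_to_fn
   below remaps a PHI result while moving a block between functions:

     SET_PHI_RESULT (phi,
		     replace_ssa_name (op, d->vars_map, dest_cfun->decl));  */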
6423 struct move_stmt_d
6425 tree orig_block;
6426 tree new_block;
6427 tree from_context;
6428 tree to_context;
6429 hash_map<tree, tree> *vars_map;
6430 htab_t new_label_map;
6431 hash_map<void *, void *> *eh_map;
6432 bool remap_decls_p;
6435 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6436 contained in *TP if it was previously ORIG_BLOCK, and change the
6437 DECL_CONTEXT of every local variable referenced in *TP. */
6439 static tree
6440 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6442 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6443 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6444 tree t = *tp;
6446 if (EXPR_P (t))
6448 tree block = TREE_BLOCK (t);
6449 if (block == p->orig_block
6450 || (p->orig_block == NULL_TREE
6451 && block != NULL_TREE))
6452 TREE_SET_BLOCK (t, p->new_block);
6453 #ifdef ENABLE_CHECKING
6454 else if (block != NULL_TREE)
6456 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6457 block = BLOCK_SUPERCONTEXT (block);
6458 gcc_assert (block == p->orig_block);
6460 #endif
6462 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6464 if (TREE_CODE (t) == SSA_NAME)
6465 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6466 else if (TREE_CODE (t) == LABEL_DECL)
6468 if (p->new_label_map)
6470 struct tree_map in, *out;
6471 in.base.from = t;
6472 out = (struct tree_map *)
6473 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6474 if (out)
6475 *tp = t = out->to;
6478 DECL_CONTEXT (t) = p->to_context;
6480 else if (p->remap_decls_p)
6482 /* Replace T with its duplicate. T should no longer appear in the
6483 parent function, so this looks wasteful; however, it may appear
6484 in referenced_vars, and more importantly, as virtual operands of
6485 statements, and in alias lists of other variables. It would be
6486 quite difficult to expunge it from all those places. ??? It might
6487 suffice to do this for addressable variables. */
6488 if ((TREE_CODE (t) == VAR_DECL
6489 && !is_global_var (t))
6490 || TREE_CODE (t) == CONST_DECL)
6491 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6493 *walk_subtrees = 0;
6495 else if (TYPE_P (t))
6496 *walk_subtrees = 0;
6498 return NULL_TREE;
6501 /* Helper for move_stmt_r. Given an EH region number for the source
6502 function, map that to the duplicate EH region number in the dest. */
6504 static int
6505 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6507 eh_region old_r, new_r;
6509 old_r = get_eh_region_from_number (old_nr);
6510 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6512 return new_r->index;
6515 /* Similar, but operate on INTEGER_CSTs. */
6517 static tree
6518 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6520 int old_nr, new_nr;
6522 old_nr = tree_to_shwi (old_t_nr);
6523 new_nr = move_stmt_eh_region_nr (old_nr, p);
6525 return build_int_cst (integer_type_node, new_nr);
6528 /* Like move_stmt_op, but for gimple statements.
6530 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6531 contained in the current statement in *GSI_P and change the
6532 DECL_CONTEXT of every local variable referenced in the current
6533 statement. */
6535 static tree
6536 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6537 struct walk_stmt_info *wi)
6539 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6540 gimple stmt = gsi_stmt (*gsi_p);
6541 tree block = gimple_block (stmt);
6543 if (block == p->orig_block
6544 || (p->orig_block == NULL_TREE
6545 && block != NULL_TREE))
6546 gimple_set_block (stmt, p->new_block);
6548 switch (gimple_code (stmt))
6550 case GIMPLE_CALL:
6551 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6553 tree r, fndecl = gimple_call_fndecl (stmt);
6554 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6555 switch (DECL_FUNCTION_CODE (fndecl))
6557 case BUILT_IN_EH_COPY_VALUES:
6558 r = gimple_call_arg (stmt, 1);
6559 r = move_stmt_eh_region_tree_nr (r, p);
6560 gimple_call_set_arg (stmt, 1, r);
6561 /* FALLTHRU */
6563 case BUILT_IN_EH_POINTER:
6564 case BUILT_IN_EH_FILTER:
6565 r = gimple_call_arg (stmt, 0);
6566 r = move_stmt_eh_region_tree_nr (r, p);
6567 gimple_call_set_arg (stmt, 0, r);
6568 break;
6570 default:
6571 break;
6574 break;
6576 case GIMPLE_RESX:
6578 gresx *resx_stmt = as_a <gresx *> (stmt);
6579 int r = gimple_resx_region (resx_stmt);
6580 r = move_stmt_eh_region_nr (r, p);
6581 gimple_resx_set_region (resx_stmt, r);
6583 break;
6585 case GIMPLE_EH_DISPATCH:
6587 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6588 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6589 r = move_stmt_eh_region_nr (r, p);
6590 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6592 break;
6594 case GIMPLE_OMP_RETURN:
6595 case GIMPLE_OMP_CONTINUE:
6596 break;
6597 default:
6598 if (is_gimple_omp (stmt))
6600 /* Do not remap variables inside OMP directives. Variables
6601 referenced in clauses and directive header belong to the
6602 parent function and should not be moved into the child
6603 function. */
6604 bool save_remap_decls_p = p->remap_decls_p;
6605 p->remap_decls_p = false;
6606 *handled_ops_p = true;
6608 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6609 move_stmt_op, wi);
6611 p->remap_decls_p = save_remap_decls_p;
6613 break;
6616 return NULL_TREE;
6619 /* Move basic block BB from function CFUN to function DEST_FN. The
6620 block is moved out of the original linked list and placed after
6621 block AFTER in the new list. Also, the block is removed from the
6622 original array of blocks and placed in DEST_FN's array of blocks.
6623 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6624 updated to reflect the moved edges.
6626 The local variables are remapped to new instances, VARS_MAP is used
6627 to record the mapping. */
6629 static void
6630 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6631 basic_block after, bool update_edge_count_p,
6632 struct move_stmt_d *d)
6634 struct control_flow_graph *cfg;
6635 edge_iterator ei;
6636 edge e;
6637 gimple_stmt_iterator si;
6638 unsigned old_len, new_len;
6640 /* Remove BB from dominance structures. */
6641 delete_from_dominance_info (CDI_DOMINATORS, bb);
6643 /* Move BB from its current loop to the copy in the new function. */
6644 if (current_loops)
6646 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6647 if (new_loop)
6648 bb->loop_father = new_loop;
6651 /* Link BB to the new linked list. */
6652 move_block_after (bb, after);
6654 /* Update the edge count in the corresponding flowgraphs. */
6655 if (update_edge_count_p)
6656 FOR_EACH_EDGE (e, ei, bb->succs)
6658 cfun->cfg->x_n_edges--;
6659 dest_cfun->cfg->x_n_edges++;
6662 /* Remove BB from the original basic block array. */
6663 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6664 cfun->cfg->x_n_basic_blocks--;
6666 /* Grow DEST_CFUN's basic block array if needed. */
6667 cfg = dest_cfun->cfg;
6668 cfg->x_n_basic_blocks++;
6669 if (bb->index >= cfg->x_last_basic_block)
6670 cfg->x_last_basic_block = bb->index + 1;
6672 old_len = vec_safe_length (cfg->x_basic_block_info);
6673 if ((unsigned) cfg->x_last_basic_block >= old_len)
6675 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6676 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6679 (*cfg->x_basic_block_info)[bb->index] = bb;
6681 /* Remap the variables in phi nodes. */
6682 for (gphi_iterator psi = gsi_start_phis (bb);
6683 !gsi_end_p (psi); )
6685 gphi *phi = psi.phi ();
6686 use_operand_p use;
6687 tree op = PHI_RESULT (phi);
6688 ssa_op_iter oi;
6689 unsigned i;
6691 if (virtual_operand_p (op))
6693 /* Remove the phi nodes for virtual operands (alias analysis will be
6694 run for the new function, anyway). */
6695 remove_phi_node (&psi, true);
6696 continue;
6699 SET_PHI_RESULT (phi,
6700 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6701 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6703 op = USE_FROM_PTR (use);
6704 if (TREE_CODE (op) == SSA_NAME)
6705 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6708 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6710 location_t locus = gimple_phi_arg_location (phi, i);
6711 tree block = LOCATION_BLOCK (locus);
6713 if (locus == UNKNOWN_LOCATION)
6714 continue;
6715 if (d->orig_block == NULL_TREE || block == d->orig_block)
6717 if (d->new_block == NULL_TREE)
6718 locus = LOCATION_LOCUS (locus);
6719 else
6720 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6721 gimple_phi_arg_set_location (phi, i, locus);
6725 gsi_next (&psi);
6728 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6730 gimple stmt = gsi_stmt (si);
6731 struct walk_stmt_info wi;
6733 memset (&wi, 0, sizeof (wi));
6734 wi.info = d;
6735 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6737 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6739 tree label = gimple_label_label (label_stmt);
6740 int uid = LABEL_DECL_UID (label);
6742 gcc_assert (uid > -1);
6744 old_len = vec_safe_length (cfg->x_label_to_block_map);
6745 if (old_len <= (unsigned) uid)
6747 new_len = 3 * uid / 2 + 1;
6748 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6751 (*cfg->x_label_to_block_map)[uid] = bb;
6752 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6754 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6756 if (uid >= dest_cfun->cfg->last_label_uid)
6757 dest_cfun->cfg->last_label_uid = uid + 1;
6760 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6761 remove_stmt_from_eh_lp_fn (cfun, stmt);
6763 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6764 gimple_remove_stmt_histograms (cfun, stmt);
6766 /* We cannot leave any operands allocated from the operand caches of
6767 the current function. */
6768 free_stmt_operands (cfun, stmt);
6769 push_cfun (dest_cfun);
6770 update_stmt (stmt);
6771 pop_cfun ();
6774 FOR_EACH_EDGE (e, ei, bb->succs)
6775 if (e->goto_locus != UNKNOWN_LOCATION)
6777 tree block = LOCATION_BLOCK (e->goto_locus);
6778 if (d->orig_block == NULL_TREE
6779 || block == d->orig_block)
6780 e->goto_locus = d->new_block ?
6781 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6782 LOCATION_LOCUS (e->goto_locus);
6786 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6787 the outermost EH region. Use REGION as the incoming base EH region. */
6789 static eh_region
6790 find_outermost_region_in_block (struct function *src_cfun,
6791 basic_block bb, eh_region region)
6793 gimple_stmt_iterator si;
6795 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6797 gimple stmt = gsi_stmt (si);
6798 eh_region stmt_region;
6799 int lp_nr;
6801 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6802 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6803 if (stmt_region)
6805 if (region == NULL)
6806 region = stmt_region;
6807 else if (stmt_region != region)
6809 region = eh_region_outermost (src_cfun, stmt_region, region);
6810 gcc_assert (region != NULL);
6815 return region;
6818 static tree
6819 new_label_mapper (tree decl, void *data)
6821 htab_t hash = (htab_t) data;
6822 struct tree_map *m;
6823 void **slot;
6825 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6827 m = XNEW (struct tree_map);
6828 m->hash = DECL_UID (decl);
6829 m->base.from = decl;
6830 m->to = create_artificial_label (UNKNOWN_LOCATION);
6831 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6832 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6833 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6835 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6836 gcc_assert (*slot == NULL);
6838 *slot = m;
6840 return m->to;
6843 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6844 subblocks. */
6846 static void
6847 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6848 tree to_context)
6850 tree *tp, t;
6852 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6854 t = *tp;
6855 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6856 continue;
6857 replace_by_duplicate_decl (&t, vars_map, to_context);
6858 if (t != *tp)
6860 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6862 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6863 DECL_HAS_VALUE_EXPR_P (t) = 1;
6865 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6866 *tp = t;
6870 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6871 replace_block_vars_by_duplicates (block, vars_map, to_context);
6874 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6875 from FN1 to FN2. */
6877 static void
6878 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6879 struct loop *loop)
6881 /* Discard it from the old loop array. */
6882 (*get_loops (fn1))[loop->num] = NULL;
6884 /* Place it in the new loop array, assigning it a new number. */
6885 loop->num = number_of_loops (fn2);
6886 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6888 /* Recurse to children. */
6889 for (loop = loop->inner; loop; loop = loop->next)
6890 fixup_loop_arrays_after_move (fn1, fn2, loop);
6893 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6894 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6896 DEBUG_FUNCTION void
6897 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6899 basic_block bb;
6900 edge_iterator ei;
6901 edge e;
6902 bitmap bbs = BITMAP_ALLOC (NULL);
6903 int i;
6905 gcc_assert (entry != NULL);
6906 gcc_assert (entry != exit);
6907 gcc_assert (bbs_p != NULL);
6909 gcc_assert (bbs_p->length () > 0);
6911 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6912 bitmap_set_bit (bbs, bb->index);
6914 gcc_assert (bitmap_bit_p (bbs, entry->index));
6915 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6917 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6919 if (bb == entry)
6921 gcc_assert (single_pred_p (entry));
6922 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6924 else
6925 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6927 e = ei_edge (ei);
6928 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6931 if (bb == exit)
6933 gcc_assert (single_succ_p (exit));
6934 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6936 else
6937 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6939 e = ei_edge (ei);
6940 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6944 BITMAP_FREE (bbs);
6948 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6949 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6950 single basic block in the original CFG and the new basic block is
6951 returned. DEST_CFUN must not have a CFG yet.
6953 Note that the region need not be a pure SESE region. Blocks inside
6954 the region may contain calls to abort/exit. The only restriction
6955 is that ENTRY_BB should be the only entry point and it must
6956 dominate EXIT_BB.
6958 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6959 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6960 to the new function.
6962 All local variables referenced in the region are assumed to be in
6963 the corresponding BLOCK_VARS and unexpanded variable lists
6964 associated with DEST_CFUN. */
6966 basic_block
6967 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6968 basic_block exit_bb, tree orig_block)
6970 vec<basic_block> bbs, dom_bbs;
6971 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6972 basic_block after, bb, *entry_pred, *exit_succ, abb;
6973 struct function *saved_cfun = cfun;
6974 int *entry_flag, *exit_flag;
6975 unsigned *entry_prob, *exit_prob;
6976 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6977 edge e;
6978 edge_iterator ei;
6979 htab_t new_label_map;
6980 hash_map<void *, void *> *eh_map;
6981 struct loop *loop = entry_bb->loop_father;
6982 struct loop *loop0 = get_loop (saved_cfun, 0);
6983 struct move_stmt_d d;
6985 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6986 region. */
6987 gcc_assert (entry_bb != exit_bb
6988 && (!exit_bb
6989 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6991 /* Collect all the blocks in the region. Manually add ENTRY_BB
6992 because it won't be added by dfs_enumerate_from. */
6993 bbs.create (0);
6994 bbs.safe_push (entry_bb);
6995 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6996 #ifdef ENABLE_CHECKING
6997 verify_sese (entry_bb, exit_bb, &bbs);
6998 #endif
7000 /* The blocks that used to be dominated by something in BBS will now be
7001 dominated by the new block. */
7002 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7003 bbs.address (),
7004 bbs.length ());
7006 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7007 the predecessor edges to ENTRY_BB and the successor edges to
7008 EXIT_BB so that we can re-attach them to the new basic block that
7009 will replace the region. */
7010 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7011 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7012 entry_flag = XNEWVEC (int, num_entry_edges);
7013 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7014 i = 0;
7015 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7017 entry_prob[i] = e->probability;
7018 entry_flag[i] = e->flags;
7019 entry_pred[i++] = e->src;
7020 remove_edge (e);
7023 if (exit_bb)
7025 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7026 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7027 exit_flag = XNEWVEC (int, num_exit_edges);
7028 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7029 i = 0;
7030 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7032 exit_prob[i] = e->probability;
7033 exit_flag[i] = e->flags;
7034 exit_succ[i++] = e->dest;
7035 remove_edge (e);
7038 else
7040 num_exit_edges = 0;
7041 exit_succ = NULL;
7042 exit_flag = NULL;
7043 exit_prob = NULL;
7046 /* Switch context to the child function to initialize DEST_FN's CFG. */
7047 gcc_assert (dest_cfun->cfg == NULL);
7048 push_cfun (dest_cfun);
7050 init_empty_tree_cfg ();
7052 /* Initialize EH information for the new function. */
7053 eh_map = NULL;
7054 new_label_map = NULL;
7055 if (saved_cfun->eh)
7057 eh_region region = NULL;
7059 FOR_EACH_VEC_ELT (bbs, i, bb)
7060 region = find_outermost_region_in_block (saved_cfun, bb, region);
7062 init_eh_for_function ();
7063 if (region != NULL)
7065 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7066 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7067 new_label_mapper, new_label_map);
7071 /* Initialize an empty loop tree. */
7072 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7073 init_loops_structure (dest_cfun, loops, 1);
7074 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7075 set_loops_for_fn (dest_cfun, loops);
7077 /* Move the outlined loop tree part. */
7078 num_nodes = bbs.length ();
7079 FOR_EACH_VEC_ELT (bbs, i, bb)
7081 if (bb->loop_father->header == bb)
7083 struct loop *this_loop = bb->loop_father;
7084 struct loop *outer = loop_outer (this_loop);
7085 if (outer == loop
7086 /* If the SESE region contains some bbs ending with
7087 a noreturn call, those are considered to belong
7088 to the outermost loop in saved_cfun, rather than
7089 the entry_bb's loop_father. */
7090 || outer == loop0)
7092 if (outer != loop)
7093 num_nodes -= this_loop->num_nodes;
7094 flow_loop_tree_node_remove (bb->loop_father);
7095 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7096 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7099 else if (bb->loop_father == loop0 && loop0 != loop)
7100 num_nodes--;
7102 /* Remove loop exits from the outlined region. */
7103 if (loops_for_fn (saved_cfun)->exits)
7104 FOR_EACH_EDGE (e, ei, bb->succs)
7106 struct loops *l = loops_for_fn (saved_cfun);
7107 loop_exit **slot
7108 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7109 NO_INSERT);
7110 if (slot)
7111 l->exits->clear_slot (slot);
7116 /* Adjust the number of blocks in the tree root of the outlined part. */
7117 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7119 /* Setup a mapping to be used by move_block_to_fn. */
7120 loop->aux = current_loops->tree_root;
7121 loop0->aux = current_loops->tree_root;
7123 pop_cfun ();
7125 /* Move blocks from BBS into DEST_CFUN. */
7126 gcc_assert (bbs.length () >= 2);
7127 after = dest_cfun->cfg->x_entry_block_ptr;
7128 hash_map<tree, tree> vars_map;
7130 memset (&d, 0, sizeof (d));
7131 d.orig_block = orig_block;
7132 d.new_block = DECL_INITIAL (dest_cfun->decl);
7133 d.from_context = cfun->decl;
7134 d.to_context = dest_cfun->decl;
7135 d.vars_map = &vars_map;
7136 d.new_label_map = new_label_map;
7137 d.eh_map = eh_map;
7138 d.remap_decls_p = true;
7140 FOR_EACH_VEC_ELT (bbs, i, bb)
7142 /* No need to update edge counts on the last block. It has
7143 already been updated earlier when we detached the region from
7144 the original CFG. */
7145 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7146 after = bb;
7149 loop->aux = NULL;
7150 loop0->aux = NULL;
7151 /* Loop sizes are no longer correct, fix them up. */
7152 loop->num_nodes -= num_nodes;
7153 for (struct loop *outer = loop_outer (loop);
7154 outer; outer = loop_outer (outer))
7155 outer->num_nodes -= num_nodes;
7156 loop0->num_nodes -= bbs.length () - num_nodes;
7158 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7160 struct loop *aloop;
7161 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7162 if (aloop != NULL)
7164 if (aloop->simduid)
7166 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7167 d.to_context);
7168 dest_cfun->has_simduid_loops = true;
7170 if (aloop->force_vectorize)
7171 dest_cfun->has_force_vectorize_loops = true;
7175 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7176 if (orig_block)
7178 tree block;
7179 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7180 == NULL_TREE);
7181 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7182 = BLOCK_SUBBLOCKS (orig_block);
7183 for (block = BLOCK_SUBBLOCKS (orig_block);
7184 block; block = BLOCK_CHAIN (block))
7185 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7186 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7189 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7190 &vars_map, dest_cfun->decl);
7192 if (new_label_map)
7193 htab_delete (new_label_map);
7194 if (eh_map)
7195 delete eh_map;
7197 /* Rewire the entry and exit blocks. The successor to the entry
7198 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7199 the child function. Similarly, the predecessor of DEST_FN's
7200 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7201 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7202 various CFG manipulation functions get to the right CFG.
7204 FIXME, this is silly. The CFG ought to become a parameter to
7205 these helpers. */
7206 push_cfun (dest_cfun);
7207 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7208 if (exit_bb)
7209 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7210 pop_cfun ();
7212 /* Back in the original function, the SESE region has disappeared,
7213 create a new basic block in its place. */
7214 bb = create_empty_bb (entry_pred[0]);
7215 if (current_loops)
7216 add_bb_to_loop (bb, loop);
7217 for (i = 0; i < num_entry_edges; i++)
7219 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7220 e->probability = entry_prob[i];
7223 for (i = 0; i < num_exit_edges; i++)
7225 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7226 e->probability = exit_prob[i];
7229 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7230 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7231 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7232 dom_bbs.release ();
7234 if (exit_bb)
7236 free (exit_prob);
7237 free (exit_flag);
7238 free (exit_succ);
7240 free (entry_prob);
7241 free (entry_flag);
7242 free (entry_pred);
7243 bbs.release ();
7245 return bb;
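/* Illustrative sketch (an addition, loosely modeled on the OMP expansion
   code, this function's main caller): outlining a region into a child
   function.  CHILD_FN, ENTRY_BB, EXIT_BB and BLOCK are assumed to have been
   set up by the caller:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);  */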
7249 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7252 void
7253 dump_function_to_file (tree fndecl, FILE *file, int flags)
7255 tree arg, var, old_current_fndecl = current_function_decl;
7256 struct function *dsf;
7257 bool ignore_topmost_bind = false, any_var = false;
7258 basic_block bb;
7259 tree chain;
7260 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7261 && decl_is_tm_clone (fndecl));
7262 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7264 current_function_decl = fndecl;
7265 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7267 arg = DECL_ARGUMENTS (fndecl);
7268 while (arg)
7270 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7271 fprintf (file, " ");
7272 print_generic_expr (file, arg, dump_flags);
7273 if (flags & TDF_VERBOSE)
7274 print_node (file, "", arg, 4);
7275 if (DECL_CHAIN (arg))
7276 fprintf (file, ", ");
7277 arg = DECL_CHAIN (arg);
7279 fprintf (file, ")\n");
7281 if (flags & TDF_VERBOSE)
7282 print_node (file, "", fndecl, 2);
7284 dsf = DECL_STRUCT_FUNCTION (fndecl);
7285 if (dsf && (flags & TDF_EH))
7286 dump_eh_tree (file, dsf);
7288 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7290 dump_node (fndecl, TDF_SLIM | flags, file);
7291 current_function_decl = old_current_fndecl;
7292 return;
7295 /* When GIMPLE is lowered, the variables are no longer available in
7296 BIND_EXPRs, so display them separately. */
7297 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7299 unsigned ix;
7300 ignore_topmost_bind = true;
7302 fprintf (file, "{\n");
7303 if (!vec_safe_is_empty (fun->local_decls))
7304 FOR_EACH_LOCAL_DECL (fun, ix, var)
7306 print_generic_decl (file, var, flags);
7307 if (flags & TDF_VERBOSE)
7308 print_node (file, "", var, 4);
7309 fprintf (file, "\n");
7311 any_var = true;
7313 if (gimple_in_ssa_p (cfun))
7314 for (ix = 1; ix < num_ssa_names; ++ix)
7316 tree name = ssa_name (ix);
7317 if (name && !SSA_NAME_VAR (name))
7319 fprintf (file, " ");
7320 print_generic_expr (file, TREE_TYPE (name), flags);
7321 fprintf (file, " ");
7322 print_generic_expr (file, name, flags);
7323 fprintf (file, ";\n");
7325 any_var = true;
7330 if (fun && fun->decl == fndecl
7331 && fun->cfg
7332 && basic_block_info_for_fn (fun))
7334 /* If the CFG has been built, emit a CFG-based dump. */
7335 if (!ignore_topmost_bind)
7336 fprintf (file, "{\n");
7338 if (any_var && n_basic_blocks_for_fn (fun))
7339 fprintf (file, "\n");
7341 FOR_EACH_BB_FN (bb, fun)
7342 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7344 fprintf (file, "}\n");
7346 else if (DECL_SAVED_TREE (fndecl) == NULL)
7348 /* The function is now in GIMPLE form but the CFG has not been
7349 built yet. Emit the single sequence of GIMPLE statements
7350 that make up its body. */
7351 gimple_seq body = gimple_body (fndecl);
7353 if (gimple_seq_first_stmt (body)
7354 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7355 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7356 print_gimple_seq (file, body, 0, flags);
7357 else
7359 if (!ignore_topmost_bind)
7360 fprintf (file, "{\n");
7362 if (any_var)
7363 fprintf (file, "\n");
7365 print_gimple_seq (file, body, 2, flags);
7366 fprintf (file, "}\n");
7369 else
7371 int indent;
7373 /* Make a tree based dump. */
7374 chain = DECL_SAVED_TREE (fndecl);
7375 if (chain && TREE_CODE (chain) == BIND_EXPR)
7377 if (ignore_topmost_bind)
7379 chain = BIND_EXPR_BODY (chain);
7380 indent = 2;
7382 else
7383 indent = 0;
7385 else
7387 if (!ignore_topmost_bind)
7388 fprintf (file, "{\n");
7389 indent = 2;
7392 if (any_var)
7393 fprintf (file, "\n");
7395 print_generic_stmt_indented (file, chain, flags, indent);
7396 if (ignore_topmost_bind)
7397 fprintf (file, "}\n");
7400 if (flags & TDF_ENUMERATE_LOCALS)
7401 dump_enumerated_decls (file, flags);
7402 fprintf (file, "\n\n");
7404 current_function_decl = old_current_fndecl;
7407 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7409 DEBUG_FUNCTION void
7410 debug_function (tree fn, int flags)
7412 dump_function_to_file (fn, stderr, flags);
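/* Illustrative note (an addition): as a DEBUG_FUNCTION this is meant to be
   invoked by hand from the debugger, e.g.

     (gdb) call debug_function (current_function_decl, 0)  */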
7416 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7418 static void
7419 print_pred_bbs (FILE *file, basic_block bb)
7421 edge e;
7422 edge_iterator ei;
7424 FOR_EACH_EDGE (e, ei, bb->preds)
7425 fprintf (file, "bb_%d ", e->src->index);
7429 /* Print on FILE the indexes for the successors of basic_block BB. */
7431 static void
7432 print_succ_bbs (FILE *file, basic_block bb)
7434 edge e;
7435 edge_iterator ei;
7437 FOR_EACH_EDGE (e, ei, bb->succs)
7438 fprintf (file, "bb_%d ", e->dest->index);
7441 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7443 void
7444 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7446 char *s_indent = (char *) alloca ((size_t) indent + 1);
7447 memset ((void *) s_indent, ' ', (size_t) indent);
7448 s_indent[indent] = '\0';
7450 /* Print basic_block's header. */
7451 if (verbosity >= 2)
7453 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7454 print_pred_bbs (file, bb);
7455 fprintf (file, "}, succs = {");
7456 print_succ_bbs (file, bb);
7457 fprintf (file, "})\n");
7460 /* Print basic_block's body. */
7461 if (verbosity >= 3)
7463 fprintf (file, "%s {\n", s_indent);
7464 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7465 fprintf (file, "%s }\n", s_indent);
7469 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7471 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7472 VERBOSITY level, this outputs the contents of the loop, or just its
7473 structure. */
7475 static void
7476 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7478 char *s_indent;
7479 basic_block bb;
7481 if (loop == NULL)
7482 return;
7484 s_indent = (char *) alloca ((size_t) indent + 1);
7485 memset ((void *) s_indent, ' ', (size_t) indent);
7486 s_indent[indent] = '\0';
7488 /* Print loop's header. */
7489 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7490 if (loop->header)
7491 fprintf (file, "header = %d", loop->header->index);
7492 else
7494 fprintf (file, "deleted)\n");
7495 return;
7497 if (loop->latch)
7498 fprintf (file, ", latch = %d", loop->latch->index);
7499 else
7500 fprintf (file, ", multiple latches");
7501 fprintf (file, ", niter = ");
7502 print_generic_expr (file, loop->nb_iterations, 0);
7504 if (loop->any_upper_bound)
7506 fprintf (file, ", upper_bound = ");
7507 print_decu (loop->nb_iterations_upper_bound, file);
7510 if (loop->any_estimate)
7512 fprintf (file, ", estimate = ");
7513 print_decu (loop->nb_iterations_estimate, file);
7515 fprintf (file, ")\n");
7517 /* Print loop's body. */
7518 if (verbosity >= 1)
7520 fprintf (file, "%s{\n", s_indent);
7521 FOR_EACH_BB_FN (bb, cfun)
7522 if (bb->loop_father == loop)
7523 print_loops_bb (file, bb, indent, verbosity);
7525 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7526 fprintf (file, "%s}\n", s_indent);
7530 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7531 spaces. Depending on the VERBOSITY level, this outputs the contents of the
7532 loop, or just its structure. */
7534 static void
7535 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7536 int verbosity)
7538 if (loop == NULL)
7539 return;
7541 print_loop (file, loop, indent, verbosity);
7542 print_loop_and_siblings (file, loop->next, indent, verbosity);
7545 /* Follow a CFG edge from the entry point of the program, and on entry
7546 of a loop, pretty print the loop structure on FILE. */
7548 void
7549 print_loops (FILE *file, int verbosity)
7551 basic_block bb;
7553 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7554 if (bb && bb->loop_father)
7555 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7558 /* Dump a loop. */
7560 DEBUG_FUNCTION void
7561 debug (struct loop &ref)
7563 print_loop (stderr, &ref, 0, /*verbosity*/0);
7566 DEBUG_FUNCTION void
7567 debug (struct loop *ptr)
7569 if (ptr)
7570 debug (*ptr);
7571 else
7572 fprintf (stderr, "<nil>\n");
7575 /* Dump a loop verbosely. */
7577 DEBUG_FUNCTION void
7578 debug_verbose (struct loop &ref)
7580 print_loop (stderr, &ref, 0, /*verbosity*/3);
7583 DEBUG_FUNCTION void
7584 debug_verbose (struct loop *ptr)
7586 if (ptr)
7587 debug (*ptr);
7588 else
7589 fprintf (stderr, "<nil>\n");
7593 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7595 DEBUG_FUNCTION void
7596 debug_loops (int verbosity)
7598 print_loops (stderr, verbosity);
7601 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7603 DEBUG_FUNCTION void
7604 debug_loop (struct loop *loop, int verbosity)
7606 print_loop (stderr, loop, 0, verbosity);
7609 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7610 level. */
7612 DEBUG_FUNCTION void
7613 debug_loop_num (unsigned num, int verbosity)
7615 debug_loop (get_loop (cfun, num), verbosity);
7618 /* Return true if BB ends with a call, possibly followed by some
7619 instructions that must stay with the call. Return false
7620 otherwise. */
7622 static bool
7623 gimple_block_ends_with_call_p (basic_block bb)
7625 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7626 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7630 /* Return true if BB ends with a conditional branch. Return false
7631 otherwise. */
7633 static bool
7634 gimple_block_ends_with_condjump_p (const_basic_block bb)
7636 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7637 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7641 /* Return true if we need to add fake edge to exit at statement T.
7642 Helper function for gimple_flow_call_edges_add. */
7644 static bool
7645 need_fake_edge_p (gimple t)
7647 tree fndecl = NULL_TREE;
7648 int call_flags = 0;
7650 /* NORETURN and LONGJMP calls already have an edge to exit.
7651 CONST and PURE calls do not need one.
7652 We don't currently check for CONST and PURE here, although
7653 it would be a good idea, because those attributes are
7654 figured out from the RTL in mark_constant_function, and
7655 the counter incrementation code from -fprofile-arcs
7656 leads to different results from -fbranch-probabilities. */
7657 if (is_gimple_call (t))
7659 fndecl = gimple_call_fndecl (t);
7660 call_flags = gimple_call_flags (t);
7663 if (is_gimple_call (t)
7664 && fndecl
7665 && DECL_BUILT_IN (fndecl)
7666 && (call_flags & ECF_NOTHROW)
7667 && !(call_flags & ECF_RETURNS_TWICE)
7668 /* fork() doesn't really return twice, but wrapping it in
7669 __gcov_fork(), which calls __gcov_flush() and clears the
7670 counters before forking, has the same effect as returning
7671 twice. Force a fake edge. */
7672 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7673 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7674 return false;
7676 if (is_gimple_call (t))
7678 edge_iterator ei;
7679 edge e;
7680 basic_block bb;
7682 if (!(call_flags & ECF_NORETURN))
7683 return true;
7685 bb = gimple_bb (t);
7686 FOR_EACH_EDGE (e, ei, bb->succs)
7687 if ((e->flags & EDGE_FAKE) == 0)
7688 return true;
7691 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7692 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7693 return true;
7695 return false;
7699 /* Add fake edges to the function exit for any non constant and non
7700 noreturn calls (or noreturn calls with EH/abnormal edges),
7701 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7702 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7703 that were split.
7705 The goal is to expose cases in which entering a basic block does
7706 not imply that all subsequent instructions must be executed. */
7708 static int
7709 gimple_flow_call_edges_add (sbitmap blocks)
7711 int i;
7712 int blocks_split = 0;
7713 int last_bb = last_basic_block_for_fn (cfun);
7714 bool check_last_block = false;
7716 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7717 return 0;
7719 if (! blocks)
7720 check_last_block = true;
7721 else
7722 check_last_block = bitmap_bit_p (blocks,
7723 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7725 /* In the last basic block, before epilogue generation, there will be
7726 a fallthru edge to EXIT. Special care is required if the last insn
7727 of the last basic block is a call because make_edge folds duplicate
7728 edges, which would result in the fallthru edge also being marked
7729 fake, which would result in the fallthru edge being removed by
7730 remove_fake_edges, which would result in an invalid CFG.
7732 Moreover, we can't elide the outgoing fake edge, since the block
7733 profiler needs to take this into account in order to solve the minimal
7734 spanning tree in the case that the call doesn't return.
7736 Handle this by adding a dummy instruction in a new last basic block. */
7737 if (check_last_block)
7739 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7740 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7741 gimple t = NULL;
7743 if (!gsi_end_p (gsi))
7744 t = gsi_stmt (gsi);
7746 if (t && need_fake_edge_p (t))
7748 edge e;
7750 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7751 if (e)
7753 gsi_insert_on_edge (e, gimple_build_nop ());
7754 gsi_commit_edge_inserts ();
7759 /* Now add fake edges to the function exit for any non constant
7760 calls since there is no way that we can determine if they will
7761 return or not... */
7762 for (i = 0; i < last_bb; i++)
7764 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7765 gimple_stmt_iterator gsi;
7766 gimple stmt, last_stmt;
7768 if (!bb)
7769 continue;
7771 if (blocks && !bitmap_bit_p (blocks, i))
7772 continue;
7774 gsi = gsi_last_nondebug_bb (bb);
7775 if (!gsi_end_p (gsi))
7777 last_stmt = gsi_stmt (gsi);
7780 stmt = gsi_stmt (gsi);
7781 if (need_fake_edge_p (stmt))
7783 edge e;
7785 /* The handling above of the final block before the
7786 epilogue should be enough to verify that there is
7787 no edge to the exit block in CFG already.
7788 Calling make_edge in such case would cause us to
7789 mark that edge as fake and remove it later. */
7790 #ifdef ENABLE_CHECKING
7791 if (stmt == last_stmt)
7793 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7794 gcc_assert (e == NULL);
7796 #endif
7798 /* Note that the following may create a new basic block
7799 and renumber the existing basic blocks. */
7800 if (stmt != last_stmt)
7802 e = split_block (bb, stmt);
7803 if (e)
7804 blocks_split++;
7806 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7808 gsi_prev (&gsi);
7810 while (!gsi_end_p (gsi));
7814 if (blocks_split)
7815 verify_flow_info ();
7817 return blocks_split;
7820 /* Removes edge E and all the blocks dominated by it, and updates dominance
7821 information. The IL in E->src needs to be updated separately.
7822 If dominance info is not available, only the edge E is removed. */
7824 void
7825 remove_edge_and_dominated_blocks (edge e)
7827 vec<basic_block> bbs_to_remove = vNULL;
7828 vec<basic_block> bbs_to_fix_dom = vNULL;
7829 bitmap df, df_idom;
7830 edge f;
7831 edge_iterator ei;
7832 bool none_removed = false;
7833 unsigned i;
7834 basic_block bb, dbb;
7835 bitmap_iterator bi;
7837 if (!dom_info_available_p (CDI_DOMINATORS))
7839 remove_edge (e);
7840 return;
7843 /* No updating is needed for edges to exit. */
7844 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7846 if (cfgcleanup_altered_bbs)
7847 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7848 remove_edge (e);
7849 return;
7852 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7853 that is not dominated by E->dest, then this set is empty. Otherwise,
7854 all the basic blocks dominated by E->dest are removed.
7856 Also, to DF_IDOM we store the immediate dominators of the blocks in
7857 the dominance frontier of E (i.e., of the successors of the
7858 removed blocks, if there are any, and of E->dest otherwise). */
7859 FOR_EACH_EDGE (f, ei, e->dest->preds)
7861 if (f == e)
7862 continue;
7864 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7866 none_removed = true;
7867 break;
7871 df = BITMAP_ALLOC (NULL);
7872 df_idom = BITMAP_ALLOC (NULL);
7874 if (none_removed)
7875 bitmap_set_bit (df_idom,
7876 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7877 else
7879 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7880 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7882 FOR_EACH_EDGE (f, ei, bb->succs)
7884 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7885 bitmap_set_bit (df, f->dest->index);
7888 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7889 bitmap_clear_bit (df, bb->index);
7891 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7893 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7894 bitmap_set_bit (df_idom,
7895 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7899 if (cfgcleanup_altered_bbs)
7901 /* Record the set of the altered basic blocks. */
7902 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7903 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7906 /* Remove E and the cancelled blocks. */
7907 if (none_removed)
7908 remove_edge (e);
7909 else
7911 /* Walk backwards so as to get a chance to substitute all
7912 released DEFs into debug stmts. See
7913 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7914 details. */
7915 for (i = bbs_to_remove.length (); i-- > 0; )
7916 delete_basic_block (bbs_to_remove[i]);
7919 /* Update the dominance information. The immediate dominator may change only
7920 for blocks whose immediate dominator belongs to DF_IDOM:
7922 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7923 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7924 Z dominates X after the removal. Before removal, there exists a path P
7925 from Y to X that avoids Z. Let F be the last edge on P that is
7926 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7927 dominates W, and because of P, Z does not dominate W), and W belongs to
7928 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7929 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7931 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7932 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7933 dbb;
7934 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7935 bbs_to_fix_dom.safe_push (dbb);
7938 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7940 BITMAP_FREE (df);
7941 BITMAP_FREE (df_idom);
7942 bbs_to_remove.release ();
7943 bbs_to_fix_dom.release ();
7946 /* Purge dead EH edges from basic block BB. */
7948 bool
7949 gimple_purge_dead_eh_edges (basic_block bb)
7951 bool changed = false;
7952 edge e;
7953 edge_iterator ei;
7954 gimple stmt = last_stmt (bb);
7956 if (stmt && stmt_can_throw_internal (stmt))
7957 return false;
7959 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7961 if (e->flags & EDGE_EH)
7963 remove_edge_and_dominated_blocks (e);
7964 changed = true;
7966 else
7967 ei_next (&ei);
7970 return changed;
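/* Illustrative sketch (an addition): a typical caller pattern after folding
   may have changed whether a statement can throw (maybe_clean_eh_stmt is
   declared in tree-eh.h):

     if (maybe_clean_eh_stmt (stmt)
	 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       cfg_changed = true;  */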
7973 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7975 bool
7976 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7978 bool changed = false;
7979 unsigned i;
7980 bitmap_iterator bi;
7982 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7984 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7986 /* Earlier gimple_purge_dead_eh_edges could have removed
7987 this basic block already. */
7988 gcc_assert (bb || changed);
7989 if (bb != NULL)
7990 changed |= gimple_purge_dead_eh_edges (bb);
7993 return changed;
7996 /* Purge dead abnormal call edges from basic block BB. */
7998 bool
7999 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8001 bool changed = false;
8002 edge e;
8003 edge_iterator ei;
8004 gimple stmt = last_stmt (bb);
8006 if (!cfun->has_nonlocal_label
8007 && !cfun->calls_setjmp)
8008 return false;
8010 if (stmt && stmt_can_make_abnormal_goto (stmt))
8011 return false;
8013 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8015 if (e->flags & EDGE_ABNORMAL)
8017 if (e->flags & EDGE_FALLTHRU)
8018 e->flags &= ~EDGE_ABNORMAL;
8019 else
8020 remove_edge_and_dominated_blocks (e);
8021 changed = true;
8023 else
8024 ei_next (&ei);
8027 return changed;
8030 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8032 bool
8033 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8035 bool changed = false;
8036 unsigned i;
8037 bitmap_iterator bi;
8039 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8041 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8043 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8044 this basic block already. */
8045 gcc_assert (bb || changed);
8046 if (bb != NULL)
8047 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8050 return changed;
8053 /* This function is called whenever a new edge is created or
8054 redirected. */
8056 static void
8057 gimple_execute_on_growing_pred (edge e)
8059 basic_block bb = e->dest;
8061 if (!gimple_seq_empty_p (phi_nodes (bb)))
8062 reserve_phi_args_for_new_edge (bb);
8065 /* This function is called immediately before edge E is removed from
8066 the edge vector E->dest->preds. */
8068 static void
8069 gimple_execute_on_shrinking_pred (edge e)
8071 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8072 remove_phi_args (e);
8075 /*---------------------------------------------------------------------------
8076 Helper functions for Loop versioning
8077 ---------------------------------------------------------------------------*/
8079 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8080 of 'first'. Both of them are dominated by 'new_head' basic block. When
8081 'new_head' was created by splitting 'second's incoming edge, it received
8082 phi arguments on that edge from split_edge(). Later, an additional edge
8083 'e' was created to connect 'new_head' and 'first'. This routine adds to
8084 edge 'e' the phi args that the new_head-to-second edge received as part
8085 of the edge splitting. */
8087 static void
8088 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8089 basic_block new_head, edge e)
8091 gphi *phi1, *phi2;
8092 gphi_iterator psi1, psi2;
8093 tree def;
8094 edge e2 = find_edge (new_head, second);
8096 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8097 edge, we should always have an edge from NEW_HEAD to SECOND. */
8098 gcc_assert (e2 != NULL);
8100 /* Browse all 'second' basic block phi nodes and add phi args to
8101 edge 'e' for 'first' head. PHI args are always in correct order. */
8103 for (psi2 = gsi_start_phis (second),
8104 psi1 = gsi_start_phis (first);
8105 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8106 gsi_next (&psi2), gsi_next (&psi1))
8108 phi1 = psi1.phi ();
8109 phi2 = psi2.phi ();
8110 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8111 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8116 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8117 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8118 the destination of the ELSE part. */
8120 static void
8121 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8122 basic_block second_head ATTRIBUTE_UNUSED,
8123 basic_block cond_bb, void *cond_e)
8125 gimple_stmt_iterator gsi;
8126 gimple new_cond_expr;
8127 tree cond_expr = (tree) cond_e;
8128 edge e0;
8130 /* Build new conditional expr */
8131 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8132 NULL_TREE, NULL_TREE);
8134 /* Add new cond in cond_bb. */
8135 gsi = gsi_last_bb (cond_bb);
8136 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8138 /* Adjust edges appropriately to connect new head with first head
8139 as well as second head. */
8140 e0 = single_succ_edge (cond_bb);
8141 e0->flags &= ~EDGE_FALLTHRU;
8142 e0->flags |= EDGE_FALSE_VALUE;
8146 /* Do book-keeping of basic block BB for the profile consistency checker.
8147 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8148 then do post-pass accounting. Store the counting in RECORD. */
8149 static void
8150 gimple_account_profile_record (basic_block bb, int after_pass,
8151 struct profile_record *record)
8153 gimple_stmt_iterator i;
8154 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8156 record->size[after_pass]
8157 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8158 if (profile_status_for_fn (cfun) == PROFILE_READ)
8159 record->time[after_pass]
8160 += estimate_num_insns (gsi_stmt (i),
8161 &eni_time_weights) * bb->count;
8162 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8163 record->time[after_pass]
8164 += estimate_num_insns (gsi_stmt (i),
8165 &eni_time_weights) * bb->frequency;
8169 struct cfg_hooks gimple_cfg_hooks = {
8170 "gimple",
8171 gimple_verify_flow_info,
8172 gimple_dump_bb, /* dump_bb */
8173 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8174 create_bb, /* create_basic_block */
8175 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8176 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8177 gimple_can_remove_branch_p, /* can_remove_branch_p */
8178 remove_bb, /* delete_basic_block */
8179 gimple_split_block, /* split_block */
8180 gimple_move_block_after, /* move_block_after */
8181 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8182 gimple_merge_blocks, /* merge_blocks */
8183 gimple_predict_edge, /* predict_edge */
8184 gimple_predicted_by_p, /* predicted_by_p */
8185 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8186 gimple_duplicate_bb, /* duplicate_block */
8187 gimple_split_edge, /* split_edge */
8188 gimple_make_forwarder_block, /* make_forwarder_block */
8189 NULL, /* tidy_fallthru_edge */
8190 NULL, /* force_nonfallthru */
8191 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8192 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8193 gimple_flow_call_edges_add, /* flow_call_edges_add */
8194 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8195 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8196 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8197 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8198 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8199 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8200 flush_pending_stmts, /* flush_pending_stmts */
8201 gimple_empty_block_p, /* block_empty_p */
8202 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8203 gimple_account_profile_record,
8207 /* Split all critical edges. */
8209 unsigned int
8210 split_critical_edges (void)
8212 basic_block bb;
8213 edge e;
8214 edge_iterator ei;
8216 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8217 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8218 mappings around the calls to split_edge. */
8219 start_recording_case_labels ();
8220 FOR_ALL_BB_FN (bb, cfun)
8222 FOR_EACH_EDGE (e, ei, bb->succs)
8224 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8225 split_edge (e);
8226 /* PRE inserts statements on edges and expects that
8227 since split_critical_edges was done beforehand, committing edge
8228 insertions will not split more edges. In addition to critical
8229 edges we must split edges that have multiple successors and
8230 end by control flow statements, such as RESX.
8231 Go ahead and split them too. This matches the logic in
8232 gimple_find_edge_insert_loc. */
8233 else if ((!single_pred_p (e->dest)
8234 || !gimple_seq_empty_p (phi_nodes (e->dest))
8235 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8236 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8237 && !(e->flags & EDGE_ABNORMAL))
8239 gimple_stmt_iterator gsi;
8241 gsi = gsi_last_bb (e->src);
8242 if (!gsi_end_p (gsi)
8243 && stmt_ends_bb_p (gsi_stmt (gsi))
8244 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8245 && !gimple_call_builtin_p (gsi_stmt (gsi),
8246 BUILT_IN_RETURN)))
8247 split_edge (e);
8251 end_recording_case_labels ();
8252 return 0;
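/* Note (an addition): EDGE_CRITICAL_P, used above, is defined in
   basic-block.h; an edge is critical when its source has several successors
   and its destination has several predecessors, i.e. roughly

     EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2  */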
8255 namespace {
8257 const pass_data pass_data_split_crit_edges =
8259 GIMPLE_PASS, /* type */
8260 "crited", /* name */
8261 OPTGROUP_NONE, /* optinfo_flags */
8262 TV_TREE_SPLIT_EDGES, /* tv_id */
8263 PROP_cfg, /* properties_required */
8264 PROP_no_crit_edges, /* properties_provided */
8265 0, /* properties_destroyed */
8266 0, /* todo_flags_start */
8267 0, /* todo_flags_finish */
8270 class pass_split_crit_edges : public gimple_opt_pass
8272 public:
8273 pass_split_crit_edges (gcc::context *ctxt)
8274 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8277 /* opt_pass methods: */
8278 virtual unsigned int execute (function *) { return split_critical_edges (); }
8280 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8281 }; // class pass_split_crit_edges
8283 } // anon namespace
8285 gimple_opt_pass *
8286 make_pass_split_crit_edges (gcc::context *ctxt)
8288 return new pass_split_crit_edges (ctxt);
8292 /* Insert COND expression which is GIMPLE_COND after STMT
8293 in basic block BB with appropriate basic block split
8294 and creation of a new conditionally executed basic block.
8295 Return created basic block. */
8296 basic_block
8297 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8299 edge fall = split_block (bb, stmt);
8300 gimple_stmt_iterator iter = gsi_last_bb (bb);
8301 basic_block new_bb;
8303 /* Insert cond statement. */
8304 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8305 if (gsi_end_p (iter))
8306 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8307 else
8308 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8310 /* Create conditionally executed block. */
8311 new_bb = create_empty_bb (bb);
8312 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8313 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8315 /* Fix edge for split bb. */
8316 fall->flags = EDGE_FALSE_VALUE;
8318 /* Update dominance info. */
8319 if (dom_info_available_p (CDI_DOMINATORS))
8321 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8322 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8325 /* Update loop info. */
8326 if (current_loops)
8327 add_bb_to_loop (new_bb, bb->loop_father);
8329 return new_bb;
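/* Illustrative sketch (an addition): splitting BB after STMT and guarding
   the new block with VAL != 0.  VAL is a hypothetical gimple value computed
   earlier by the caller:

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
				      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);  */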
8332 /* Build a ternary operation and gimplify it. Emit code before GSI.
8333 Return the gimple_val holding the result. */
8335 tree
8336 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8337 tree type, tree a, tree b, tree c)
8339 tree ret;
8340 location_t loc = gimple_location (gsi_stmt (*gsi));
8342 ret = fold_build3_loc (loc, code, type, a, b, c);
8343 STRIP_NOPS (ret);
8345 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8346 GSI_SAME_STMT);
8349 /* Build a binary operation and gimplify it. Emit code before GSI.
8350 Return the gimple_val holding the result. */
8352 tree
8353 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8354 tree type, tree a, tree b)
8356 tree ret;
8358 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8359 STRIP_NOPS (ret);
8361 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8362 GSI_SAME_STMT);
8365 /* Build a unary operation and gimplify it. Emit code before GSI.
8366 Return the gimple_val holding the result. */
8368 tree
8369 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8370 tree a)
8372 tree ret;
8374 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8375 STRIP_NOPS (ret);
8377 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8378 GSI_SAME_STMT);
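/* Illustrative sketch (an addition): e.g. materializing A + B as a gimple
   value before the statement at *GSI, with TYPE, A and B provided by the
   caller:

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);  */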
8383 /* Given a basic block B which ends with a conditional and has
8384 precisely two successors, determine which of the edges is taken if
8385 the conditional is true and which is taken if the conditional is
8386 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8388 void
8389 extract_true_false_edges_from_block (basic_block b,
8390 edge *true_edge,
8391 edge *false_edge)
8393 edge e = EDGE_SUCC (b, 0);
8395 if (e->flags & EDGE_TRUE_VALUE)
8397 *true_edge = e;
8398 *false_edge = EDGE_SUCC (b, 1);
8400 else
8402 *false_edge = e;
8403 *true_edge = EDGE_SUCC (b, 1);
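/* Illustrative sketch (an addition): typical use at a block ending in a
   GIMPLE_COND:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);  */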
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
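/* Editorial example, not part of the original source: for

     int f (int x) { if (x) return 1; }

   the implicit fall-through reaches EXIT without a value, so with
   -Wreturn-type this pass reports "control reaches end of non-void
   function"; conversely, a function declared noreturn that still has a
   path to EXIT gets "'noreturn' function does return".  */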
/* Walk a gimplified function and warn about calls whose return value is
   ignored when the called function is declared with
   attribute((warn_unused_result)).  This is done before inlining, so
   we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
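/* Editorial example, not part of the original source: given

     __attribute__ ((warn_unused_result)) int reserve (int n);
     void g (void) { reserve (4); }

   the call is gimplified as a naked GIMPLE_CALL with no LHS, so the
   walk above emits "ignoring return value of 'reserve', declared with
   attribute warn_unused_result" at the call's location.  */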
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions, or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
    = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
		 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep the access if the store has a side effect, i.e. when
	     the source is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }

	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
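/* Editorial example, not part of the original source: if a callee
   declared

     void stop (void) __attribute__ ((noreturn));

   is inlined and its body in fact falls off the end, the inlined code
   leaves a block with no successors and no control statement; the loop
   above caps such a block with a call to __builtin_unreachable so that
   CFG verification keeps succeeding.  */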
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_ssa_only_virtuals, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark an rtx_insn for garbage collection by dispatching to the
   gengtype-generated marker for the underlying rtx_def.  */

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note an rtx_insn for PCH by dispatching to the gengtype-generated
   walker for the underlying rtx_def.  */

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

/* Apply the pointer operator OP to the fields of E.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}