/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
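
/* For illustration (a sketch, not part of the original source): given

     switch (x) { case 1: case 2: goto a; default: goto b; }

   the edge leading to the block of label "a" maps to the chain
   CASE_1 -> CASE_2, linked through the CASE_CHAIN fields, so redirecting
   that one edge lets us update both case labels without rescanning the
   whole case vector of the GIMPLE_SWITCH.  */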
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
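
/* For illustration (an assumed example using the GNU C labels-as-values
   extension, not part of the original source):

     void *tgt[] = { &&l1, &&l2 };
     goto *tgt[i];   // computed goto: destination is not a LABEL_DECL
     l1: ...
     l2: ...

   computed_goto_p is true for "goto *tgt[i]", while a plain "goto l1;"
   has a LABEL_DECL destination and is treated as a simple goto.  */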
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
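
/* For illustration (hedged examples, not part of the original source):
   calls that typically end up marked control-altering include

     abort ();               // ECF_NORETURN: does not return
     __builtin_return (p);   // behaves like a return statement

   as well as calls that may perform a non-local goto back into this
   function.  An ordinary call such as "f (x);" keeps the flag clear.  */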
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
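
/* For illustration (a sketch, not part of the original source): for a
   returns-twice call such as

     x = setjmp (env);

   the code above splits the assignment into

     tmp = setjmp (env);
     x = tmp;

   so the old value of x stays available on the abnormal edge out of the
   call and abnormal SSA names do not get overlapping life-ranges.  */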
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
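
/* For illustration (a sketch, not part of the original source): two
   computed gotos

     goto *p1;            goto *p2;

   are factored into

     gotovar = p1;        gotovar = p2;
     goto <factored>;     goto <factored>;

     <factored>: goto *gotovar;

   so only the factored dispatcher block needs edges to every
   FORCED_LABEL, instead of every computed-goto block needing them.  */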
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label
		  = gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or to forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
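
/* For illustration (not part of the original source): for

     if (p) x = 0; else y = 0;   // all on one source line

   the condition block and both arm blocks share the same line, so the
   arms get distinct discriminators and a sample-based profiler can
   attribute counts to each arm even though they map to one line.  */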
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
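
/* For illustration (a sketch, not part of the original source): if a
   block begins with

     <D.1234>:   // artificial
     mylab:      // user-defined
     <D.5678>:   // artificial

   then "mylab" becomes the leading label, references to <D.1234> and
   <D.5678> are redirected to it, and the two artificial labels are
   purged in the final step.  */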
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
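
/* For illustration (not part of the original source): in

     switch (x) { case 1: case 2: case 3: goto a; case 5: goto b; }

   cases 1, 2 and 3 reach the same block and cover the consecutive range
   1..3, so they are merged into the single entry "case 1 ... 3:", while
   case 5 is left untouched.  */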
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
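
/* For illustration (hedged, not part of the original source): merging is
   allowed for a straight-line pair

     <bb A>: x_1 = 1;   // single successor B, plain fallthru edge
     <bb B>: y_2 = 2;   // single predecessor A

   but is refused, for example, when B is the latch of a loop with simple
   latches or when B starts with a user label at -O0.  */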
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels are kept around in the form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
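
/* For illustration (a sketch, not part of the original source): when B
   starts with the single-argument PHI

     v_3 = PHI <v_1(A)>

   gimple_merge_blocks either propagates v_1 into all uses of v_3, or,
   when the copy cannot be propagated, emits the explicit assignment
   "v_3 = v_1" at the end of A before removing the PHI node.  */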
2000 /* Return the one of two successors of BB that is not reachable by a
2001 complex edge, if there is one. Else, return BB. We use
2002 this in optimizations that use post-dominators for their heuristics,
2003 to catch the cases in C++ where function calls are involved. */
2005 basic_block
2006 single_noncomplex_succ (basic_block bb)
2008 edge e0, e1;
2009 if (EDGE_COUNT (bb->succs) != 2)
2010 return bb;
2012 e0 = EDGE_SUCC (bb, 0);
2013 e1 = EDGE_SUCC (bb, 1);
2014 if (e0->flags & EDGE_COMPLEX)
2015 return e1->dest;
2016 if (e1->flags & EDGE_COMPLEX)
2017 return e0->dest;
2019 return bb;
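/* Editor's sketch (not part of the original file): a heuristic walk
   that repeatedly follows the unique non-complex successor.  The
   helper name is hypothetical, and a cyclic CFG region would need a
   visited set, elided here for brevity.  */

static basic_block
example_skip_noncomplex_succs (basic_block bb)
{
  basic_block next;

  /* single_noncomplex_succ returns BB itself when no unique
     non-complex successor exists, terminating the walk.  */
  while ((next = single_noncomplex_succ (bb)) != bb)
    bb = next;
  return bb;
}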
2022 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags accordingly. */
2024 void
2025 notice_special_calls (gcall *call)
2027 int flags = gimple_call_flags (call);
2029 if (flags & ECF_MAY_BE_ALLOCA)
2030 cfun->calls_alloca = true;
2031 if (flags & ECF_RETURNS_TWICE)
2032 cfun->calls_setjmp = true;
2036 /* Clear flags set by notice_special_calls. Used by dead code removal
2037 to update the flags. */
2039 void
2040 clear_special_calls (void)
2042 cfun->calls_alloca = false;
2043 cfun->calls_setjmp = false;
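/* Editor's sketch (not in the original source): a pass that deleted
   statements could refresh these flags by clearing them and then
   re-noticing every surviving call.  The helper name is hypothetical.  */

static void
example_recompute_special_calls (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  clear_special_calls ();
  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      if (gcall *call = dyn_cast <gcall *> (gsi_stmt (gsi)))
        notice_special_calls (call);
}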
2046 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2048 static void
2049 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2051 /* Since this block is no longer reachable, we can just delete all
2052 of its PHI nodes. */
2053 remove_phi_nodes (bb);
2055 /* Remove edges to BB's successors. */
2056 while (EDGE_COUNT (bb->succs) > 0)
2057 remove_edge (EDGE_SUCC (bb, 0));
2061 /* Remove statements of basic block BB. */
2063 static void
2064 remove_bb (basic_block bb)
2066 gimple_stmt_iterator i;
2068 if (dump_file)
2070 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2071 if (dump_flags & TDF_DETAILS)
2073 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2074 fprintf (dump_file, "\n");
2078 if (current_loops)
2080 struct loop *loop = bb->loop_father;
2082 /* If a loop gets removed, clean up the information associated
2083 with it. */
2084 if (loop->latch == bb
2085 || loop->header == bb)
2086 free_numbers_of_iterations_estimates_loop (loop);
2089 /* Remove all the instructions in the block. */
2090 if (bb_seq (bb) != NULL)
2092 /* Walk backwards so as to get a chance to substitute all
2093 released DEFs into debug stmts. See
2094 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2095 details. */
2096 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2098 gimple stmt = gsi_stmt (i);
2099 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2100 if (label_stmt
2101 && (FORCED_LABEL (gimple_label_label (label_stmt))
2102 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2104 basic_block new_bb;
2105 gimple_stmt_iterator new_gsi;
2107 /* A non-reachable non-local label may still be referenced.
2108 But it no longer needs to carry the extra semantics of
2109 non-locality. */
2110 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2112 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2113 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2116 new_bb = bb->prev_bb;
2117 new_gsi = gsi_start_bb (new_bb);
2118 gsi_remove (&i, false);
2119 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2121 else
2123 /* Release SSA definitions if we are in SSA. Note that we
2124 may be called when not in SSA. For example,
2125 final_cleanup calls this function via
2126 cleanup_tree_cfg. */
2127 if (gimple_in_ssa_p (cfun))
2128 release_defs (stmt);
2130 gsi_remove (&i, true);
2133 if (gsi_end_p (i))
2134 i = gsi_last_bb (bb);
2135 else
2136 gsi_prev (&i);
2140 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2141 bb->il.gimple.seq = NULL;
2142 bb->il.gimple.phi_nodes = NULL;
2146 /* Given a basic block BB ending in a GIMPLE_COND, GIMPLE_SWITCH or
2147 computed goto, and a predicate VAL, return the edge that will be taken
2148 out of the block. If VAL does not match a unique edge, NULL is returned. */
2150 edge
2151 find_taken_edge (basic_block bb, tree val)
2153 gimple stmt;
2155 stmt = last_stmt (bb);
2157 gcc_assert (stmt);
2158 gcc_assert (is_ctrl_stmt (stmt));
2160 if (val == NULL)
2161 return NULL;
2163 if (!is_gimple_min_invariant (val))
2164 return NULL;
2166 if (gimple_code (stmt) == GIMPLE_COND)
2167 return find_taken_edge_cond_expr (bb, val);
2169 if (gimple_code (stmt) == GIMPLE_SWITCH)
2170 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2172 if (computed_goto_p (stmt))
2174 /* Only optimize if the argument is a label; if the argument is
2175 not a label then we cannot construct a proper CFG.
2177 It may be the case that we only need to allow the LABEL_REF to
2178 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2179 appear inside a LABEL_EXPR just to be safe. */
2180 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2181 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2182 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2183 return NULL;
2186 gcc_unreachable ();
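/* Usage sketch (editor's addition, not in the original source): a
   cleanup that has proven the controlling value of BB could use
   find_taken_edge to delete the statically dead outgoing edges.  The
   helper is hypothetical and omits the PHI/profile updating a real
   pass would perform.  */

static void
example_prune_dead_edges (basic_block bb, tree known_val)
{
  edge taken = find_taken_edge (bb, known_val);

  if (!taken)
    return;

  /* Remove every successor edge except the one that is taken.  */
  while (EDGE_COUNT (bb->succs) > 1)
    {
      edge e = EDGE_SUCC (bb, 0);
      if (e == taken)
        e = EDGE_SUCC (bb, 1);
      remove_edge (e);
    }
}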
2189 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2190 statement, determine which of the outgoing edges will be taken out of the
2191 block. Return NULL if any edge may be taken. */
2193 static edge
2194 find_taken_edge_computed_goto (basic_block bb, tree val)
2196 basic_block dest;
2197 edge e = NULL;
2199 dest = label_to_block (val);
2200 if (dest)
2202 e = find_edge (bb, dest);
2203 gcc_assert (e != NULL);
2206 return e;
2209 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2210 statement, determine which of the two edges will be taken out of the
2211 block. Return NULL if either edge may be taken. */
2213 static edge
2214 find_taken_edge_cond_expr (basic_block bb, tree val)
2216 edge true_edge, false_edge;
2218 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2220 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2221 return (integer_zerop (val) ? false_edge : true_edge);
2224 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2225 statement, determine which edge will be taken out of the block. Return
2226 NULL if any edge may be taken. */
2228 static edge
2229 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2230 tree val)
2232 basic_block dest_bb;
2233 edge e;
2234 tree taken_case;
2236 taken_case = find_case_label_for_value (switch_stmt, val);
2237 dest_bb = label_to_block (CASE_LABEL (taken_case));
2239 e = find_edge (bb, dest_bb);
2240 gcc_assert (e);
2241 return e;
2245 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2246 We can make optimal use here of the fact that the case labels are
2247 sorted: We can do a binary search for a case matching VAL. */
2249 static tree
2250 find_case_label_for_value (gswitch *switch_stmt, tree val)
2252 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2253 tree default_case = gimple_switch_default_label (switch_stmt);
2255 for (low = 0, high = n; high - low > 1; )
2257 size_t i = (high + low) / 2;
2258 tree t = gimple_switch_label (switch_stmt, i);
2259 int cmp;
2261 /* Cache the result of comparing CASE_LOW and val. */
2262 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2264 if (cmp > 0)
2265 high = i;
2266 else
2267 low = i;
2269 if (CASE_HIGH (t) == NULL)
2271 /* A single-valued case label. */
2272 if (cmp == 0)
2273 return t;
2275 else
2277 /* A case range. We can only handle integer ranges. */
2278 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2279 return t;
2283 return default_case;
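/* Worked example (editor's note, not in the original source): the loop
   above is a lower-bound binary search over the sorted case labels.
   The same shape over a plain sorted array of keys looks like this;
   as with the labels, the caller still has to check whether the
   returned slot actually matches.  */

static size_t
example_lower_bound (const int *keys, size_t n, int val)
{
  size_t low = 0, high = n;

  while (high - low > 1)
    {
      size_t i = (high + low) / 2;

      /* Narrow the half-open interval [low, high) that can still
         contain a key equal to VAL.  */
      if (keys[i] > val)
        high = i;
      else
        low = i;
    }
  return low;
}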
2287 /* Dump a basic block on stderr. */
2289 void
2290 gimple_debug_bb (basic_block bb)
2292 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2296 /* Dump basic block with index N on stderr. */
2298 basic_block
2299 gimple_debug_bb_n (int n)
2301 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2302 return BASIC_BLOCK_FOR_FN (cfun, n);
2306 /* Dump the CFG on stderr.
2308 FLAGS are the same as those used by the tree dumping functions
2309 (see TDF_* in dumpfile.h). */
2311 void
2312 gimple_debug_cfg (int flags)
2314 gimple_dump_cfg (stderr, flags);
2318 /* Dump the program showing basic block boundaries on the given FILE.
2320 FLAGS are the same as those used by the tree dumping functions (see TDF_*
2321 in dumpfile.h). */
2323 void
2324 gimple_dump_cfg (FILE *file, int flags)
2326 if (flags & TDF_DETAILS)
2328 dump_function_header (file, current_function_decl, flags);
2329 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2330 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2331 last_basic_block_for_fn (cfun));
2333 brief_dump_cfg (file, flags | TDF_COMMENT);
2334 fprintf (file, "\n");
2337 if (flags & TDF_STATS)
2338 dump_cfg_stats (file);
2340 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2344 /* Dump CFG statistics on FILE. */
2346 void
2347 dump_cfg_stats (FILE *file)
2349 static long max_num_merged_labels = 0;
2350 unsigned long size, total = 0;
2351 long num_edges;
2352 basic_block bb;
2353 const char * const fmt_str = "%-30s%-13s%12s\n";
2354 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2355 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2356 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2357 const char *funcname = current_function_name ();
2359 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2361 fprintf (file, "---------------------------------------------------------\n");
2362 fprintf (file, fmt_str, "", " Number of ", "Memory");
2363 fprintf (file, fmt_str, "", " instances ", "used ");
2364 fprintf (file, "---------------------------------------------------------\n");
2366 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2367 total += size;
2368 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2369 SCALE (size), LABEL (size));
2371 num_edges = 0;
2372 FOR_EACH_BB_FN (bb, cfun)
2373 num_edges += EDGE_COUNT (bb->succs);
2374 size = num_edges * sizeof (struct edge_def);
2375 total += size;
2376 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2378 fprintf (file, "---------------------------------------------------------\n");
2379 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2380 LABEL (total));
2381 fprintf (file, "---------------------------------------------------------\n");
2382 fprintf (file, "\n");
2384 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2385 max_num_merged_labels = cfg_stats.num_merged_labels;
2387 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2388 cfg_stats.num_merged_labels, max_num_merged_labels);
2390 fprintf (file, "\n");
2394 /* Dump CFG statistics on stderr. Keep extern so that it's always
2395 linked in the final executable. */
2397 DEBUG_FUNCTION void
2398 debug_cfg_stats (void)
2400 dump_cfg_stats (stderr);
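/* Usage note (editor's addition): the gimple_debug_* functions and
   debug_cfg_stats above are primarily meant for interactive use from
   the debugger, e.g.:

     (gdb) call gimple_debug_bb_n (3)
     (gdb) call gimple_debug_cfg (0)
     (gdb) call debug_cfg_stats ()  */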
2403 /*---------------------------------------------------------------------------
2404 Miscellaneous helpers
2405 ---------------------------------------------------------------------------*/
2407 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2408 flow. Transfers of control flow associated with EH are excluded. */
2410 static bool
2411 call_can_make_abnormal_goto (gimple t)
2413 /* If the function has no non-local labels, then a call cannot make an
2414 abnormal transfer of control. */
2415 if (!cfun->has_nonlocal_label
2416 && !cfun->calls_setjmp)
2417 return false;
2419 /* Likewise if the call has no side effects. */
2420 if (!gimple_has_side_effects (t))
2421 return false;
2423 /* Likewise if the called function is leaf. */
2424 if (gimple_call_flags (t) & ECF_LEAF)
2425 return false;
2427 return true;
2431 /* Return true if T can make an abnormal transfer of control flow.
2432 Transfers of control flow associated with EH are excluded. */
2434 bool
2435 stmt_can_make_abnormal_goto (gimple t)
2437 if (computed_goto_p (t))
2438 return true;
2439 if (is_gimple_call (t))
2440 return call_can_make_abnormal_goto (t);
2441 return false;
2445 /* Return true if T represents a stmt that always transfers control. */
2447 bool
2448 is_ctrl_stmt (gimple t)
2450 switch (gimple_code (t))
2452 case GIMPLE_COND:
2453 case GIMPLE_SWITCH:
2454 case GIMPLE_GOTO:
2455 case GIMPLE_RETURN:
2456 case GIMPLE_RESX:
2457 return true;
2458 default:
2459 return false;
2464 /* Return true if T is a statement that may alter the flow of control
2465 (e.g., a call to a non-returning function). */
2467 bool
2468 is_ctrl_altering_stmt (gimple t)
2470 gcc_assert (t);
2472 switch (gimple_code (t))
2474 case GIMPLE_CALL:
2475 /* The per-stmt call flag indicates whether the call could alter
2476 control flow. */
2477 if (gimple_call_ctrl_altering_p (t))
2478 return true;
2479 break;
2481 case GIMPLE_EH_DISPATCH:
2482 /* EH_DISPATCH branches to the individual catch handlers at
2483 this level of a try or allowed-exceptions region. It can
2484 fallthru to the next statement as well. */
2485 return true;
2487 case GIMPLE_ASM:
2488 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2489 return true;
2490 break;
2492 CASE_GIMPLE_OMP:
2493 /* OpenMP directives alter control flow. */
2494 return true;
2496 case GIMPLE_TRANSACTION:
2497 /* A transaction start alters control flow. */
2498 return true;
2500 default:
2501 break;
2504 /* If a statement can throw, it alters control flow. */
2505 return stmt_can_throw_internal (t);
2509 /* Return true if T is a simple local goto. */
2511 bool
2512 simple_goto_p (gimple t)
2514 return (gimple_code (t) == GIMPLE_GOTO
2515 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2519 /* Return true if STMT should start a new basic block. PREV_STMT is
2520 the statement preceding STMT. It is used when STMT is a label or a
2521 case label. Labels should only start a new basic block if their
2522 previous statement wasn't a label. Otherwise, a sequence of labels
2523 would generate unnecessary basic blocks that only contain a single
2524 label. */
2526 static inline bool
2527 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2529 if (stmt == NULL)
2530 return false;
2532 /* Labels start a new basic block only if the preceding statement
2533 wasn't a label of the same type. This prevents the creation of
2534 consecutive blocks that have nothing but a single label. */
2535 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2537 /* Nonlocal and computed GOTO targets always start a new block. */
2538 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2539 || FORCED_LABEL (gimple_label_label (label_stmt)))
2540 return true;
2542 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2544 if (DECL_NONLOCAL (gimple_label_label (
2545 as_a <glabel *> (prev_stmt))))
2546 return true;
2548 cfg_stats.num_merged_labels++;
2549 return false;
2551 else
2552 return true;
2554 else if (gimple_code (stmt) == GIMPLE_CALL
2555 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2556 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2557 start a new block. */
2558 return true;
2560 return false;
2564 /* Return true if T should end a basic block. */
2566 bool
2567 stmt_ends_bb_p (gimple t)
2569 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
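/* Editor's sketch (not in the original source): during CFG
   construction the two predicates cooperate -- a statement opens a
   new block either because it must start one itself or because the
   preceding statement must end one.  Hypothetical helper:  */

static bool
example_block_boundary_p (gimple prev_stmt, gimple stmt)
{
  return ((prev_stmt && stmt_ends_bb_p (prev_stmt))
          || stmt_starts_bb_p (stmt, prev_stmt));
}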
2572 /* Remove block annotations and other data structures. */
2574 void
2575 delete_tree_cfg_annotations (void)
2577 vec_free (label_to_block_map_for_fn (cfun));
2581 /* Return the first statement in basic block BB. */
2583 gimple
2584 first_stmt (basic_block bb)
2586 gimple_stmt_iterator i = gsi_start_bb (bb);
2587 gimple stmt = NULL;
2589 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2591 gsi_next (&i);
2592 stmt = NULL;
2594 return stmt;
2597 /* Return the first non-label statement in basic block BB. */
2599 static gimple
2600 first_non_label_stmt (basic_block bb)
2602 gimple_stmt_iterator i = gsi_start_bb (bb);
2603 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2604 gsi_next (&i);
2605 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2608 /* Return the last statement in basic block BB. */
2610 gimple
2611 last_stmt (basic_block bb)
2613 gimple_stmt_iterator i = gsi_last_bb (bb);
2614 gimple stmt = NULL;
2616 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2618 gsi_prev (&i);
2619 stmt = NULL;
2621 return stmt;
2624 /* Return the last statement of an otherwise empty block. Return NULL
2625 if the block is totally empty, or if it contains more than one
2626 statement. */
2628 gimple
2629 last_and_only_stmt (basic_block bb)
2631 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2632 gimple last, prev;
2634 if (gsi_end_p (i))
2635 return NULL;
2637 last = gsi_stmt (i);
2638 gsi_prev_nondebug (&i);
2639 if (gsi_end_p (i))
2640 return last;
2642 /* Empty statements should no longer appear in the instruction stream.
2643 Everything that might have appeared before should be deleted by
2644 remove_useless_stmts, and the optimizers should just gsi_remove
2645 instead of smashing with build_empty_stmt.
2647 Thus the only thing that should appear here in a block containing
2648 one executable statement is a label. */
2649 prev = gsi_stmt (i);
2650 if (gimple_code (prev) == GIMPLE_LABEL)
2651 return last;
2652 else
2653 return NULL;
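/* Editor's sketch (not in the original source): last_and_only_stmt is
   convenient for recognizing near-empty blocks, e.g. a block whose
   only real statement is a simple goto.  Hypothetical helper:  */

static bool
example_trivial_goto_block_p (basic_block bb)
{
  gimple stmt = last_and_only_stmt (bb);
  return stmt != NULL && simple_goto_p (stmt);
}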
2656 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2658 static void
2659 reinstall_phi_args (edge new_edge, edge old_edge)
2661 edge_var_map *vm;
2662 int i;
2663 gphi_iterator phis;
2665 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2666 if (!v)
2667 return;
2669 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2670 v->iterate (i, &vm) && !gsi_end_p (phis);
2671 i++, gsi_next (&phis))
2673 gphi *phi = phis.phi ();
2674 tree result = redirect_edge_var_map_result (vm);
2675 tree arg = redirect_edge_var_map_def (vm);
2677 gcc_assert (result == gimple_phi_result (phi));
2679 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2682 redirect_edge_var_map_clear (old_edge);
2685 /* Returns the basic block after which the new basic block created
2686 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2687 near its "logical" location. This is of most help to humans looking
2688 at debugging dumps. */
2690 basic_block
2691 split_edge_bb_loc (edge edge_in)
2693 basic_block dest = edge_in->dest;
2694 basic_block dest_prev = dest->prev_bb;
2696 if (dest_prev)
2698 edge e = find_edge (dest_prev, dest);
2699 if (e && !(e->flags & EDGE_COMPLEX))
2700 return edge_in->src;
2702 return dest_prev;
2705 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2706 Abort on abnormal edges. */
2708 static basic_block
2709 gimple_split_edge (edge edge_in)
2711 basic_block new_bb, after_bb, dest;
2712 edge new_edge, e;
2714 /* Abnormal edges cannot be split. */
2715 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2717 dest = edge_in->dest;
2719 after_bb = split_edge_bb_loc (edge_in);
2721 new_bb = create_empty_bb (after_bb);
2722 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2723 new_bb->count = edge_in->count;
2724 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2725 new_edge->probability = REG_BR_PROB_BASE;
2726 new_edge->count = edge_in->count;
2728 e = redirect_edge_and_branch (edge_in, new_bb);
2729 gcc_assert (e == edge_in);
2730 reinstall_phi_args (new_edge, e);
2732 return new_bb;
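/* Usage sketch (editor's addition, not in the original source):
   passes reach gimple_split_edge through the generic split_edge CFG
   hook, for instance to create a landing block for new code on a
   critical edge.  A real pass would typically queue the statement
   with gsi_insert_on_edge and commit later; this eager variant is
   illustrative only.  */

static basic_block
example_insert_on_edge_now (edge e, gimple new_stmt)
{
  basic_block bb = split_edge (e);
  gimple_stmt_iterator gsi = gsi_start_bb (bb);

  gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
  return bb;
}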
2736 /* Verify properties of the address expression T with base object BASE. */
2738 static tree
2739 verify_address (tree t, tree base)
2741 bool old_constant;
2742 bool old_side_effects;
2743 bool new_constant;
2744 bool new_side_effects;
2746 old_constant = TREE_CONSTANT (t);
2747 old_side_effects = TREE_SIDE_EFFECTS (t);
2749 recompute_tree_invariant_for_addr_expr (t);
2750 new_side_effects = TREE_SIDE_EFFECTS (t);
2751 new_constant = TREE_CONSTANT (t);
2753 if (old_constant != new_constant)
2755 error ("constant not recomputed when ADDR_EXPR changed");
2756 return t;
2758 if (old_side_effects != new_side_effects)
2760 error ("side effects not recomputed when ADDR_EXPR changed");
2761 return t;
2764 if (!(TREE_CODE (base) == VAR_DECL
2765 || TREE_CODE (base) == PARM_DECL
2766 || TREE_CODE (base) == RESULT_DECL))
2767 return NULL_TREE;
2769 if (DECL_GIMPLE_REG_P (base))
2771 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2772 return base;
2775 return NULL_TREE;
2778 /* Callback for walk_tree: check that all elements with address taken are
2779 properly noticed as such. The DATA argument is unused. */
2782 static tree
2783 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2785 tree t = *tp, x;
2787 if (TYPE_P (t))
2788 *walk_subtrees = 0;
2790 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2791 #define CHECK_OP(N, MSG) \
2792 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2793 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2795 switch (TREE_CODE (t))
2797 case SSA_NAME:
2798 if (SSA_NAME_IN_FREE_LIST (t))
2800 error ("SSA name in freelist but still referenced");
2801 return *tp;
2803 break;
2805 case INDIRECT_REF:
2806 error ("INDIRECT_REF in gimple IL");
2807 return t;
2809 case MEM_REF:
2810 x = TREE_OPERAND (t, 0);
2811 if (!POINTER_TYPE_P (TREE_TYPE (x))
2812 || !is_gimple_mem_ref_addr (x))
2814 error ("invalid first operand of MEM_REF");
2815 return x;
2817 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2818 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2820 error ("invalid offset operand of MEM_REF");
2821 return TREE_OPERAND (t, 1);
2823 if (TREE_CODE (x) == ADDR_EXPR
2824 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2825 return x;
2826 *walk_subtrees = 0;
2827 break;
2829 case ASSERT_EXPR:
2830 x = fold (ASSERT_EXPR_COND (t));
2831 if (x == boolean_false_node)
2833 error ("ASSERT_EXPR with an always-false condition");
2834 return *tp;
2836 break;
2838 case MODIFY_EXPR:
2839 error ("MODIFY_EXPR not expected while having tuples");
2840 return *tp;
2842 case ADDR_EXPR:
2844 tree tem;
2846 gcc_assert (is_gimple_address (t));
2848 /* Skip any references (they will be checked when we recurse down the
2849 tree) and ensure that any variable used as a prefix is marked
2850 addressable. */
2851 for (x = TREE_OPERAND (t, 0);
2852 handled_component_p (x);
2853 x = TREE_OPERAND (x, 0))
2856 if ((tem = verify_address (t, x)))
2857 return tem;
2859 if (!(TREE_CODE (x) == VAR_DECL
2860 || TREE_CODE (x) == PARM_DECL
2861 || TREE_CODE (x) == RESULT_DECL))
2862 return NULL;
2864 if (!TREE_ADDRESSABLE (x))
2866 error ("address taken, but ADDRESSABLE bit not set");
2867 return x;
2870 break;
2873 case COND_EXPR:
2874 x = COND_EXPR_COND (t);
2875 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2877 error ("non-integral used in condition");
2878 return x;
2880 if (!is_gimple_condexpr (x))
2882 error ("invalid conditional operand");
2883 return x;
2885 break;
2887 case NON_LVALUE_EXPR:
2888 case TRUTH_NOT_EXPR:
2889 gcc_unreachable ();
2891 CASE_CONVERT:
2892 case FIX_TRUNC_EXPR:
2893 case FLOAT_EXPR:
2894 case NEGATE_EXPR:
2895 case ABS_EXPR:
2896 case BIT_NOT_EXPR:
2897 CHECK_OP (0, "invalid operand to unary operator");
2898 break;
2900 case REALPART_EXPR:
2901 case IMAGPART_EXPR:
2902 case BIT_FIELD_REF:
2903 if (!is_gimple_reg_type (TREE_TYPE (t)))
2905 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2906 return t;
2909 if (TREE_CODE (t) == BIT_FIELD_REF)
2911 tree t0 = TREE_OPERAND (t, 0);
2912 tree t1 = TREE_OPERAND (t, 1);
2913 tree t2 = TREE_OPERAND (t, 2);
2914 if (!tree_fits_uhwi_p (t1)
2915 || !tree_fits_uhwi_p (t2))
2917 error ("invalid position or size operand to BIT_FIELD_REF");
2918 return t;
2920 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2921 && (TYPE_PRECISION (TREE_TYPE (t))
2922 != tree_to_uhwi (t1)))
2924 error ("integral result type precision does not match "
2925 "field size of BIT_FIELD_REF");
2926 return t;
2928 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2929 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2930 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2931 != tree_to_uhwi (t1)))
2933 error ("mode precision of non-integral result does not "
2934 "match field size of BIT_FIELD_REF");
2935 return t;
2937 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2938 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2939 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2941 error ("position plus size exceeds size of referenced object in "
2942 "BIT_FIELD_REF");
2943 return t;
2946 t = TREE_OPERAND (t, 0);
2948 /* Fall-through. */
2949 case COMPONENT_REF:
2950 case ARRAY_REF:
2951 case ARRAY_RANGE_REF:
2952 case VIEW_CONVERT_EXPR:
2953 /* We have a nest of references. Verify that each of the operands
2954 that determine where to reference is either a constant or a variable,
2955 verify that the base is valid, and then show we've already checked
2956 the subtrees. */
2957 while (handled_component_p (t))
2959 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2960 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2961 else if (TREE_CODE (t) == ARRAY_REF
2962 || TREE_CODE (t) == ARRAY_RANGE_REF)
2964 CHECK_OP (1, "invalid array index");
2965 if (TREE_OPERAND (t, 2))
2966 CHECK_OP (2, "invalid array lower bound");
2967 if (TREE_OPERAND (t, 3))
2968 CHECK_OP (3, "invalid array stride");
2970 else if (TREE_CODE (t) == BIT_FIELD_REF
2971 || TREE_CODE (t) == REALPART_EXPR
2972 || TREE_CODE (t) == IMAGPART_EXPR)
2974 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2975 "REALPART_EXPR");
2976 return t;
2979 t = TREE_OPERAND (t, 0);
2982 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2984 error ("invalid reference prefix");
2985 return t;
2987 *walk_subtrees = 0;
2988 break;
2989 case PLUS_EXPR:
2990 case MINUS_EXPR:
2991 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2992 should be done using POINTER_PLUS_EXPR. */
2993 if (POINTER_TYPE_P (TREE_TYPE (t)))
2995 error ("invalid operand to plus/minus, type is a pointer");
2996 return t;
2998 CHECK_OP (0, "invalid operand to binary operator");
2999 CHECK_OP (1, "invalid operand to binary operator");
3000 break;
3002 case POINTER_PLUS_EXPR:
3003 /* Check to make sure the first operand is a pointer or reference type. */
3004 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3006 error ("invalid operand to pointer plus, first operand is not a pointer");
3007 return t;
3009 /* Check to make sure the second operand is a ptrofftype. */
3010 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3012 error ("invalid operand to pointer plus, second operand is not an "
3013 "integer type of appropriate width");
3014 return t;
3016 /* FALLTHROUGH */
3017 case LT_EXPR:
3018 case LE_EXPR:
3019 case GT_EXPR:
3020 case GE_EXPR:
3021 case EQ_EXPR:
3022 case NE_EXPR:
3023 case UNORDERED_EXPR:
3024 case ORDERED_EXPR:
3025 case UNLT_EXPR:
3026 case UNLE_EXPR:
3027 case UNGT_EXPR:
3028 case UNGE_EXPR:
3029 case UNEQ_EXPR:
3030 case LTGT_EXPR:
3031 case MULT_EXPR:
3032 case TRUNC_DIV_EXPR:
3033 case CEIL_DIV_EXPR:
3034 case FLOOR_DIV_EXPR:
3035 case ROUND_DIV_EXPR:
3036 case TRUNC_MOD_EXPR:
3037 case CEIL_MOD_EXPR:
3038 case FLOOR_MOD_EXPR:
3039 case ROUND_MOD_EXPR:
3040 case RDIV_EXPR:
3041 case EXACT_DIV_EXPR:
3042 case MIN_EXPR:
3043 case MAX_EXPR:
3044 case LSHIFT_EXPR:
3045 case RSHIFT_EXPR:
3046 case LROTATE_EXPR:
3047 case RROTATE_EXPR:
3048 case BIT_IOR_EXPR:
3049 case BIT_XOR_EXPR:
3050 case BIT_AND_EXPR:
3051 CHECK_OP (0, "invalid operand to binary operator");
3052 CHECK_OP (1, "invalid operand to binary operator");
3053 break;
3055 case CONSTRUCTOR:
3056 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3057 *walk_subtrees = 0;
3058 break;
3060 case CASE_LABEL_EXPR:
3061 if (CASE_CHAIN (t))
3063 error ("invalid CASE_CHAIN");
3064 return t;
3066 break;
3068 default:
3069 break;
3071 return NULL;
3073 #undef CHECK_OP
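/* Editor's note (not in the original source): CHECK_OP above enforces
   the GIMPLE flattening invariant -- operator operands must satisfy
   is_gimple_val, i.e. be constants or registers.  So a source
   expression such as

     a = b + c * d;

   must appear in the IL in three-address form:

     t1 = c * d;
     a = b + t1;  */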
3077 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3078 Returns true if there is an error, otherwise false. */
3080 static bool
3081 verify_types_in_gimple_min_lval (tree expr)
3083 tree op;
3085 if (is_gimple_id (expr))
3086 return false;
3088 if (TREE_CODE (expr) != TARGET_MEM_REF
3089 && TREE_CODE (expr) != MEM_REF)
3091 error ("invalid expression for min lvalue");
3092 return true;
3095 /* TARGET_MEM_REFs are strange beasts. */
3096 if (TREE_CODE (expr) == TARGET_MEM_REF)
3097 return false;
3099 op = TREE_OPERAND (expr, 0);
3100 if (!is_gimple_val (op))
3102 error ("invalid operand in indirect reference");
3103 debug_generic_stmt (op);
3104 return true;
3106 /* Memory references now generally can involve a value conversion. */
3108 return false;
3111 /* Verify if EXPR is a valid GIMPLE reference expression. If
3112 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3113 if there is an error, otherwise false. */
3115 static bool
3116 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3118 while (handled_component_p (expr))
3120 tree op = TREE_OPERAND (expr, 0);
3122 if (TREE_CODE (expr) == ARRAY_REF
3123 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3125 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3126 || (TREE_OPERAND (expr, 2)
3127 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3128 || (TREE_OPERAND (expr, 3)
3129 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3131 error ("invalid operands to array reference");
3132 debug_generic_stmt (expr);
3133 return true;
3137 /* Verify if the reference array element types are compatible. */
3138 if (TREE_CODE (expr) == ARRAY_REF
3139 && !useless_type_conversion_p (TREE_TYPE (expr),
3140 TREE_TYPE (TREE_TYPE (op))))
3142 error ("type mismatch in array reference");
3143 debug_generic_stmt (TREE_TYPE (expr));
3144 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3145 return true;
3147 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3148 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3149 TREE_TYPE (TREE_TYPE (op))))
3151 error ("type mismatch in array range reference");
3152 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3153 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3154 return true;
3157 if ((TREE_CODE (expr) == REALPART_EXPR
3158 || TREE_CODE (expr) == IMAGPART_EXPR)
3159 && !useless_type_conversion_p (TREE_TYPE (expr),
3160 TREE_TYPE (TREE_TYPE (op))))
3162 error ("type mismatch in real/imagpart reference");
3163 debug_generic_stmt (TREE_TYPE (expr));
3164 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165 return true;
3168 if (TREE_CODE (expr) == COMPONENT_REF
3169 && !useless_type_conversion_p (TREE_TYPE (expr),
3170 TREE_TYPE (TREE_OPERAND (expr, 1))))
3172 error ("type mismatch in component reference");
3173 debug_generic_stmt (TREE_TYPE (expr));
3174 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3175 return true;
3178 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3180 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3181 that their operand is not an SSA name or an invariant when
3182 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3183 bug). Otherwise there is nothing to verify, gross mismatches at
3184 most invoke undefined behavior. */
3185 if (require_lvalue
3186 && (TREE_CODE (op) == SSA_NAME
3187 || is_gimple_min_invariant (op)))
3189 error ("conversion of an SSA_NAME on the left hand side");
3190 debug_generic_stmt (expr);
3191 return true;
3193 else if (TREE_CODE (op) == SSA_NAME
3194 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3196 error ("conversion of register to a different size");
3197 debug_generic_stmt (expr);
3198 return true;
3200 else if (!handled_component_p (op))
3201 return false;
3204 expr = op;
3207 if (TREE_CODE (expr) == MEM_REF)
3209 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3211 error ("invalid address operand in MEM_REF");
3212 debug_generic_stmt (expr);
3213 return true;
3215 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3216 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3218 error ("invalid offset operand in MEM_REF");
3219 debug_generic_stmt (expr);
3220 return true;
3223 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3225 if (!TMR_BASE (expr)
3226 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3228 error ("invalid address operand in TARGET_MEM_REF");
3229 return true;
3231 if (!TMR_OFFSET (expr)
3232 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3233 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3235 error ("invalid offset operand in TARGET_MEM_REF");
3236 debug_generic_stmt (expr);
3237 return true;
3241 return ((require_lvalue || !is_gimple_min_invariant (expr))
3242 && verify_types_in_gimple_min_lval (expr));
3245 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3246 list of pointer-to types that is trivially convertible to DEST. */
3248 static bool
3249 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3251 tree src;
3253 if (!TYPE_POINTER_TO (src_obj))
3254 return true;
3256 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3257 if (useless_type_conversion_p (dest, src))
3258 return true;
3260 return false;
3263 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3264 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3266 static bool
3267 valid_fixed_convert_types_p (tree type1, tree type2)
3269 return (FIXED_POINT_TYPE_P (type1)
3270 && (INTEGRAL_TYPE_P (type2)
3271 || SCALAR_FLOAT_TYPE_P (type2)
3272 || FIXED_POINT_TYPE_P (type2)));
3275 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3276 is a problem, otherwise false. */
3278 static bool
3279 verify_gimple_call (gcall *stmt)
3281 tree fn = gimple_call_fn (stmt);
3282 tree fntype, fndecl;
3283 unsigned i;
3285 if (gimple_call_internal_p (stmt))
3287 if (fn)
3289 error ("gimple call has two targets");
3290 debug_generic_stmt (fn);
3291 return true;
3294 else
3296 if (!fn)
3298 error ("gimple call has no target");
3299 return true;
3303 if (fn && !is_gimple_call_addr (fn))
3305 error ("invalid function in gimple call");
3306 debug_generic_stmt (fn);
3307 return true;
3310 if (fn
3311 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3312 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3313 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3315 error ("non-function in gimple call");
3316 return true;
3319 fndecl = gimple_call_fndecl (stmt);
3320 if (fndecl
3321 && TREE_CODE (fndecl) == FUNCTION_DECL
3322 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3323 && !DECL_PURE_P (fndecl)
3324 && !TREE_READONLY (fndecl))
3326 error ("invalid pure const state for function");
3327 return true;
3330 if (gimple_call_lhs (stmt)
3331 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3332 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3334 error ("invalid LHS in gimple call");
3335 return true;
3338 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3340 error ("LHS in noreturn call");
3341 return true;
3344 fntype = gimple_call_fntype (stmt);
3345 if (fntype
3346 && gimple_call_lhs (stmt)
3347 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3348 TREE_TYPE (fntype))
3349 /* ??? At least C++ misses conversions at assignments from
3350 void * call results.
3351 ??? Java is completely off. Especially with functions
3352 returning java.lang.Object.
3353 For now simply allow arbitrary pointer type conversions. */
3354 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3355 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3357 error ("invalid conversion in gimple call");
3358 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3359 debug_generic_stmt (TREE_TYPE (fntype));
3360 return true;
3363 if (gimple_call_chain (stmt)
3364 && !is_gimple_val (gimple_call_chain (stmt)))
3366 error ("invalid static chain in gimple call");
3367 debug_generic_stmt (gimple_call_chain (stmt));
3368 return true;
3371 /* If there is a static chain argument, the call should either be
3372 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3373 if (gimple_call_chain (stmt)
3374 && fndecl
3375 && !DECL_STATIC_CHAIN (fndecl))
3377 error ("static chain with function that doesn%'t use one");
3378 return true;
3381 /* ??? The C frontend passes unpromoted arguments in case it
3382 didn't see a function declaration before the call. So for now
3383 leave the call arguments mostly unverified. Once we gimplify
3384 unit-at-a-time we have a chance to fix this. */
3386 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3388 tree arg = gimple_call_arg (stmt, i);
3389 if ((is_gimple_reg_type (TREE_TYPE (arg))
3390 && !is_gimple_val (arg))
3391 || (!is_gimple_reg_type (TREE_TYPE (arg))
3392 && !is_gimple_lvalue (arg)))
3394 error ("invalid argument to gimple call");
3395 debug_generic_expr (arg);
3396 return true;
3400 return false;
3403 /* Verifies the gimple comparison with the result type TYPE and
3404 the operands OP0 and OP1. */
3406 static bool
3407 verify_gimple_comparison (tree type, tree op0, tree op1)
3409 tree op0_type = TREE_TYPE (op0);
3410 tree op1_type = TREE_TYPE (op1);
3412 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3414 error ("invalid operands in gimple comparison");
3415 return true;
3418 /* For comparisons, the type of the operation does not record the
3419 effective type the comparison is carried out in. Instead
3420 we require that either the first operand is trivially
3421 convertible into the second, or the other way around.
3422 Because we special-case pointers to void we allow
3423 comparisons of pointers with the same mode as well. */
3424 if (!useless_type_conversion_p (op0_type, op1_type)
3425 && !useless_type_conversion_p (op1_type, op0_type)
3426 && (!POINTER_TYPE_P (op0_type)
3427 || !POINTER_TYPE_P (op1_type)
3428 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3430 error ("mismatching comparison operand types");
3431 debug_generic_expr (op0_type);
3432 debug_generic_expr (op1_type);
3433 return true;
3436 /* The resulting type of a comparison may be an effective boolean type. */
3437 if (INTEGRAL_TYPE_P (type)
3438 && (TREE_CODE (type) == BOOLEAN_TYPE
3439 || TYPE_PRECISION (type) == 1))
3441 if (TREE_CODE (op0_type) == VECTOR_TYPE
3442 || TREE_CODE (op1_type) == VECTOR_TYPE)
3444 error ("vector comparison returning a boolean");
3445 debug_generic_expr (op0_type);
3446 debug_generic_expr (op1_type);
3447 return true;
3450 /* Or an integer vector type with the same size and element count
3451 as the comparison operand types. */
3452 else if (TREE_CODE (type) == VECTOR_TYPE
3453 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3455 if (TREE_CODE (op0_type) != VECTOR_TYPE
3456 || TREE_CODE (op1_type) != VECTOR_TYPE)
3458 error ("non-vector operands in vector comparison");
3459 debug_generic_expr (op0_type);
3460 debug_generic_expr (op1_type);
3461 return true;
3464 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3465 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3466 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3467 /* The result of a vector comparison is of signed
3468 integral type. */
3469 || TYPE_UNSIGNED (TREE_TYPE (type)))
3471 error ("invalid vector comparison resulting type");
3472 debug_generic_expr (type);
3473 return true;
3476 else
3478 error ("bogus comparison result type");
3479 debug_generic_expr (type);
3480 return true;
3483 return false;
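/* Editor's illustration (not in the original source): in GNU C terms
   the rules above accept a scalar comparison with a boolean result
   and a vector comparison with a signed integer vector result of
   matching shape, but reject a vector comparison that yields a
   scalar boolean.  */

typedef int example_v4si __attribute__ ((vector_size (16)));

static example_v4si
example_vector_compare (example_v4si x, example_v4si y)
{
  /* Element-wise compare; each lane becomes -1 (true) or 0 (false),
     i.e. a signed integer vector as the verifier demands.  */
  return x < y;
}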
3486 /* Verify a gimple assignment statement STMT with a unary rhs.
3487 Returns true if anything is wrong. */
3489 static bool
3490 verify_gimple_assign_unary (gassign *stmt)
3492 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3493 tree lhs = gimple_assign_lhs (stmt);
3494 tree lhs_type = TREE_TYPE (lhs);
3495 tree rhs1 = gimple_assign_rhs1 (stmt);
3496 tree rhs1_type = TREE_TYPE (rhs1);
3498 if (!is_gimple_reg (lhs))
3500 error ("non-register as LHS of unary operation");
3501 return true;
3504 if (!is_gimple_val (rhs1))
3506 error ("invalid operand in unary operation");
3507 return true;
3510 /* First handle conversions. */
3511 switch (rhs_code)
3513 CASE_CONVERT:
3515 /* Allow conversions from pointer type to integral type only if
3516 there is no sign or zero extension involved.
3517 For targets where the precision of ptrofftype doesn't match that
3518 of pointers we need to allow arbitrary conversions to ptrofftype. */
3519 if ((POINTER_TYPE_P (lhs_type)
3520 && INTEGRAL_TYPE_P (rhs1_type))
3521 || (POINTER_TYPE_P (rhs1_type)
3522 && INTEGRAL_TYPE_P (lhs_type)
3523 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3524 || ptrofftype_p (sizetype))))
3525 return false;
3527 /* Allow conversion from integral to offset type and vice versa. */
3528 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3529 && INTEGRAL_TYPE_P (rhs1_type))
3530 || (INTEGRAL_TYPE_P (lhs_type)
3531 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3532 return false;
3534 /* Otherwise assert we are converting between types of the
3535 same kind. */
3536 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3538 error ("invalid types in nop conversion");
3539 debug_generic_expr (lhs_type);
3540 debug_generic_expr (rhs1_type);
3541 return true;
3544 return false;
3547 case ADDR_SPACE_CONVERT_EXPR:
3549 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3550 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3551 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3553 error ("invalid types in address space conversion");
3554 debug_generic_expr (lhs_type);
3555 debug_generic_expr (rhs1_type);
3556 return true;
3559 return false;
3562 case FIXED_CONVERT_EXPR:
3564 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3565 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3567 error ("invalid types in fixed-point conversion");
3568 debug_generic_expr (lhs_type);
3569 debug_generic_expr (rhs1_type);
3570 return true;
3573 return false;
3576 case FLOAT_EXPR:
3578 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3579 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3580 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3582 error ("invalid types in conversion to floating point");
3583 debug_generic_expr (lhs_type);
3584 debug_generic_expr (rhs1_type);
3585 return true;
3588 return false;
3591 case FIX_TRUNC_EXPR:
3593 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3594 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3595 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3597 error ("invalid types in conversion to integer");
3598 debug_generic_expr (lhs_type);
3599 debug_generic_expr (rhs1_type);
3600 return true;
3603 return false;
3605 case REDUC_MAX_EXPR:
3606 case REDUC_MIN_EXPR:
3607 case REDUC_PLUS_EXPR:
3608 if (!VECTOR_TYPE_P (rhs1_type)
3609 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3611 error ("reduction should convert from vector to element type");
3612 debug_generic_expr (lhs_type);
3613 debug_generic_expr (rhs1_type);
3614 return true;
3616 return false;
3618 case VEC_UNPACK_HI_EXPR:
3619 case VEC_UNPACK_LO_EXPR:
3620 case VEC_UNPACK_FLOAT_HI_EXPR:
3621 case VEC_UNPACK_FLOAT_LO_EXPR:
3622 /* FIXME. */
3623 return false;
3625 case NEGATE_EXPR:
3626 case ABS_EXPR:
3627 case BIT_NOT_EXPR:
3628 case PAREN_EXPR:
3629 case CONJ_EXPR:
3630 break;
3632 default:
3633 gcc_unreachable ();
3636 /* For the remaining codes assert there is no conversion involved. */
3637 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3639 error ("non-trivial conversion in unary operation");
3640 debug_generic_expr (lhs_type);
3641 debug_generic_expr (rhs1_type);
3642 return true;
3645 return false;
3648 /* Verify a gimple assignment statement STMT with a binary rhs.
3649 Returns true if anything is wrong. */
3651 static bool
3652 verify_gimple_assign_binary (gassign *stmt)
3654 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3655 tree lhs = gimple_assign_lhs (stmt);
3656 tree lhs_type = TREE_TYPE (lhs);
3657 tree rhs1 = gimple_assign_rhs1 (stmt);
3658 tree rhs1_type = TREE_TYPE (rhs1);
3659 tree rhs2 = gimple_assign_rhs2 (stmt);
3660 tree rhs2_type = TREE_TYPE (rhs2);
3662 if (!is_gimple_reg (lhs))
3664 error ("non-register as LHS of binary operation");
3665 return true;
3668 if (!is_gimple_val (rhs1)
3669 || !is_gimple_val (rhs2))
3671 error ("invalid operands in binary operation");
3672 return true;
3675 /* First handle operations that involve different types. */
3676 switch (rhs_code)
3678 case COMPLEX_EXPR:
3680 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3681 || !(INTEGRAL_TYPE_P (rhs1_type)
3682 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3683 || !(INTEGRAL_TYPE_P (rhs2_type)
3684 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3686 error ("type mismatch in complex expression");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 debug_generic_expr (rhs2_type);
3690 return true;
3693 return false;
3696 case LSHIFT_EXPR:
3697 case RSHIFT_EXPR:
3698 case LROTATE_EXPR:
3699 case RROTATE_EXPR:
3701 /* Shifts and rotates are ok on integral types, fixed point
3702 types and integer vector types. */
3703 if ((!INTEGRAL_TYPE_P (rhs1_type)
3704 && !FIXED_POINT_TYPE_P (rhs1_type)
3705 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3706 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3707 || (!INTEGRAL_TYPE_P (rhs2_type)
3708 /* Vector shifts of vectors are also ok. */
3709 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3710 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3711 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3712 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3713 || !useless_type_conversion_p (lhs_type, rhs1_type))
3715 error ("type mismatch in shift expression");
3716 debug_generic_expr (lhs_type);
3717 debug_generic_expr (rhs1_type);
3718 debug_generic_expr (rhs2_type);
3719 return true;
3722 return false;
3725 case WIDEN_LSHIFT_EXPR:
3727 if (!INTEGRAL_TYPE_P (lhs_type)
3728 || !INTEGRAL_TYPE_P (rhs1_type)
3729 || TREE_CODE (rhs2) != INTEGER_CST
3730 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3732 error ("type mismatch in widening vector shift expression");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 debug_generic_expr (rhs2_type);
3736 return true;
3739 return false;
3742 case VEC_WIDEN_LSHIFT_HI_EXPR:
3743 case VEC_WIDEN_LSHIFT_LO_EXPR:
3745 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3746 || TREE_CODE (lhs_type) != VECTOR_TYPE
3747 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3748 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3749 || TREE_CODE (rhs2) != INTEGER_CST
3750 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3751 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3753 error ("type mismatch in widening vector shift expression");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 debug_generic_expr (rhs2_type);
3757 return true;
3760 return false;
3763 case PLUS_EXPR:
3764 case MINUS_EXPR:
3766 tree lhs_etype = lhs_type;
3767 tree rhs1_etype = rhs1_type;
3768 tree rhs2_etype = rhs2_type;
3769 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3771 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3772 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3774 error ("invalid non-vector operands to vector valued plus");
3775 return true;
3777 lhs_etype = TREE_TYPE (lhs_type);
3778 rhs1_etype = TREE_TYPE (rhs1_type);
3779 rhs2_etype = TREE_TYPE (rhs2_type);
3781 if (POINTER_TYPE_P (lhs_etype)
3782 || POINTER_TYPE_P (rhs1_etype)
3783 || POINTER_TYPE_P (rhs2_etype))
3785 error ("invalid (pointer) operands to plus/minus");
3786 return true;
3789 /* Continue with generic binary expression handling. */
3790 break;
3793 case POINTER_PLUS_EXPR:
3795 if (!POINTER_TYPE_P (rhs1_type)
3796 || !useless_type_conversion_p (lhs_type, rhs1_type)
3797 || !ptrofftype_p (rhs2_type))
3799 error ("type mismatch in pointer plus expression");
3800 debug_generic_stmt (lhs_type);
3801 debug_generic_stmt (rhs1_type);
3802 debug_generic_stmt (rhs2_type);
3803 return true;
3806 return false;
3809 case TRUTH_ANDIF_EXPR:
3810 case TRUTH_ORIF_EXPR:
3811 case TRUTH_AND_EXPR:
3812 case TRUTH_OR_EXPR:
3813 case TRUTH_XOR_EXPR:
3815 gcc_unreachable ();
3817 case LT_EXPR:
3818 case LE_EXPR:
3819 case GT_EXPR:
3820 case GE_EXPR:
3821 case EQ_EXPR:
3822 case NE_EXPR:
3823 case UNORDERED_EXPR:
3824 case ORDERED_EXPR:
3825 case UNLT_EXPR:
3826 case UNLE_EXPR:
3827 case UNGT_EXPR:
3828 case UNGE_EXPR:
3829 case UNEQ_EXPR:
3830 case LTGT_EXPR:
3831 /* Comparisons are also binary, but the result type is not
3832 connected to the operand types. */
3833 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3835 case WIDEN_MULT_EXPR:
3836 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3837 return true;
3838 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3839 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3841 case WIDEN_SUM_EXPR:
3842 case VEC_WIDEN_MULT_HI_EXPR:
3843 case VEC_WIDEN_MULT_LO_EXPR:
3844 case VEC_WIDEN_MULT_EVEN_EXPR:
3845 case VEC_WIDEN_MULT_ODD_EXPR:
3846 case VEC_PACK_TRUNC_EXPR:
3847 case VEC_PACK_SAT_EXPR:
3848 case VEC_PACK_FIX_TRUNC_EXPR:
3849 /* FIXME. */
3850 return false;
3852 case MULT_EXPR:
3853 case MULT_HIGHPART_EXPR:
3854 case TRUNC_DIV_EXPR:
3855 case CEIL_DIV_EXPR:
3856 case FLOOR_DIV_EXPR:
3857 case ROUND_DIV_EXPR:
3858 case TRUNC_MOD_EXPR:
3859 case CEIL_MOD_EXPR:
3860 case FLOOR_MOD_EXPR:
3861 case ROUND_MOD_EXPR:
3862 case RDIV_EXPR:
3863 case EXACT_DIV_EXPR:
3864 case MIN_EXPR:
3865 case MAX_EXPR:
3866 case BIT_IOR_EXPR:
3867 case BIT_XOR_EXPR:
3868 case BIT_AND_EXPR:
3869 /* Continue with generic binary expression handling. */
3870 break;
3872 default:
3873 gcc_unreachable ();
3876 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3877 || !useless_type_conversion_p (lhs_type, rhs2_type))
3879 error ("type mismatch in binary expression");
3880 debug_generic_stmt (lhs_type);
3881 debug_generic_stmt (rhs1_type);
3882 debug_generic_stmt (rhs2_type);
3883 return true;
3886 return false;
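/* Editor's note (not in the original source): as a concrete instance
   of the checks above, pointer arithmetic never reaches this verifier
   as PLUS_EXPR.  For a C expression "p + i" the gimplifier emits the
   equivalent of

     t1 = (sizetype) i;
     p2 = p POINTER_PLUS_EXPR t1;

   where the first operand has pointer type and the second is a
   ptrofftype value -- exactly what the POINTER_PLUS_EXPR case above
   requires.  */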
3889 /* Verify a gimple assignment statement STMT with a ternary rhs.
3890 Returns true if anything is wrong. */
3892 static bool
3893 verify_gimple_assign_ternary (gassign *stmt)
3895 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3896 tree lhs = gimple_assign_lhs (stmt);
3897 tree lhs_type = TREE_TYPE (lhs);
3898 tree rhs1 = gimple_assign_rhs1 (stmt);
3899 tree rhs1_type = TREE_TYPE (rhs1);
3900 tree rhs2 = gimple_assign_rhs2 (stmt);
3901 tree rhs2_type = TREE_TYPE (rhs2);
3902 tree rhs3 = gimple_assign_rhs3 (stmt);
3903 tree rhs3_type = TREE_TYPE (rhs3);
3905 if (!is_gimple_reg (lhs))
3907 error ("non-register as LHS of ternary operation");
3908 return true;
3911 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3912 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3913 || !is_gimple_val (rhs2)
3914 || !is_gimple_val (rhs3))
3916 error ("invalid operands in ternary operation");
3917 return true;
3920 /* First handle operations that involve different types. */
3921 switch (rhs_code)
3923 case WIDEN_MULT_PLUS_EXPR:
3924 case WIDEN_MULT_MINUS_EXPR:
3925 if ((!INTEGRAL_TYPE_P (rhs1_type)
3926 && !FIXED_POINT_TYPE_P (rhs1_type))
3927 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3928 || !useless_type_conversion_p (lhs_type, rhs3_type)
3929 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3930 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3932 error ("type mismatch in widening multiply-accumulate expression");
3933 debug_generic_expr (lhs_type);
3934 debug_generic_expr (rhs1_type);
3935 debug_generic_expr (rhs2_type);
3936 debug_generic_expr (rhs3_type);
3937 return true;
3939 break;
3941 case FMA_EXPR:
3942 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3943 || !useless_type_conversion_p (lhs_type, rhs2_type)
3944 || !useless_type_conversion_p (lhs_type, rhs3_type))
3946 error ("type mismatch in fused multiply-add expression");
3947 debug_generic_expr (lhs_type);
3948 debug_generic_expr (rhs1_type);
3949 debug_generic_expr (rhs2_type);
3950 debug_generic_expr (rhs3_type);
3951 return true;
3953 break;
3955 case COND_EXPR:
3956 case VEC_COND_EXPR:
3957 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3958 || !useless_type_conversion_p (lhs_type, rhs3_type))
3960 error ("type mismatch in conditional expression");
3961 debug_generic_expr (lhs_type);
3962 debug_generic_expr (rhs2_type);
3963 debug_generic_expr (rhs3_type);
3964 return true;
3966 break;
3968 case VEC_PERM_EXPR:
3969 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3970 || !useless_type_conversion_p (lhs_type, rhs2_type))
3972 error ("type mismatch in vector permute expression");
3973 debug_generic_expr (lhs_type);
3974 debug_generic_expr (rhs1_type);
3975 debug_generic_expr (rhs2_type);
3976 debug_generic_expr (rhs3_type);
3977 return true;
3980 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3981 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3982 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3984 error ("vector types expected in vector permute expression");
3985 debug_generic_expr (lhs_type);
3986 debug_generic_expr (rhs1_type);
3987 debug_generic_expr (rhs2_type);
3988 debug_generic_expr (rhs3_type);
3989 return true;
3992 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3993 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3994 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3995 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3996 != TYPE_VECTOR_SUBPARTS (lhs_type))
3998 error ("vectors with different element number found "
3999 "in vector permute expression");
4000 debug_generic_expr (lhs_type);
4001 debug_generic_expr (rhs1_type);
4002 debug_generic_expr (rhs2_type);
4003 debug_generic_expr (rhs3_type);
4004 return true;
4007 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4008 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4009 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4011 error ("invalid mask type in vector permute expression");
4012 debug_generic_expr (lhs_type);
4013 debug_generic_expr (rhs1_type);
4014 debug_generic_expr (rhs2_type);
4015 debug_generic_expr (rhs3_type);
4016 return true;
4019 return false;
4021 case SAD_EXPR:
4022 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4023 || !useless_type_conversion_p (lhs_type, rhs3_type)
4024 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4025 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4026 > GET_MODE_BITSIZE (GET_MODE_INNER
4027 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4029 error ("type mismatch in sad expression");
4030 debug_generic_expr (lhs_type);
4031 debug_generic_expr (rhs1_type);
4032 debug_generic_expr (rhs2_type);
4033 debug_generic_expr (rhs3_type);
4034 return true;
4037 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4038 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4039 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4041 error ("vector types expected in sad expression");
4042 debug_generic_expr (lhs_type);
4043 debug_generic_expr (rhs1_type);
4044 debug_generic_expr (rhs2_type);
4045 debug_generic_expr (rhs3_type);
4046 return true;
4049 return false;
4051 case DOT_PROD_EXPR:
4052 case REALIGN_LOAD_EXPR:
4053 /* FIXME. */
4054 return false;
4056 default:
4057 gcc_unreachable ();
4059 return false;
4062 /* Verify a gimple assignment statement STMT with a single rhs.
4063 Returns true if anything is wrong. */
4065 static bool
4066 verify_gimple_assign_single (gassign *stmt)
4068 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4069 tree lhs = gimple_assign_lhs (stmt);
4070 tree lhs_type = TREE_TYPE (lhs);
4071 tree rhs1 = gimple_assign_rhs1 (stmt);
4072 tree rhs1_type = TREE_TYPE (rhs1);
4073 bool res = false;
4075 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4077 error ("non-trivial conversion at assignment");
4078 debug_generic_expr (lhs_type);
4079 debug_generic_expr (rhs1_type);
4080 return true;
4083 if (gimple_clobber_p (stmt)
4084 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4086 error ("non-decl/MEM_REF LHS in clobber statement");
4087 debug_generic_expr (lhs);
4088 return true;
4091 if (handled_component_p (lhs)
4092 || TREE_CODE (lhs) == MEM_REF
4093 || TREE_CODE (lhs) == TARGET_MEM_REF)
4094 res |= verify_types_in_gimple_reference (lhs, true);
4096 /* Special codes we cannot handle via their class. */
4097 switch (rhs_code)
4099 case ADDR_EXPR:
4101 tree op = TREE_OPERAND (rhs1, 0);
4102 if (!is_gimple_addressable (op))
4104 error ("invalid operand in unary expression");
4105 return true;
4108 /* Technically there is no longer a need for matching types, but
4109 gimple hygiene asks for this check. In LTO we can end up
4110 combining incompatible units and thus end up with addresses
4111 of globals that change their type to a common one. */
4112 if (!in_lto_p
4113 && !types_compatible_p (TREE_TYPE (op),
4114 TREE_TYPE (TREE_TYPE (rhs1)))
4115 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4116 TREE_TYPE (op)))
4118 error ("type mismatch in address expression");
4119 debug_generic_stmt (TREE_TYPE (rhs1));
4120 debug_generic_stmt (TREE_TYPE (op));
4121 return true;
4124 return verify_types_in_gimple_reference (op, true);
4127 /* tcc_reference */
4128 case INDIRECT_REF:
4129 error ("INDIRECT_REF in gimple IL");
4130 return true;
4132 case COMPONENT_REF:
4133 case BIT_FIELD_REF:
4134 case ARRAY_REF:
4135 case ARRAY_RANGE_REF:
4136 case VIEW_CONVERT_EXPR:
4137 case REALPART_EXPR:
4138 case IMAGPART_EXPR:
4139 case TARGET_MEM_REF:
4140 case MEM_REF:
4141 if (!is_gimple_reg (lhs)
4142 && is_gimple_reg_type (TREE_TYPE (lhs)))
4144 error ("invalid rhs for gimple memory store");
4145 debug_generic_stmt (lhs);
4146 debug_generic_stmt (rhs1);
4147 return true;
4149 return res || verify_types_in_gimple_reference (rhs1, false);
4151 /* tcc_constant */
4152 case SSA_NAME:
4153 case INTEGER_CST:
4154 case REAL_CST:
4155 case FIXED_CST:
4156 case COMPLEX_CST:
4157 case VECTOR_CST:
4158 case STRING_CST:
4159 return res;
4161 /* tcc_declaration */
4162 case CONST_DECL:
4163 return res;
4164 case VAR_DECL:
4165 case PARM_DECL:
4166 if (!is_gimple_reg (lhs)
4167 && !is_gimple_reg (rhs1)
4168 && is_gimple_reg_type (TREE_TYPE (lhs)))
4170 error ("invalid rhs for gimple memory store");
4171 debug_generic_stmt (lhs);
4172 debug_generic_stmt (rhs1);
4173 return true;
4175 return res;
4177 case CONSTRUCTOR:
4178 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4180 unsigned int i;
4181 tree elt_i, elt_v, elt_t = NULL_TREE;
4183 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4184 return res;
4185 /* For vector CONSTRUCTORs we require that either it is an empty
4186 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4187 (then the element count must be correct to cover the whole
4188 outer vector and the index must be NULL on all elements), or it is
4189 a CONSTRUCTOR of scalar elements, where we as an exception allow
4190 a smaller number of elements (assuming zero filling) and
4191 consecutive indexes as compared to NULL indexes (such
4192 CONSTRUCTORs can appear in the IL from FEs). */
4193 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4195 if (elt_t == NULL_TREE)
4197 elt_t = TREE_TYPE (elt_v);
4198 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4200 tree elt_t = TREE_TYPE (elt_v);
4201 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4202 TREE_TYPE (elt_t)))
4204 error ("incorrect type of vector CONSTRUCTOR"
4205 " elements");
4206 debug_generic_stmt (rhs1);
4207 return true;
4209 else if (CONSTRUCTOR_NELTS (rhs1)
4210 * TYPE_VECTOR_SUBPARTS (elt_t)
4211 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4213 error ("incorrect number of vector CONSTRUCTOR"
4214 " elements");
4215 debug_generic_stmt (rhs1);
4216 return true;
4219 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4220 elt_t))
4222 error ("incorrect type of vector CONSTRUCTOR elements");
4223 debug_generic_stmt (rhs1);
4224 return true;
4226 else if (CONSTRUCTOR_NELTS (rhs1)
4227 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4229 error ("incorrect number of vector CONSTRUCTOR elements");
4230 debug_generic_stmt (rhs1);
4231 return true;
4234 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4236 error ("incorrect type of vector CONSTRUCTOR elements");
4237 debug_generic_stmt (rhs1);
4238 return true;
4240 if (elt_i != NULL_TREE
4241 && (TREE_CODE (elt_t) == VECTOR_TYPE
4242 || TREE_CODE (elt_i) != INTEGER_CST
4243 || compare_tree_int (elt_i, i) != 0))
4245 error ("vector CONSTRUCTOR with non-NULL element index");
4246 debug_generic_stmt (rhs1);
4247 return true;
4249 if (!is_gimple_val (elt_v))
4251 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4252 debug_generic_stmt (rhs1);
4253 return true;
4257 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4259 error ("non-vector CONSTRUCTOR with elements");
4260 debug_generic_stmt (rhs1);
4261 return true;
4263 return res;
4264 case OBJ_TYPE_REF:
4265 case ASSERT_EXPR:
4266 case WITH_SIZE_EXPR:
4267 /* FIXME. */
4268 return res;
4270 default:;
4273 return res;
4276 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4277 is a problem, otherwise false. */
4279 static bool
4280 verify_gimple_assign (gassign *stmt)
4282 switch (gimple_assign_rhs_class (stmt))
4284 case GIMPLE_SINGLE_RHS:
4285 return verify_gimple_assign_single (stmt);
4287 case GIMPLE_UNARY_RHS:
4288 return verify_gimple_assign_unary (stmt);
4290 case GIMPLE_BINARY_RHS:
4291 return verify_gimple_assign_binary (stmt);
4293 case GIMPLE_TERNARY_RHS:
4294 return verify_gimple_assign_ternary (stmt);
4296 default:
4297 gcc_unreachable ();
4301 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4302 is a problem, otherwise false. */
4304 static bool
4305 verify_gimple_return (greturn *stmt)
4307 tree op = gimple_return_retval (stmt);
4308 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4310 /* We cannot test for present return values as we do not fix up missing
4311 return values from the original source. */
4312 if (op == NULL)
4313 return false;
4315 if (!is_gimple_val (op)
4316 && TREE_CODE (op) != RESULT_DECL)
4318 error ("invalid operand in return statement");
4319 debug_generic_stmt (op);
4320 return true;
4323 if ((TREE_CODE (op) == RESULT_DECL
4324 && DECL_BY_REFERENCE (op))
4325 || (TREE_CODE (op) == SSA_NAME
4326 && SSA_NAME_VAR (op)
4327 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4328 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4329 op = TREE_TYPE (op);
4331 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4333 error ("invalid conversion in return statement");
4334 debug_generic_stmt (restype);
4335 debug_generic_stmt (TREE_TYPE (op));
4336 return true;
4339 return false;
4343 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4344 is a problem, otherwise false. */
4346 static bool
4347 verify_gimple_goto (ggoto *stmt)
4349 tree dest = gimple_goto_dest (stmt);
4351 /* ??? We have two canonical forms of direct goto destinations, a
4352 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4353 if (TREE_CODE (dest) != LABEL_DECL
4354 && (!is_gimple_val (dest)
4355 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4357 error ("goto destination is neither a label nor a pointer");
4358 return true;
4361 return false;
4364 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4365 is a problem, otherwise false. */
4367 static bool
4368 verify_gimple_switch (gswitch *stmt)
4370 unsigned int i, n;
4371 tree elt, prev_upper_bound = NULL_TREE;
4372 tree index_type, elt_type = NULL_TREE;
4374 if (!is_gimple_val (gimple_switch_index (stmt)))
4376 error ("invalid operand to switch statement");
4377 debug_generic_stmt (gimple_switch_index (stmt));
4378 return true;
4381 index_type = TREE_TYPE (gimple_switch_index (stmt));
4382 if (! INTEGRAL_TYPE_P (index_type))
4384 error ("non-integral type switch statement");
4385 debug_generic_expr (index_type);
4386 return true;
4389 elt = gimple_switch_label (stmt, 0);
4390 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4392 error ("invalid default case label in switch statement");
4393 debug_generic_expr (elt);
4394 return true;
4397 n = gimple_switch_num_labels (stmt);
4398 for (i = 1; i < n; i++)
4400 elt = gimple_switch_label (stmt, i);
4402 if (! CASE_LOW (elt))
4404 error ("invalid case label in switch statement");
4405 debug_generic_expr (elt);
4406 return true;
4408 if (CASE_HIGH (elt)
4409 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4411 error ("invalid case range in switch statement");
4412 debug_generic_expr (elt);
4413 return true;
4416 if (elt_type)
4418 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4419 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4421 error ("type mismatch for case label in switch statement");
4422 debug_generic_expr (elt);
4423 return true;
4426 else
4428 elt_type = TREE_TYPE (CASE_LOW (elt));
4429 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4431 error ("type precision mismatch in switch statement");
4432 return true;
4436 if (prev_upper_bound)
4438 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4440 error ("case labels not sorted in switch statement");
4441 return true;
4445 prev_upper_bound = CASE_HIGH (elt);
4446 if (! prev_upper_bound)
4447 prev_upper_bound = CASE_LOW (elt);
4450 return false;
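/* Editor's illustration, not part of the original source: a
   GIMPLE_SWITCH accepted by the checks above keeps the default label
   (NULL CASE_LOW and CASE_HIGH) in slot zero and the remaining labels
   sorted by CASE_LOW with non-overlapping ranges; in the dump format
   that is, for instance,

     switch (i_1) <default: <L3>, case 0: <L0>, case 2 ... 5: <L1>,
                   case 9: <L2>>

   where 0 < 2, CASE_LOW < CASE_HIGH holds for the range 2 ... 5, and
   5 < 9 keeps the vector sorted.  The exact label syntax is the
   pretty-printer's, shown here only for orientation.  */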
4453 /* Verify a gimple debug statement STMT.
4454 Returns true if anything is wrong. */
4456 static bool
4457 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4459 /* There isn't much that could be wrong in a gimple debug stmt. A
4460 gimple debug bind stmt, for example, maps a tree that is usually
4461 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4462 component or member of an aggregate type) to another tree that
4463 can be an arbitrary expression. These stmts expand into debug
4464 insns, and are converted to debug notes by var-tracking.c. */
4465 return false;
4468 /* Verify a gimple label statement STMT.
4469 Returns true if anything is wrong. */
4471 static bool
4472 verify_gimple_label (glabel *stmt)
4474 tree decl = gimple_label_label (stmt);
4475 int uid;
4476 bool err = false;
4478 if (TREE_CODE (decl) != LABEL_DECL)
4479 return true;
4480 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4481 && DECL_CONTEXT (decl) != current_function_decl)
4483 error ("label's context is not the current function decl");
4484 err |= true;
4487 uid = LABEL_DECL_UID (decl);
4488 if (cfun->cfg
4489 && (uid == -1
4490 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4492 error ("incorrect entry in label_to_block_map");
4493 err |= true;
4496 uid = EH_LANDING_PAD_NR (decl);
4497 if (uid)
4499 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4500 if (decl != lp->post_landing_pad)
4502 error ("incorrect setting of landing pad number");
4503 err |= true;
4507 return err;
4510 /* Verify a gimple cond statement STMT.
4511 Returns true if anything is wrong. */
4513 static bool
4514 verify_gimple_cond (gcond *stmt)
4516 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4518 error ("invalid comparison code in gimple cond");
4519 return true;
4521 if (!(!gimple_cond_true_label (stmt)
4522 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4523 || !(!gimple_cond_false_label (stmt)
4524 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4526 error ("invalid labels in gimple cond");
4527 return true;
4530 return verify_gimple_comparison (boolean_type_node,
4531 gimple_cond_lhs (stmt),
4532 gimple_cond_rhs (stmt));
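/* Editor's illustration, not part of the original source: a valid
   GIMPLE_COND carries a tcc_comparison code, two gimple values and
   optional LABEL_DECLs, and is dumped as, e.g.,

     if (a_2 > b_3) goto <bb 4>; else goto <bb 5>;

   The operands are then checked by verify_gimple_comparison as if
   the predicate produced a boolean_type_node value, hence the call
   above.  */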
4535 /* Verify the GIMPLE statement STMT. Returns true if there is an
4536 error, otherwise false. */
4538 static bool
4539 verify_gimple_stmt (gimple stmt)
4541 switch (gimple_code (stmt))
4543 case GIMPLE_ASSIGN:
4544 return verify_gimple_assign (as_a <gassign *> (stmt));
4546 case GIMPLE_LABEL:
4547 return verify_gimple_label (as_a <glabel *> (stmt));
4549 case GIMPLE_CALL:
4550 return verify_gimple_call (as_a <gcall *> (stmt));
4552 case GIMPLE_COND:
4553 return verify_gimple_cond (as_a <gcond *> (stmt));
4555 case GIMPLE_GOTO:
4556 return verify_gimple_goto (as_a <ggoto *> (stmt));
4558 case GIMPLE_SWITCH:
4559 return verify_gimple_switch (as_a <gswitch *> (stmt));
4561 case GIMPLE_RETURN:
4562 return verify_gimple_return (as_a <greturn *> (stmt));
4564 case GIMPLE_ASM:
4565 return false;
4567 case GIMPLE_TRANSACTION:
4568 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4570 /* Tuples that do not have tree operands. */
4571 case GIMPLE_NOP:
4572 case GIMPLE_PREDICT:
4573 case GIMPLE_RESX:
4574 case GIMPLE_EH_DISPATCH:
4575 case GIMPLE_EH_MUST_NOT_THROW:
4576 return false;
4578 CASE_GIMPLE_OMP:
4579 /* OpenMP directives are validated by the FE and never operated
4580 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4581 non-gimple expressions when the main index variable has had
4582 its address taken. This does not affect the loop itself
4583 because the header of a GIMPLE_OMP_FOR is merely used to determine
4584 how to set up the parallel iteration. */
4585 return false;
4587 case GIMPLE_DEBUG:
4588 return verify_gimple_debug (stmt);
4590 default:
4591 gcc_unreachable ();
4595 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4596 and false otherwise. */
4598 static bool
4599 verify_gimple_phi (gimple phi)
4601 bool err = false;
4602 unsigned i;
4603 tree phi_result = gimple_phi_result (phi);
4604 bool virtual_p;
4606 if (!phi_result)
4608 error ("invalid PHI result");
4609 return true;
4612 virtual_p = virtual_operand_p (phi_result);
4613 if (TREE_CODE (phi_result) != SSA_NAME
4614 || (virtual_p
4615 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4617 error ("invalid PHI result");
4618 err = true;
4621 for (i = 0; i < gimple_phi_num_args (phi); i++)
4623 tree t = gimple_phi_arg_def (phi, i);
4625 if (!t)
4627 error ("missing PHI def");
4628 err |= true;
4629 continue;
4631 /* Addressable variables do have SSA_NAMEs but they
4632 are not considered gimple values. */
4633 else if ((TREE_CODE (t) == SSA_NAME
4634 && virtual_p != virtual_operand_p (t))
4635 || (virtual_p
4636 && (TREE_CODE (t) != SSA_NAME
4637 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4638 || (!virtual_p
4639 && !is_gimple_val (t)))
4641 error ("invalid PHI argument");
4642 debug_generic_expr (t);
4643 err |= true;
4645 #ifdef ENABLE_TYPES_CHECKING
4646 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4648 error ("incompatible types in PHI argument %u", i);
4649 debug_generic_stmt (TREE_TYPE (phi_result));
4650 debug_generic_stmt (TREE_TYPE (t));
4651 err |= true;
4653 #endif
4656 return err;
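/* Editor's illustration, not part of the original source: a PHI node
   that passes the checks above has an SSA_NAME result and one
   gimple-value argument per incoming edge, e.g.

     x_4 = PHI <x_2(3), x_3(5)>

   while a virtual PHI must use SSA names of the single virtual
   operand of the function throughout, conventionally dumped as

     .MEM_6 = PHI <.MEM_4(3), .MEM_5(5)>  */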
4659 /* Verify the GIMPLE statements inside the sequence STMTS. */
4661 static bool
4662 verify_gimple_in_seq_2 (gimple_seq stmts)
4664 gimple_stmt_iterator ittr;
4665 bool err = false;
4667 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4669 gimple stmt = gsi_stmt (ittr);
4671 switch (gimple_code (stmt))
4673 case GIMPLE_BIND:
4674 err |= verify_gimple_in_seq_2 (
4675 gimple_bind_body (as_a <gbind *> (stmt)));
4676 break;
4678 case GIMPLE_TRY:
4679 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4680 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4681 break;
4683 case GIMPLE_EH_FILTER:
4684 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4685 break;
4687 case GIMPLE_EH_ELSE:
4689 geh_else *eh_else = as_a <geh_else *> (stmt);
4690 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4691 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4693 break;
4695 case GIMPLE_CATCH:
4696 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4697 as_a <gcatch *> (stmt)));
4698 break;
4700 case GIMPLE_TRANSACTION:
4701 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4702 break;
4704 default:
4706 bool err2 = verify_gimple_stmt (stmt);
4707 if (err2)
4708 debug_gimple_stmt (stmt);
4709 err |= err2;
4714 return err;
4717 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4718 is a problem, otherwise false. */
4720 static bool
4721 verify_gimple_transaction (gtransaction *stmt)
4723 tree lab = gimple_transaction_label (stmt);
4724 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4725 return true;
4726 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4730 /* Verify the GIMPLE statements inside the statement list STMTS. */
4732 DEBUG_FUNCTION void
4733 verify_gimple_in_seq (gimple_seq stmts)
4735 timevar_push (TV_TREE_STMT_VERIFY);
4736 if (verify_gimple_in_seq_2 (stmts))
4737 internal_error ("verify_gimple failed");
4738 timevar_pop (TV_TREE_STMT_VERIFY);
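/* Editor's note, an illustrative sketch that is not part of the
   original source: a typical caller checks a function body while it
   is still a flat statement sequence, e.g.

     verify_gimple_in_seq (gimple_body (current_function_decl));

   assuming the body has already been gimplified;
   verify_gimple_in_seq_2 recurses into GIMPLE_BIND, GIMPLE_TRY and
   similar nested bodies, and internal_error aborts compilation on
   the first failing sequence.  */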
4741 /* Return true when T can be shared. */
4743 static bool
4744 tree_node_can_be_shared (tree t)
4746 if (IS_TYPE_OR_DECL_P (t)
4747 || is_gimple_min_invariant (t)
4748 || TREE_CODE (t) == SSA_NAME
4749 || t == error_mark_node
4750 || TREE_CODE (t) == IDENTIFIER_NODE)
4751 return true;
4753 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4754 return true;
4756 if (DECL_P (t))
4757 return true;
4759 return false;
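/* Editor's note, not part of the original source: "shared" means the
   identical tree node may be referenced from several statements.
   Constants, types, decls and SSA_NAMEs are unique by construction,
   so e.g. one INTEGER_CST node for 0 may appear in many statements;
   by contrast a reference tree such as the COMPONENT_REF for a.b must
   be unshared per statement, which the sharing walkers below enforce
   by flagging any node visited twice.  */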
4762 /* Called via walk_tree. Verify tree sharing. */
4764 static tree
4765 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4767 hash_set<void *> *visited = (hash_set<void *> *) data;
4769 if (tree_node_can_be_shared (*tp))
4771 *walk_subtrees = false;
4772 return NULL;
4775 if (visited->add (*tp))
4776 return *tp;
4778 return NULL;
4781 /* Called via walk_gimple_stmt. Verify tree sharing. */
4783 static tree
4784 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4786 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4787 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4790 static bool eh_error_found;
4791 bool
4792 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4793 hash_set<gimple> *visited)
4795 if (!visited->contains (stmt))
4797 error ("dead STMT in EH table");
4798 debug_gimple_stmt (stmt);
4799 eh_error_found = true;
4801 return true;
4804 /* Verify that the block of location LOC is in BLOCKS. */
4806 static bool
4807 verify_location (hash_set<tree> *blocks, location_t loc)
4809 tree block = LOCATION_BLOCK (loc);
4810 if (block != NULL_TREE
4811 && !blocks->contains (block))
4813 error ("location references block not in block tree");
4814 return true;
4816 if (block != NULL_TREE)
4817 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4818 return false;
4821 /* Called via walk_tree. Verify that expressions have no blocks. */
4823 static tree
4824 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4826 if (!EXPR_P (*tp))
4828 *walk_subtrees = false;
4829 return NULL;
4832 location_t loc = EXPR_LOCATION (*tp);
4833 if (LOCATION_BLOCK (loc) != NULL)
4834 return *tp;
4836 return NULL;
4839 /* Called via walk_tree. Verify locations of expressions. */
4841 static tree
4842 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4844 hash_set<tree> *blocks = (hash_set<tree> *) data;
4846 if (TREE_CODE (*tp) == VAR_DECL
4847 && DECL_HAS_DEBUG_EXPR_P (*tp))
4849 tree t = DECL_DEBUG_EXPR (*tp);
4850 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4851 if (addr)
4852 return addr;
4854 if ((TREE_CODE (*tp) == VAR_DECL
4855 || TREE_CODE (*tp) == PARM_DECL
4856 || TREE_CODE (*tp) == RESULT_DECL)
4857 && DECL_HAS_VALUE_EXPR_P (*tp))
4859 tree t = DECL_VALUE_EXPR (*tp);
4860 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4861 if (addr)
4862 return addr;
4865 if (!EXPR_P (*tp))
4867 *walk_subtrees = false;
4868 return NULL;
4871 location_t loc = EXPR_LOCATION (*tp);
4872 if (verify_location (blocks, loc))
4873 return *tp;
4875 return NULL;
4878 /* Called via walk_gimple_op. Verify locations of expressions. */
4880 static tree
4881 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4883 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4884 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4887 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4889 static void
4890 collect_subblocks (hash_set<tree> *blocks, tree block)
4892 tree t;
4893 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4895 blocks->add (t);
4896 collect_subblocks (blocks, t);
4900 /* Verify the GIMPLE statements in the CFG of FN. */
4902 DEBUG_FUNCTION void
4903 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4905 basic_block bb;
4906 bool err = false;
4908 timevar_push (TV_TREE_STMT_VERIFY);
4909 hash_set<void *> visited;
4910 hash_set<gimple> visited_stmts;
4912 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4913 hash_set<tree> blocks;
4914 if (DECL_INITIAL (fn->decl))
4916 blocks.add (DECL_INITIAL (fn->decl));
4917 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4920 FOR_EACH_BB_FN (bb, fn)
4922 gimple_stmt_iterator gsi;
4924 for (gphi_iterator gpi = gsi_start_phis (bb);
4925 !gsi_end_p (gpi);
4926 gsi_next (&gpi))
4928 gphi *phi = gpi.phi ();
4929 bool err2 = false;
4930 unsigned i;
4932 visited_stmts.add (phi);
4934 if (gimple_bb (phi) != bb)
4936 error ("gimple_bb (phi) is set to a wrong basic block");
4937 err2 = true;
4940 err2 |= verify_gimple_phi (phi);
4942 /* Only PHI arguments have locations. */
4943 if (gimple_location (phi) != UNKNOWN_LOCATION)
4945 error ("PHI node with location");
4946 err2 = true;
4949 for (i = 0; i < gimple_phi_num_args (phi); i++)
4951 tree arg = gimple_phi_arg_def (phi, i);
4952 tree addr = walk_tree (&arg, verify_node_sharing_1,
4953 &visited, NULL);
4954 if (addr)
4956 error ("incorrect sharing of tree nodes");
4957 debug_generic_expr (addr);
4958 err2 |= true;
4960 location_t loc = gimple_phi_arg_location (phi, i);
4961 if (virtual_operand_p (gimple_phi_result (phi))
4962 && loc != UNKNOWN_LOCATION)
4964 error ("virtual PHI with argument locations");
4965 err2 = true;
4967 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4968 if (addr)
4970 debug_generic_expr (addr);
4971 err2 = true;
4973 err2 |= verify_location (&blocks, loc);
4976 if (err2)
4977 debug_gimple_stmt (phi);
4978 err |= err2;
4981 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4983 gimple stmt = gsi_stmt (gsi);
4984 bool err2 = false;
4985 struct walk_stmt_info wi;
4986 tree addr;
4987 int lp_nr;
4989 visited_stmts.add (stmt);
4991 if (gimple_bb (stmt) != bb)
4993 error ("gimple_bb (stmt) is set to a wrong basic block");
4994 err2 = true;
4997 err2 |= verify_gimple_stmt (stmt);
4998 err2 |= verify_location (&blocks, gimple_location (stmt));
5000 memset (&wi, 0, sizeof (wi));
5001 wi.info = (void *) &visited;
5002 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5003 if (addr)
5005 error ("incorrect sharing of tree nodes");
5006 debug_generic_expr (addr);
5007 err2 |= true;
5010 memset (&wi, 0, sizeof (wi));
5011 wi.info = (void *) &blocks;
5012 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5013 if (addr)
5015 debug_generic_expr (addr);
5016 err2 |= true;
5019 /* ??? Instead of not checking these stmts at all, the walker
5020 should know its context via wi. */
5021 if (!is_gimple_debug (stmt)
5022 && !is_gimple_omp (stmt))
5024 memset (&wi, 0, sizeof (wi));
5025 addr = walk_gimple_op (stmt, verify_expr, &wi);
5026 if (addr)
5028 debug_generic_expr (addr);
5029 inform (gimple_location (stmt), "in statement");
5030 err2 |= true;
5034 /* If the statement is marked as part of an EH region, then it is
5035 expected that the statement could throw. Verify that when
5036 optimizations simplify statements such that we can prove they
5037 cannot throw, we also update the other data structures
5038 to match. */
5039 lp_nr = lookup_stmt_eh_lp (stmt);
5040 if (lp_nr > 0)
5042 if (!stmt_could_throw_p (stmt))
5044 if (verify_nothrow)
5046 error ("statement marked for throw, but doesn%'t");
5047 err2 |= true;
5050 else if (!gsi_one_before_end_p (gsi))
5052 error ("statement marked for throw in middle of block");
5053 err2 |= true;
5057 if (err2)
5058 debug_gimple_stmt (stmt);
5059 err |= err2;
5063 eh_error_found = false;
5064 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5065 if (eh_table)
5066 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5067 (&visited_stmts);
5069 if (err || eh_error_found)
5070 internal_error ("verify_gimple failed");
5072 verify_histograms ();
5073 timevar_pop (TV_TREE_STMT_VERIFY);
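/* Editor's note, an illustrative sketch that is not part of the
   original source: in checking-enabled builds the pass manager is the
   expected caller, conceptually

     verify_gimple_in_cfg (cfun, true);

   after a pass that modified the IL, with VERIFY_NOTHROW choosing
   whether a statement still in the EH table but provably unable to
   throw is diagnosed.  */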
5077 /* Verifies that the flow information is OK. */
5079 static int
5080 gimple_verify_flow_info (void)
5082 int err = 0;
5083 basic_block bb;
5084 gimple_stmt_iterator gsi;
5085 gimple stmt;
5086 edge e;
5087 edge_iterator ei;
5089 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5090 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5092 error ("ENTRY_BLOCK has IL associated with it");
5093 err = 1;
5096 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5097 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5099 error ("EXIT_BLOCK has IL associated with it");
5100 err = 1;
5103 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5104 if (e->flags & EDGE_FALLTHRU)
5106 error ("fallthru to exit from bb %d", e->src->index);
5107 err = 1;
5110 FOR_EACH_BB_FN (bb, cfun)
5112 bool found_ctrl_stmt = false;
5114 stmt = NULL;
5116 /* Skip labels on the start of basic block. */
5117 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5119 tree label;
5120 gimple prev_stmt = stmt;
5122 stmt = gsi_stmt (gsi);
5124 if (gimple_code (stmt) != GIMPLE_LABEL)
5125 break;
5127 label = gimple_label_label (as_a <glabel *> (stmt));
5128 if (prev_stmt && DECL_NONLOCAL (label))
5130 error ("nonlocal label ");
5131 print_generic_expr (stderr, label, 0);
5132 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5133 bb->index);
5134 err = 1;
5137 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5139 error ("EH landing pad label ");
5140 print_generic_expr (stderr, label, 0);
5141 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5142 bb->index);
5143 err = 1;
5146 if (label_to_block (label) != bb)
5148 error ("label ");
5149 print_generic_expr (stderr, label, 0);
5150 fprintf (stderr, " to block does not match in bb %d",
5151 bb->index);
5152 err = 1;
5155 if (decl_function_context (label) != current_function_decl)
5157 error ("label ");
5158 print_generic_expr (stderr, label, 0);
5159 fprintf (stderr, " has incorrect context in bb %d",
5160 bb->index);
5161 err = 1;
5165 /* Verify that the body of basic block BB is free of control flow. */
5166 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5168 gimple stmt = gsi_stmt (gsi);
5170 if (found_ctrl_stmt)
5172 error ("control flow in the middle of basic block %d",
5173 bb->index);
5174 err = 1;
5177 if (stmt_ends_bb_p (stmt))
5178 found_ctrl_stmt = true;
5180 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5182 error ("label ");
5183 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5184 fprintf (stderr, " in the middle of basic block %d", bb->index);
5185 err = 1;
5189 gsi = gsi_last_bb (bb);
5190 if (gsi_end_p (gsi))
5191 continue;
5193 stmt = gsi_stmt (gsi);
5195 if (gimple_code (stmt) == GIMPLE_LABEL)
5196 continue;
5198 err |= verify_eh_edges (stmt);
5200 if (is_ctrl_stmt (stmt))
5202 FOR_EACH_EDGE (e, ei, bb->succs)
5203 if (e->flags & EDGE_FALLTHRU)
5205 error ("fallthru edge after a control statement in bb %d",
5206 bb->index);
5207 err = 1;
5211 if (gimple_code (stmt) != GIMPLE_COND)
5213 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5214 after anything other than an if statement. */
5215 FOR_EACH_EDGE (e, ei, bb->succs)
5216 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5218 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5219 bb->index);
5220 err = 1;
5224 switch (gimple_code (stmt))
5226 case GIMPLE_COND:
5228 edge true_edge;
5229 edge false_edge;
5231 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5233 if (!true_edge
5234 || !false_edge
5235 || !(true_edge->flags & EDGE_TRUE_VALUE)
5236 || !(false_edge->flags & EDGE_FALSE_VALUE)
5237 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5238 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5239 || EDGE_COUNT (bb->succs) >= 3)
5241 error ("wrong outgoing edge flags at end of bb %d",
5242 bb->index);
5243 err = 1;
5246 break;
5248 case GIMPLE_GOTO:
5249 if (simple_goto_p (stmt))
5251 error ("explicit goto at end of bb %d", bb->index);
5252 err = 1;
5254 else
5256 /* FIXME. We should double check that the labels in the
5257 destination blocks have their address taken. */
5258 FOR_EACH_EDGE (e, ei, bb->succs)
5259 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5260 | EDGE_FALSE_VALUE))
5261 || !(e->flags & EDGE_ABNORMAL))
5263 error ("wrong outgoing edge flags at end of bb %d",
5264 bb->index);
5265 err = 1;
5268 break;
5270 case GIMPLE_CALL:
5271 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5272 break;
5273 /* ... fallthru ... */
5274 case GIMPLE_RETURN:
5275 if (!single_succ_p (bb)
5276 || (single_succ_edge (bb)->flags
5277 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5278 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5280 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5281 err = 1;
5283 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5285 error ("return edge does not point to exit in bb %d",
5286 bb->index);
5287 err = 1;
5289 break;
5291 case GIMPLE_SWITCH:
5293 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5294 tree prev;
5295 edge e;
5296 size_t i, n;
5298 n = gimple_switch_num_labels (switch_stmt);
5300 /* Mark all the destination basic blocks. */
5301 for (i = 0; i < n; ++i)
5303 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5304 basic_block label_bb = label_to_block (lab);
5305 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5306 label_bb->aux = (void *)1;
5309 /* Verify that the case labels are sorted. */
5310 prev = gimple_switch_label (switch_stmt, 0);
5311 for (i = 1; i < n; ++i)
5313 tree c = gimple_switch_label (switch_stmt, i);
5314 if (!CASE_LOW (c))
5316 error ("found default case not at the start of "
5317 "case vector");
5318 err = 1;
5319 continue;
5321 if (CASE_LOW (prev)
5322 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5324 error ("case labels not sorted: ");
5325 print_generic_expr (stderr, prev, 0);
5326 fprintf (stderr," is greater than ");
5327 print_generic_expr (stderr, c, 0);
5328 fprintf (stderr," but comes before it.\n");
5329 err = 1;
5331 prev = c;
5333 /* VRP will remove the default case if it can prove it will
5334 never be executed. So do not verify there always exists
5335 a default case here. */
5337 FOR_EACH_EDGE (e, ei, bb->succs)
5339 if (!e->dest->aux)
5341 error ("extra outgoing edge %d->%d",
5342 bb->index, e->dest->index);
5343 err = 1;
5346 e->dest->aux = (void *)2;
5347 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5348 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5350 error ("wrong outgoing edge flags at end of bb %d",
5351 bb->index);
5352 err = 1;
5356 /* Check that we have all of them. */
5357 for (i = 0; i < n; ++i)
5359 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5360 basic_block label_bb = label_to_block (lab);
5362 if (label_bb->aux != (void *)2)
5364 error ("missing edge %i->%i", bb->index, label_bb->index);
5365 err = 1;
5369 FOR_EACH_EDGE (e, ei, bb->succs)
5370 e->dest->aux = (void *)0;
5372 break;
5374 case GIMPLE_EH_DISPATCH:
5375 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5376 break;
5378 default:
5379 break;
5383 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5384 verify_dominators (CDI_DOMINATORS);
5386 return err;
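/* Editor's note, not part of the original source: this routine is not
   called directly; it is installed as the verify_flow_info hook of
   the GIMPLE CFG hooks, so a plain

     verify_flow_info ();

   dispatches here whenever the function is in GIMPLE form.  */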
5390 /* Updates phi nodes after creating a forwarder block joined
5391 by edge FALLTHRU. */
5393 static void
5394 gimple_make_forwarder_block (edge fallthru)
5396 edge e;
5397 edge_iterator ei;
5398 basic_block dummy, bb;
5399 tree var;
5400 gphi_iterator gsi;
5402 dummy = fallthru->src;
5403 bb = fallthru->dest;
5405 if (single_pred_p (bb))
5406 return;
5408 /* If we redirected a branch we must create new PHI nodes at the
5409 start of BB. */
5410 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5412 gphi *phi, *new_phi;
5414 phi = gsi.phi ();
5415 var = gimple_phi_result (phi);
5416 new_phi = create_phi_node (var, bb);
5417 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5418 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5419 UNKNOWN_LOCATION);
5422 /* Add the arguments we have stored on edges. */
5423 FOR_EACH_EDGE (e, ei, bb->preds)
5425 if (e == fallthru)
5426 continue;
5428 flush_pending_stmts (e);
5433 /* Return a non-special label at the head of basic block BB.
5434 Create one if it doesn't exist. */
5436 tree
5437 gimple_block_label (basic_block bb)
5439 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5440 bool first = true;
5441 tree label;
5442 glabel *stmt;
5444 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5446 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5447 if (!stmt)
5448 break;
5449 label = gimple_label_label (stmt);
5450 if (!DECL_NONLOCAL (label))
5452 if (!first)
5453 gsi_move_before (&i, &s);
5454 return label;
5458 label = create_artificial_label (UNKNOWN_LOCATION);
5459 stmt = gimple_build_label (label);
5460 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5461 return label;
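/* Editor's note, an illustrative sketch that is not part of the
   original source: the redirection code below uses this helper to
   materialize jump targets on demand, for instance when retargeting
   switch cases:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   so a block only acquires a label once something actually needs to
   jump to it.  */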
5465 /* Attempt to perform edge redirection by replacing a possibly complex
5466 jump instruction by a goto or by removing the jump completely.
5467 This can apply only if all edges now point to the same block. The
5468 parameters and return values are equivalent to
5469 redirect_edge_and_branch. */
5471 static edge
5472 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5474 basic_block src = e->src;
5475 gimple_stmt_iterator i;
5476 gimple stmt;
5478 /* We can replace or remove a complex jump only when we have exactly
5479 two edges. */
5480 if (EDGE_COUNT (src->succs) != 2
5481 /* Verify that all targets will be TARGET. Specifically, the
5482 edge that is not E must also go to TARGET. */
5483 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5484 return NULL;
5486 i = gsi_last_bb (src);
5487 if (gsi_end_p (i))
5488 return NULL;
5490 stmt = gsi_stmt (i);
5492 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5494 gsi_remove (&i, true);
5495 e = ssa_redirect_edge (e, target);
5496 e->flags = EDGE_FALLTHRU;
5497 return e;
5500 return NULL;
5504 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5505 edge representing the redirected branch. */
5507 static edge
5508 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5510 basic_block bb = e->src;
5511 gimple_stmt_iterator gsi;
5512 edge ret;
5513 gimple stmt;
5515 if (e->flags & EDGE_ABNORMAL)
5516 return NULL;
5518 if (e->dest == dest)
5519 return NULL;
5521 if (e->flags & EDGE_EH)
5522 return redirect_eh_edge (e, dest);
5524 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5526 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5527 if (ret)
5528 return ret;
5531 gsi = gsi_last_bb (bb);
5532 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5534 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5536 case GIMPLE_COND:
5537 /* For COND_EXPR, we only need to redirect the edge. */
5538 break;
5540 case GIMPLE_GOTO:
5541 /* No non-abnormal edges should lead from a non-simple goto, and
5542 simple ones should be represented implicitly. */
5543 gcc_unreachable ();
5545 case GIMPLE_SWITCH:
5547 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5548 tree label = gimple_block_label (dest);
5549 tree cases = get_cases_for_edge (e, switch_stmt);
5551 /* If we have a list of cases associated with E, then use it
5552 as it's a lot faster than walking the entire case vector. */
5553 if (cases)
5555 edge e2 = find_edge (e->src, dest);
5556 tree last, first;
5558 first = cases;
5559 while (cases)
5561 last = cases;
5562 CASE_LABEL (cases) = label;
5563 cases = CASE_CHAIN (cases);
5566 /* If there was already an edge in the CFG, then we need
5567 to move all the cases associated with E to E2. */
5568 if (e2)
5570 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5572 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5573 CASE_CHAIN (cases2) = first;
5575 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5577 else
5579 size_t i, n = gimple_switch_num_labels (switch_stmt);
5581 for (i = 0; i < n; i++)
5583 tree elt = gimple_switch_label (switch_stmt, i);
5584 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5585 CASE_LABEL (elt) = label;
5589 break;
5591 case GIMPLE_ASM:
5593 gasm *asm_stmt = as_a <gasm *> (stmt);
5594 int i, n = gimple_asm_nlabels (asm_stmt);
5595 tree label = NULL;
5597 for (i = 0; i < n; ++i)
5599 tree cons = gimple_asm_label_op (asm_stmt, i);
5600 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5602 if (!label)
5603 label = gimple_block_label (dest);
5604 TREE_VALUE (cons) = label;
5608 /* If we didn't find any label matching the former edge in the
5609 asm labels, we must be redirecting the fallthrough
5610 edge. */
5611 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5613 break;
5615 case GIMPLE_RETURN:
5616 gsi_remove (&gsi, true);
5617 e->flags |= EDGE_FALLTHRU;
5618 break;
5620 case GIMPLE_OMP_RETURN:
5621 case GIMPLE_OMP_CONTINUE:
5622 case GIMPLE_OMP_SECTIONS_SWITCH:
5623 case GIMPLE_OMP_FOR:
5624 /* The edges from OMP constructs can be simply redirected. */
5625 break;
5627 case GIMPLE_EH_DISPATCH:
5628 if (!(e->flags & EDGE_FALLTHRU))
5629 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5630 break;
5632 case GIMPLE_TRANSACTION:
5633 /* The ABORT edge has a stored label associated with it, otherwise
5634 the edges are simply redirectable. */
5635 if (e->flags == 0)
5636 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5637 gimple_block_label (dest));
5638 break;
5640 default:
5641 /* Otherwise it must be a fallthru edge, and we don't need to
5642 do anything besides redirecting it. */
5643 gcc_assert (e->flags & EDGE_FALLTHRU);
5644 break;
5647 /* Update/insert PHI nodes as necessary. */
5649 /* Now update the edges in the CFG. */
5650 e = ssa_redirect_edge (e, dest);
5652 return e;
5655 /* Returns true if it is possible to remove edge E by redirecting
5656 it to the destination of the other edge from E->src. */
5658 static bool
5659 gimple_can_remove_branch_p (const_edge e)
5661 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5662 return false;
5664 return true;
5667 /* Simple wrapper, as we can always redirect fallthru edges. */
5669 static basic_block
5670 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5672 e = gimple_redirect_edge_and_branch (e, dest);
5673 gcc_assert (e);
5675 return NULL;
5679 /* Splits basic block BB after statement STMT (but at least after the
5680 labels). If STMT is NULL, BB is split just after the labels. */
5682 static basic_block
5683 gimple_split_block (basic_block bb, void *stmt)
5685 gimple_stmt_iterator gsi;
5686 gimple_stmt_iterator gsi_tgt;
5687 gimple_seq list;
5688 basic_block new_bb;
5689 edge e;
5690 edge_iterator ei;
5692 new_bb = create_empty_bb (bb);
5694 /* Redirect the outgoing edges. */
5695 new_bb->succs = bb->succs;
5696 bb->succs = NULL;
5697 FOR_EACH_EDGE (e, ei, new_bb->succs)
5698 e->src = new_bb;
5700 /* Get a stmt iterator pointing to the first stmt to move. */
5701 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5702 gsi = gsi_after_labels (bb);
5703 else
5705 gsi = gsi_for_stmt ((gimple) stmt);
5706 gsi_next (&gsi);
5709 /* Move everything from GSI to the new basic block. */
5710 if (gsi_end_p (gsi))
5711 return new_bb;
5713 /* Split the statement list - avoid re-creating new containers as this
5714 brings ugly quadratic memory consumption in the inliner.
5715 (We are still quadratic since we need to update stmt BB pointers,
5716 sadly.) */
5717 gsi_split_seq_before (&gsi, &list);
5718 set_bb_seq (new_bb, list);
5719 for (gsi_tgt = gsi_start (list);
5720 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5721 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5723 return new_bb;
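/* Editor's note, an illustrative sketch that is not part of the
   original source: passes reach this routine through the split_block
   CFG hook; a typical use splits after STMT and continues in the
   freshly created block:

     basic_block new_bb = split_block (bb, stmt)->dest;

   which is exactly how gimple_split_block_before_cond_jump below uses
   its computed split point.  */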
5727 /* Moves basic block BB after block AFTER. */
5729 static bool
5730 gimple_move_block_after (basic_block bb, basic_block after)
5732 if (bb->prev_bb == after)
5733 return true;
5735 unlink_block (bb);
5736 link_block (bb, after);
5738 return true;
5742 /* Return TRUE if block BB has no executable statements, otherwise return
5743 FALSE. */
5745 static bool
5746 gimple_empty_block_p (basic_block bb)
5748 /* BB must have no executable statements. */
5749 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5750 if (phi_nodes (bb))
5751 return false;
5752 if (gsi_end_p (gsi))
5753 return true;
5754 if (is_gimple_debug (gsi_stmt (gsi)))
5755 gsi_next_nondebug (&gsi);
5756 return gsi_end_p (gsi);
5760 /* Split a basic block if it ends with a conditional branch and if the
5761 other part of the block is not empty. */
5763 static basic_block
5764 gimple_split_block_before_cond_jump (basic_block bb)
5766 gimple last, split_point;
5767 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5768 if (gsi_end_p (gsi))
5769 return NULL;
5770 last = gsi_stmt (gsi);
5771 if (gimple_code (last) != GIMPLE_COND
5772 && gimple_code (last) != GIMPLE_SWITCH)
5773 return NULL;
5774 gsi_prev_nondebug (&gsi);
5775 split_point = gsi_stmt (gsi);
5776 return split_block (bb, split_point)->dest;
5780 /* Return true if basic_block can be duplicated. */
5782 static bool
5783 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5785 return true;
5788 /* Create a duplicate of the basic block BB. NOTE: This does not
5789 preserve SSA form. */
5791 static basic_block
5792 gimple_duplicate_bb (basic_block bb)
5794 basic_block new_bb;
5795 gimple_stmt_iterator gsi_tgt;
5797 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5799 /* Copy the PHI nodes. We ignore PHI node arguments here because
5800 the incoming edges have not been set up yet. */
5801 for (gphi_iterator gpi = gsi_start_phis (bb);
5802 !gsi_end_p (gpi);
5803 gsi_next (&gpi))
5805 gphi *phi, *copy;
5806 phi = gpi.phi ();
5807 copy = create_phi_node (NULL_TREE, new_bb);
5808 create_new_def_for (gimple_phi_result (phi), copy,
5809 gimple_phi_result_ptr (copy));
5810 gimple_set_uid (copy, gimple_uid (phi));
5813 gsi_tgt = gsi_start_bb (new_bb);
5814 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5815 !gsi_end_p (gsi);
5816 gsi_next (&gsi))
5818 def_operand_p def_p;
5819 ssa_op_iter op_iter;
5820 tree lhs;
5821 gimple stmt, copy;
5823 stmt = gsi_stmt (gsi);
5824 if (gimple_code (stmt) == GIMPLE_LABEL)
5825 continue;
5827 /* Don't duplicate label debug stmts. */
5828 if (gimple_debug_bind_p (stmt)
5829 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5830 == LABEL_DECL)
5831 continue;
5833 /* Create a new copy of STMT and duplicate STMT's virtual
5834 operands. */
5835 copy = gimple_copy (stmt);
5836 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5838 maybe_duplicate_eh_stmt (copy, stmt);
5839 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5841 /* When copying around a stmt writing into a local non-user
5842 aggregate, make sure it won't share stack slot with other
5843 vars. */
5844 lhs = gimple_get_lhs (stmt);
5845 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5847 tree base = get_base_address (lhs);
5848 if (base
5849 && (TREE_CODE (base) == VAR_DECL
5850 || TREE_CODE (base) == RESULT_DECL)
5851 && DECL_IGNORED_P (base)
5852 && !TREE_STATIC (base)
5853 && !DECL_EXTERNAL (base)
5854 && (TREE_CODE (base) != VAR_DECL
5855 || !DECL_HAS_VALUE_EXPR_P (base)))
5856 DECL_NONSHAREABLE (base) = 1;
5859 /* Create new names for all the definitions created by COPY and
5860 add replacement mappings for each new name. */
5861 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5862 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5865 return new_bb;
5868 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5870 static void
5871 add_phi_args_after_copy_edge (edge e_copy)
5873 basic_block bb, bb_copy = e_copy->src, dest;
5874 edge e;
5875 edge_iterator ei;
5876 gphi *phi, *phi_copy;
5877 tree def;
5878 gphi_iterator psi, psi_copy;
5880 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5881 return;
5883 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5885 if (e_copy->dest->flags & BB_DUPLICATED)
5886 dest = get_bb_original (e_copy->dest);
5887 else
5888 dest = e_copy->dest;
5890 e = find_edge (bb, dest);
5891 if (!e)
5893 /* During loop unrolling the target of the latch edge is copied.
5894 In this case we are not looking for the edge to DEST, but for
5895 the edge to the duplicated block whose original was DEST. */
5896 FOR_EACH_EDGE (e, ei, bb->succs)
5898 if ((e->dest->flags & BB_DUPLICATED)
5899 && get_bb_original (e->dest) == dest)
5900 break;
5903 gcc_assert (e != NULL);
5906 for (psi = gsi_start_phis (e->dest),
5907 psi_copy = gsi_start_phis (e_copy->dest);
5908 !gsi_end_p (psi);
5909 gsi_next (&psi), gsi_next (&psi_copy))
5911 phi = psi.phi ();
5912 phi_copy = psi_copy.phi ();
5913 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5914 add_phi_arg (phi_copy, def, e_copy,
5915 gimple_phi_arg_location_from_edge (phi, e));
5920 /* Basic block BB_COPY was created by code duplication. Add phi node
5921 arguments for edges going out of BB_COPY. The blocks that were
5922 duplicated have BB_DUPLICATED set. */
5924 void
5925 add_phi_args_after_copy_bb (basic_block bb_copy)
5927 edge e_copy;
5928 edge_iterator ei;
5930 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5932 add_phi_args_after_copy_edge (e_copy);
5936 /* Blocks in REGION_COPY array of length N_REGION were created by
5937 duplication of basic blocks. Add phi node arguments for edges
5938 going from these blocks. If E_COPY is not NULL, also add
5939 phi node arguments for its destination. */
5941 void
5942 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5943 edge e_copy)
5945 unsigned i;
5947 for (i = 0; i < n_region; i++)
5948 region_copy[i]->flags |= BB_DUPLICATED;
5950 for (i = 0; i < n_region; i++)
5951 add_phi_args_after_copy_bb (region_copy[i]);
5952 if (e_copy)
5953 add_phi_args_after_copy_edge (e_copy);
5955 for (i = 0; i < n_region; i++)
5956 region_copy[i]->flags &= ~BB_DUPLICATED;
5959 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5960 important exit edge EXIT. By important we mean that no SSA name defined
5961 inside the region is live over the other exit edges of the region. All entry
5962 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5963 to the duplicate of the region. Dominance and loop information is
5964 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5965 UPDATE_DOMINANCE is false then we assume that the caller will update the
5966 dominance information after calling this function. The new basic
5967 blocks are stored to REGION_COPY in the same order they had in REGION,
5968 provided that REGION_COPY is not NULL.
5969 The function returns false if it is unable to copy the region,
5970 true otherwise. */
5972 bool
5973 gimple_duplicate_sese_region (edge entry, edge exit,
5974 basic_block *region, unsigned n_region,
5975 basic_block *region_copy,
5976 bool update_dominance)
5978 unsigned i;
5979 bool free_region_copy = false, copying_header = false;
5980 struct loop *loop = entry->dest->loop_father;
5981 edge exit_copy;
5982 vec<basic_block> doms;
5983 edge redirected;
5984 int total_freq = 0, entry_freq = 0;
5985 gcov_type total_count = 0, entry_count = 0;
5987 if (!can_copy_bbs_p (region, n_region))
5988 return false;
5990 /* Some sanity checking. Note that we do not check for all possible
5991 misuses of the functions. That is, if you ask to copy something weird,
5992 it will work, but the state of the structures probably will not be
5993 correct. */
5994 for (i = 0; i < n_region; i++)
5996 /* We do not handle subloops, i.e. all the blocks must belong to the
5997 same loop. */
5998 if (region[i]->loop_father != loop)
5999 return false;
6001 if (region[i] != entry->dest
6002 && region[i] == loop->header)
6003 return false;
6006 /* In case the function is used for loop header copying (which is the primary
6007 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6008 if (loop->header == entry->dest)
6010 copying_header = true;
6012 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6013 return false;
6015 for (i = 0; i < n_region; i++)
6016 if (region[i] != exit->src
6017 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6018 return false;
6021 initialize_original_copy_tables ();
6023 if (copying_header)
6024 set_loop_copy (loop, loop_outer (loop));
6025 else
6026 set_loop_copy (loop, loop);
6028 if (!region_copy)
6030 region_copy = XNEWVEC (basic_block, n_region);
6031 free_region_copy = true;
6034 /* Record blocks outside the region that are dominated by something
6035 inside. */
6036 if (update_dominance)
6038 doms.create (0);
6039 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6042 if (entry->dest->count)
6044 total_count = entry->dest->count;
6045 entry_count = entry->count;
6046 /* Fix up corner cases, to avoid division by zero or creation of negative
6047 frequencies. */
6048 if (entry_count > total_count)
6049 entry_count = total_count;
6051 else
6053 total_freq = entry->dest->frequency;
6054 entry_freq = EDGE_FREQUENCY (entry);
6055 /* Fix up corner cases, to avoid division by zero or creation of negative
6056 frequencies. */
6057 if (total_freq == 0)
6058 total_freq = 1;
6059 else if (entry_freq > total_freq)
6060 entry_freq = total_freq;
6063 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6064 split_edge_bb_loc (entry), update_dominance);
6065 if (total_count)
6067 scale_bbs_frequencies_gcov_type (region, n_region,
6068 total_count - entry_count,
6069 total_count);
6070 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6071 total_count);
6073 else
6075 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6076 total_freq);
6077 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6080 if (copying_header)
6082 loop->header = exit->dest;
6083 loop->latch = exit->src;
6086 /* Redirect the entry and add the phi node arguments. */
6087 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6088 gcc_assert (redirected != NULL);
6089 flush_pending_stmts (entry);
6091 /* Concerning updating of dominators: We must recount dominators
6092 for entry block and its copy. Anything that is outside of the
6093 region, but was dominated by something inside needs recounting as
6094 well. */
6095 if (update_dominance)
6097 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6098 doms.safe_push (get_bb_original (entry->dest));
6099 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6100 doms.release ();
6103 /* Add the other PHI node arguments. */
6104 add_phi_args_after_copy (region_copy, n_region, NULL);
6106 if (free_region_copy)
6107 free (region_copy);
6109 free_original_copy_tables ();
6110 return true;
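/* Editor's sketch, not part of the original source: loop header
   copying is the primary caller.  Schematically, with ENTRY the
   preheader edge, EXIT the edge that will become the new latch, and
   BBS/N_BBS placeholders for the region blocks gathered by the
   caller:

     if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
                                       NULL, true))
       update_ssa (TODO_update_ssa);

   The SSA web is deliberately left untouched here, hence the
   update_ssa call in the caller.  */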
6113 /* Checks if BB is part of the region defined by N_REGION BBS. */
6114 static bool
6115 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6117 unsigned int n;
6119 for (n = 0; n < n_region; n++)
6121 if (bb == bbs[n])
6122 return true;
6124 return false;
6127 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6128 are stored to REGION_COPY in the same order as they appear
6129 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6130 the region, EXIT an exit from it. The condition guarding EXIT
6131 is moved to ENTRY. Returns true if duplication succeeds, false
6132 otherwise.

   For example,

   some_code;
   if (cond)
     {
     }
   else
     {
     }

   is transformed to

   if (cond)
     {
       some_code;
     }
   else
     {
       some_code;
     }
*/
6156 bool
6157 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6158 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6159 basic_block *region_copy ATTRIBUTE_UNUSED)
6161 unsigned i;
6162 bool free_region_copy = false;
6163 struct loop *loop = exit->dest->loop_father;
6164 struct loop *orig_loop = entry->dest->loop_father;
6165 basic_block switch_bb, entry_bb, nentry_bb;
6166 vec<basic_block> doms;
6167 int total_freq = 0, exit_freq = 0;
6168 gcov_type total_count = 0, exit_count = 0;
6169 edge exits[2], nexits[2], e;
6170 gimple_stmt_iterator gsi;
6171 gimple cond_stmt;
6172 edge sorig, snew;
6173 basic_block exit_bb;
6174 gphi_iterator psi;
6175 gphi *phi;
6176 tree def;
6177 struct loop *target, *aloop, *cloop;
6179 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6180 exits[0] = exit;
6181 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6183 if (!can_copy_bbs_p (region, n_region))
6184 return false;
6186 initialize_original_copy_tables ();
6187 set_loop_copy (orig_loop, loop);
6189 target = loop;
6190 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6192 if (bb_part_of_region_p (aloop->header, region, n_region))
6194 cloop = duplicate_loop (aloop, target);
6195 duplicate_subloops (aloop, cloop);
6199 if (!region_copy)
6201 region_copy = XNEWVEC (basic_block, n_region);
6202 free_region_copy = true;
6205 gcc_assert (!need_ssa_update_p (cfun));
6207 /* Record blocks outside the region that are dominated by something
6208 inside. */
6209 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6211 if (exit->src->count)
6213 total_count = exit->src->count;
6214 exit_count = exit->count;
6215 /* Fix up corner cases, to avoid division by zero or creation of negative
6216 frequencies. */
6217 if (exit_count > total_count)
6218 exit_count = total_count;
6220 else
6222 total_freq = exit->src->frequency;
6223 exit_freq = EDGE_FREQUENCY (exit);
6224 /* Fix up corner cases, to avoid division by zero or creation of negative
6225 frequencies. */
6226 if (total_freq == 0)
6227 total_freq = 1;
6228 if (exit_freq > total_freq)
6229 exit_freq = total_freq;
6232 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6233 split_edge_bb_loc (exit), true);
6234 if (total_count)
6236 scale_bbs_frequencies_gcov_type (region, n_region,
6237 total_count - exit_count,
6238 total_count);
6239 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6240 total_count);
6242 else
6244 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6245 total_freq);
6246 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6249 /* Create the switch block, and put the exit condition to it. */
6250 entry_bb = entry->dest;
6251 nentry_bb = get_bb_copy (entry_bb);
6252 if (!last_stmt (entry->src)
6253 || !stmt_ends_bb_p (last_stmt (entry->src)))
6254 switch_bb = entry->src;
6255 else
6256 switch_bb = split_edge (entry);
6257 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6259 gsi = gsi_last_bb (switch_bb);
6260 cond_stmt = last_stmt (exit->src);
6261 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6262 cond_stmt = gimple_copy (cond_stmt);
6264 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6266 sorig = single_succ_edge (switch_bb);
6267 sorig->flags = exits[1]->flags;
6268 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6270 /* Register the new edge from SWITCH_BB in loop exit lists. */
6271 rescan_loop_exit (snew, true, false);
6273 /* Add the PHI node arguments. */
6274 add_phi_args_after_copy (region_copy, n_region, snew);
6276 /* Get rid of now superfluous conditions and associated edges (and phi node
6277 arguments). */
6278 exit_bb = exit->dest;
6280 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6281 PENDING_STMT (e) = NULL;
6283 /* The latch of ORIG_LOOP was copied, and so was the backedge
6284 to the original header. We redirect this backedge to EXIT_BB. */
6285 for (i = 0; i < n_region; i++)
6286 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6288 gcc_assert (single_succ_edge (region_copy[i]));
6289 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6290 PENDING_STMT (e) = NULL;
6291 for (psi = gsi_start_phis (exit_bb);
6292 !gsi_end_p (psi);
6293 gsi_next (&psi))
6295 phi = psi.phi ();
6296 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6297 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6300 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6301 PENDING_STMT (e) = NULL;
6303 /* Anything that is outside of the region, but was dominated by something
6304 inside needs to update dominance info. */
6305 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6306 doms.release ();
6307 /* Update the SSA web. */
6308 update_ssa (TODO_update_ssa);
6310 if (free_region_copy)
6311 free (region_copy);
6313 free_original_copy_tables ();
6314 return true;
6317 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6318 adding blocks when the dominator traversal reaches EXIT. This
6319 function silently assumes that ENTRY strictly dominates EXIT. */
6321 void
6322 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6323 vec<basic_block> *bbs_p)
6325 basic_block son;
6327 for (son = first_dom_son (CDI_DOMINATORS, entry);
6328 son;
6329 son = next_dom_son (CDI_DOMINATORS, son))
6331 bbs_p->safe_push (son);
6332 if (son != exit)
6333 gather_blocks_in_sese_region (son, exit, bbs_p);
6337 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6338 The duplicates are recorded in VARS_MAP. */
6340 static void
6341 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6342 tree to_context)
6344 tree t = *tp, new_t;
6345 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6347 if (DECL_CONTEXT (t) == to_context)
6348 return;
6350 bool existed;
6351 tree &loc = vars_map->get_or_insert (t, &existed);
6353 if (!existed)
6355 if (SSA_VAR_P (t))
6357 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6358 add_local_decl (f, new_t);
6360 else
6362 gcc_assert (TREE_CODE (t) == CONST_DECL);
6363 new_t = copy_node (t);
6365 DECL_CONTEXT (new_t) = to_context;
6367 loc = new_t;
6369 else
6370 new_t = loc;
6372 *tp = new_t;
6376 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6377 VARS_MAP maps old ssa names and var_decls to the new ones. */
6379 static tree
6380 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6381 tree to_context)
6383 tree new_name;
6385 gcc_assert (!virtual_operand_p (name));
6387 tree *loc = vars_map->get (name);
6389 if (!loc)
6391 tree decl = SSA_NAME_VAR (name);
6392 if (decl)
6394 replace_by_duplicate_decl (&decl, vars_map, to_context);
6395 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6396 decl, SSA_NAME_DEF_STMT (name));
6397 if (SSA_NAME_IS_DEFAULT_DEF (name))
6398 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6399 decl, new_name);
6401 else
6402 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6403 name, SSA_NAME_DEF_STMT (name));
6405 vars_map->put (name, new_name);
6407 else
6408 new_name = *loc;
6410 return new_name;
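/* Illustrative usage sketch, not part of the original file: remap one
   SSA name into the function DEST_FNDECL.  NAME must not be a virtual
   operand, and callers are expected to share a single VARS_MAP across a
   whole region so repeated lookups of the same name return the same
   replacement.  */
#if 0
static tree
example_remap_ssa_name (tree old_name, hash_map<tree, tree> *vars_map,
			tree dest_fndecl)
{
  gcc_assert (TREE_CODE (old_name) == SSA_NAME);
  return replace_ssa_name (old_name, vars_map, dest_fndecl);
}
#endif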
6413 struct move_stmt_d
6415 tree orig_block;
6416 tree new_block;
6417 tree from_context;
6418 tree to_context;
6419 hash_map<tree, tree> *vars_map;
6420 htab_t new_label_map;
6421 hash_map<void *, void *> *eh_map;
6422 bool remap_decls_p;
6425 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6426 contained in *TP if it was previously ORIG_BLOCK, and change the
6427 DECL_CONTEXT of every local variable referenced in *TP. */
6429 static tree
6430 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6432 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6433 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6434 tree t = *tp;
6436 if (EXPR_P (t))
6438 tree block = TREE_BLOCK (t);
6439 if (block == p->orig_block
6440 || (p->orig_block == NULL_TREE
6441 && block != NULL_TREE))
6442 TREE_SET_BLOCK (t, p->new_block);
6443 #ifdef ENABLE_CHECKING
6444 else if (block != NULL_TREE)
6446 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6447 block = BLOCK_SUPERCONTEXT (block);
6448 gcc_assert (block == p->orig_block);
6450 #endif
6452 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6454 if (TREE_CODE (t) == SSA_NAME)
6455 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6456 else if (TREE_CODE (t) == LABEL_DECL)
6458 if (p->new_label_map)
6460 struct tree_map in, *out;
6461 in.base.from = t;
6462 out = (struct tree_map *)
6463 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6464 if (out)
6465 *tp = t = out->to;
6468 DECL_CONTEXT (t) = p->to_context;
6470 else if (p->remap_decls_p)
6472 /* Replace T with its duplicate. T should no longer appear in the
6473 parent function, so this looks wasteful; however, it may appear
6474 in referenced_vars, and more importantly, as virtual operands of
6475 statements, and in alias lists of other variables. It would be
6476 quite difficult to expunge it from all those places. ??? It might
6477 suffice to do this for addressable variables. */
6478 if ((TREE_CODE (t) == VAR_DECL
6479 && !is_global_var (t))
6480 || TREE_CODE (t) == CONST_DECL)
6481 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6483 *walk_subtrees = 0;
6485 else if (TYPE_P (t))
6486 *walk_subtrees = 0;
6488 return NULL_TREE;
6491 /* Helper for move_stmt_r. Given an EH region number for the source
6492 function, map that to the duplicate EH region number in the dest. */
6494 static int
6495 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6497 eh_region old_r, new_r;
6499 old_r = get_eh_region_from_number (old_nr);
6500 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6502 return new_r->index;
6505 /* Similar, but operate on INTEGER_CSTs. */
6507 static tree
6508 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6510 int old_nr, new_nr;
6512 old_nr = tree_to_shwi (old_t_nr);
6513 new_nr = move_stmt_eh_region_nr (old_nr, p);
6515 return build_int_cst (integer_type_node, new_nr);
6518 /* Like move_stmt_op, but for gimple statements.
6520 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6521 contained in the current statement in *GSI_P and change the
6522 DECL_CONTEXT of every local variable referenced in the current
6523 statement. */
6525 static tree
6526 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6527 struct walk_stmt_info *wi)
6529 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6530 gimple stmt = gsi_stmt (*gsi_p);
6531 tree block = gimple_block (stmt);
6533 if (block == p->orig_block
6534 || (p->orig_block == NULL_TREE
6535 && block != NULL_TREE))
6536 gimple_set_block (stmt, p->new_block);
6538 switch (gimple_code (stmt))
6540 case GIMPLE_CALL:
6541 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6543 tree r, fndecl = gimple_call_fndecl (stmt);
6544 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6545 switch (DECL_FUNCTION_CODE (fndecl))
6547 case BUILT_IN_EH_COPY_VALUES:
6548 r = gimple_call_arg (stmt, 1);
6549 r = move_stmt_eh_region_tree_nr (r, p);
6550 gimple_call_set_arg (stmt, 1, r);
6551 /* FALLTHRU */
6553 case BUILT_IN_EH_POINTER:
6554 case BUILT_IN_EH_FILTER:
6555 r = gimple_call_arg (stmt, 0);
6556 r = move_stmt_eh_region_tree_nr (r, p);
6557 gimple_call_set_arg (stmt, 0, r);
6558 break;
6560 default:
6561 break;
6564 break;
6566 case GIMPLE_RESX:
6568 gresx *resx_stmt = as_a <gresx *> (stmt);
6569 int r = gimple_resx_region (resx_stmt);
6570 r = move_stmt_eh_region_nr (r, p);
6571 gimple_resx_set_region (resx_stmt, r);
6573 break;
6575 case GIMPLE_EH_DISPATCH:
6577 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6578 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6579 r = move_stmt_eh_region_nr (r, p);
6580 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6582 break;
6584 case GIMPLE_OMP_RETURN:
6585 case GIMPLE_OMP_CONTINUE:
6586 break;
6587 default:
6588 if (is_gimple_omp (stmt))
6590 /* Do not remap variables inside OMP directives. Variables
6591 referenced in clauses and directive header belong to the
6592 parent function and should not be moved into the child
6593 function. */
6594 bool save_remap_decls_p = p->remap_decls_p;
6595 p->remap_decls_p = false;
6596 *handled_ops_p = true;
6598 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6599 move_stmt_op, wi);
6601 p->remap_decls_p = save_remap_decls_p;
6603 break;
6606 return NULL_TREE;
6609 /* Move basic block BB from function CFUN to function DEST_FN. The
6610 block is moved out of the original linked list and placed after
6611 block AFTER in the new list. Also, the block is removed from the
6612 original array of blocks and placed in DEST_FN's array of blocks.
6613 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6614 updated to reflect the moved edges.
6616 The local variables are remapped to new instances; VARS_MAP is used
6617 to record the mapping. */
6619 static void
6620 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6621 basic_block after, bool update_edge_count_p,
6622 struct move_stmt_d *d)
6624 struct control_flow_graph *cfg;
6625 edge_iterator ei;
6626 edge e;
6627 gimple_stmt_iterator si;
6628 unsigned old_len, new_len;
6630 /* Remove BB from dominance structures. */
6631 delete_from_dominance_info (CDI_DOMINATORS, bb);
6633 /* Move BB from its current loop to the copy in the new function. */
6634 if (current_loops)
6636 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6637 if (new_loop)
6638 bb->loop_father = new_loop;
6641 /* Link BB to the new linked list. */
6642 move_block_after (bb, after);
6644 /* Update the edge count in the corresponding flowgraphs. */
6645 if (update_edge_count_p)
6646 FOR_EACH_EDGE (e, ei, bb->succs)
6648 cfun->cfg->x_n_edges--;
6649 dest_cfun->cfg->x_n_edges++;
6652 /* Remove BB from the original basic block array. */
6653 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6654 cfun->cfg->x_n_basic_blocks--;
6656 /* Grow DEST_CFUN's basic block array if needed. */
6657 cfg = dest_cfun->cfg;
6658 cfg->x_n_basic_blocks++;
6659 if (bb->index >= cfg->x_last_basic_block)
6660 cfg->x_last_basic_block = bb->index + 1;
6662 old_len = vec_safe_length (cfg->x_basic_block_info);
6663 if ((unsigned) cfg->x_last_basic_block >= old_len)
6665 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6666 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6669 (*cfg->x_basic_block_info)[bb->index] = bb;
6671 /* Remap the variables in phi nodes. */
6672 for (gphi_iterator psi = gsi_start_phis (bb);
6673 !gsi_end_p (psi); )
6675 gphi *phi = psi.phi ();
6676 use_operand_p use;
6677 tree op = PHI_RESULT (phi);
6678 ssa_op_iter oi;
6679 unsigned i;
6681 if (virtual_operand_p (op))
6683 /* Remove the phi nodes for virtual operands (alias analysis will be
6684 run for the new function, anyway). */
6685 remove_phi_node (&psi, true);
6686 continue;
6689 SET_PHI_RESULT (phi,
6690 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6691 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6693 op = USE_FROM_PTR (use);
6694 if (TREE_CODE (op) == SSA_NAME)
6695 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6698 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6700 location_t locus = gimple_phi_arg_location (phi, i);
6701 tree block = LOCATION_BLOCK (locus);
6703 if (locus == UNKNOWN_LOCATION)
6704 continue;
6705 if (d->orig_block == NULL_TREE || block == d->orig_block)
6707 if (d->new_block == NULL_TREE)
6708 locus = LOCATION_LOCUS (locus);
6709 else
6710 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6711 gimple_phi_arg_set_location (phi, i, locus);
6715 gsi_next (&psi);
6718 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6720 gimple stmt = gsi_stmt (si);
6721 struct walk_stmt_info wi;
6723 memset (&wi, 0, sizeof (wi));
6724 wi.info = d;
6725 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6727 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6729 tree label = gimple_label_label (label_stmt);
6730 int uid = LABEL_DECL_UID (label);
6732 gcc_assert (uid > -1);
6734 old_len = vec_safe_length (cfg->x_label_to_block_map);
6735 if (old_len <= (unsigned) uid)
6737 new_len = 3 * uid / 2 + 1;
6738 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6741 (*cfg->x_label_to_block_map)[uid] = bb;
6742 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6744 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6746 if (uid >= dest_cfun->cfg->last_label_uid)
6747 dest_cfun->cfg->last_label_uid = uid + 1;
6750 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6751 remove_stmt_from_eh_lp_fn (cfun, stmt);
6753 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6754 gimple_remove_stmt_histograms (cfun, stmt);
6756 /* We cannot leave any operands allocated from the operand caches of
6757 the current function. */
6758 free_stmt_operands (cfun, stmt);
6759 push_cfun (dest_cfun);
6760 update_stmt (stmt);
6761 pop_cfun ();
6764 FOR_EACH_EDGE (e, ei, bb->succs)
6765 if (e->goto_locus != UNKNOWN_LOCATION)
6767 tree block = LOCATION_BLOCK (e->goto_locus);
6768 if (d->orig_block == NULL_TREE
6769 || block == d->orig_block)
6770 e->goto_locus = d->new_block ?
6771 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6772 LOCATION_LOCUS (e->goto_locus);
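/* Illustrative usage sketch, not part of the original file, mirroring
   the setup done by move_sese_region_to_fn below: populate a
   move_stmt_d descriptor and move a single block.  NEW_LABEL_MAP and
   EH_MAP may stay NULL when the block contains no labels or EH
   statements that need remapping.  */
#if 0
static void
example_move_one_block (struct function *dest_cfun, basic_block bb,
			basic_block after, tree orig_block)
{
  struct move_stmt_d d;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.remap_decls_p = true;

  move_block_to_fn (dest_cfun, bb, after, /*update_edge_count_p=*/true, &d);
}
#endif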
6776 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6777 the outermost EH region. Use REGION as the incoming base EH region. */
6779 static eh_region
6780 find_outermost_region_in_block (struct function *src_cfun,
6781 basic_block bb, eh_region region)
6783 gimple_stmt_iterator si;
6785 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6787 gimple stmt = gsi_stmt (si);
6788 eh_region stmt_region;
6789 int lp_nr;
6791 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6792 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6793 if (stmt_region)
6795 if (region == NULL)
6796 region = stmt_region;
6797 else if (stmt_region != region)
6799 region = eh_region_outermost (src_cfun, stmt_region, region);
6800 gcc_assert (region != NULL);
6805 return region;
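/* Callback for duplicate_eh_regions: create an artificial label to
   replace DECL in the new function and record the mapping in the
   tree_map hash table passed as DATA.  */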
6808 static tree
6809 new_label_mapper (tree decl, void *data)
6811 htab_t hash = (htab_t) data;
6812 struct tree_map *m;
6813 void **slot;
6815 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6817 m = XNEW (struct tree_map);
6818 m->hash = DECL_UID (decl);
6819 m->base.from = decl;
6820 m->to = create_artificial_label (UNKNOWN_LOCATION);
6821 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6822 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6823 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6825 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6826 gcc_assert (*slot == NULL);
6828 *slot = m;
6830 return m->to;
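/* Illustrative usage sketch, not part of the original file, mirroring
   move_sese_region_to_fn below: NEW_LABEL_MAPPER is handed to
   duplicate_eh_regions as a callback, and records every old->new label
   pair in the tree_map hash table passed as its DATA argument.  */
#if 0
static hash_map<void *, void *> *
example_duplicate_eh (struct function *src_cfun, eh_region region,
		      htab_t *new_label_map_out)
{
  htab_t new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
  *new_label_map_out = new_label_map;
  return duplicate_eh_regions (src_cfun, region, 0,
			       new_label_mapper, new_label_map);
}
#endif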
6833 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6834 subblocks. */
6836 static void
6837 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6838 tree to_context)
6840 tree *tp, t;
6842 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6844 t = *tp;
6845 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6846 continue;
6847 replace_by_duplicate_decl (&t, vars_map, to_context);
6848 if (t != *tp)
6850 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6852 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6853 DECL_HAS_VALUE_EXPR_P (t) = 1;
6855 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6856 *tp = t;
6860 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6861 replace_block_vars_by_duplicates (block, vars_map, to_context);
6864 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6865 from FN1 to FN2. */
6867 static void
6868 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6869 struct loop *loop)
6871 /* Discard it from the old loop array. */
6872 (*get_loops (fn1))[loop->num] = NULL;
6874 /* Place it in the new loop array, assigning it a new number. */
6875 loop->num = number_of_loops (fn2);
6876 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6878 /* Recurse to children. */
6879 for (loop = loop->inner; loop; loop = loop->next)
6880 fixup_loop_arrays_after_move (fn1, fn2, loop);
6883 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6884 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6886 DEBUG_FUNCTION void
6887 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6889 basic_block bb;
6890 edge_iterator ei;
6891 edge e;
6892 bitmap bbs = BITMAP_ALLOC (NULL);
6893 int i;
6895 gcc_assert (entry != NULL);
6896 gcc_assert (entry != exit);
6897 gcc_assert (bbs_p != NULL);
6899 gcc_assert (bbs_p->length () > 0);
6901 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6902 bitmap_set_bit (bbs, bb->index);
6904 gcc_assert (bitmap_bit_p (bbs, entry->index));
6905 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6907 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6909 if (bb == entry)
6911 gcc_assert (single_pred_p (entry));
6912 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6914 else
6915 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6917 e = ei_edge (ei);
6918 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6921 if (bb == exit)
6923 gcc_assert (single_succ_p (exit));
6924 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6926 else
6927 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6929 e = ei_edge (ei);
6930 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6934 BITMAP_FREE (bbs);
6938 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6939 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6940 single basic block in the original CFG and the new basic block is
6941 returned. DEST_CFUN must not have a CFG yet.
6943 Note that the region need not be a pure SESE region. Blocks inside
6944 the region may contain calls to abort/exit. The only restriction
6945 is that ENTRY_BB should be the only entry point and it must
6946 dominate EXIT_BB.
6948 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6949 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6950 to the new function.
6952 All local variables referenced in the region are assumed to be in
6953 the corresponding BLOCK_VARS and unexpanded variable lists
6954 associated with DEST_CFUN. */
6956 basic_block
6957 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6958 basic_block exit_bb, tree orig_block)
6960 vec<basic_block> bbs, dom_bbs;
6961 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6962 basic_block after, bb, *entry_pred, *exit_succ, abb;
6963 struct function *saved_cfun = cfun;
6964 int *entry_flag, *exit_flag;
6965 unsigned *entry_prob, *exit_prob;
6966 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6967 edge e;
6968 edge_iterator ei;
6969 htab_t new_label_map;
6970 hash_map<void *, void *> *eh_map;
6971 struct loop *loop = entry_bb->loop_father;
6972 struct loop *loop0 = get_loop (saved_cfun, 0);
6973 struct move_stmt_d d;
6975 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6976 region. */
6977 gcc_assert (entry_bb != exit_bb
6978 && (!exit_bb
6979 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6981 /* Collect all the blocks in the region. Manually add ENTRY_BB
6982 because it won't be added by gather_blocks_in_sese_region. */
6983 bbs.create (0);
6984 bbs.safe_push (entry_bb);
6985 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6986 #ifdef ENABLE_CHECKING
6987 verify_sese (entry_bb, exit_bb, &bbs);
6988 #endif
6990 /* The blocks that used to be dominated by something in BBS will now be
6991 dominated by the new block. */
6992 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6993 bbs.address (),
6994 bbs.length ());
6996 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6997 the predecessor edges to ENTRY_BB and the successor edges to
6998 EXIT_BB so that we can re-attach them to the new basic block that
6999 will replace the region. */
7000 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7001 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7002 entry_flag = XNEWVEC (int, num_entry_edges);
7003 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7004 i = 0;
7005 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7007 entry_prob[i] = e->probability;
7008 entry_flag[i] = e->flags;
7009 entry_pred[i++] = e->src;
7010 remove_edge (e);
7013 if (exit_bb)
7015 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7016 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7017 exit_flag = XNEWVEC (int, num_exit_edges);
7018 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7019 i = 0;
7020 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7022 exit_prob[i] = e->probability;
7023 exit_flag[i] = e->flags;
7024 exit_succ[i++] = e->dest;
7025 remove_edge (e);
7028 else
7030 num_exit_edges = 0;
7031 exit_succ = NULL;
7032 exit_flag = NULL;
7033 exit_prob = NULL;
7036 /* Switch context to the child function to initialize DEST_FN's CFG. */
7037 gcc_assert (dest_cfun->cfg == NULL);
7038 push_cfun (dest_cfun);
7040 init_empty_tree_cfg ();
7042 /* Initialize EH information for the new function. */
7043 eh_map = NULL;
7044 new_label_map = NULL;
7045 if (saved_cfun->eh)
7047 eh_region region = NULL;
7049 FOR_EACH_VEC_ELT (bbs, i, bb)
7050 region = find_outermost_region_in_block (saved_cfun, bb, region);
7052 init_eh_for_function ();
7053 if (region != NULL)
7055 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7056 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7057 new_label_mapper, new_label_map);
7061 /* Initialize an empty loop tree. */
7062 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7063 init_loops_structure (dest_cfun, loops, 1);
7064 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7065 set_loops_for_fn (dest_cfun, loops);
7067 /* Move the outlined loop tree part. */
7068 num_nodes = bbs.length ();
7069 FOR_EACH_VEC_ELT (bbs, i, bb)
7071 if (bb->loop_father->header == bb)
7073 struct loop *this_loop = bb->loop_father;
7074 struct loop *outer = loop_outer (this_loop);
7075 if (outer == loop
7076 /* If the SESE region contains some bbs ending with
7077 a noreturn call, those are considered to belong
7078 to the outermost loop in saved_cfun, rather than
7079 the entry_bb's loop_father. */
7080 || outer == loop0)
7082 if (outer != loop)
7083 num_nodes -= this_loop->num_nodes;
7084 flow_loop_tree_node_remove (bb->loop_father);
7085 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7086 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7089 else if (bb->loop_father == loop0 && loop0 != loop)
7090 num_nodes--;
7092 /* Remove loop exits from the outlined region. */
7093 if (loops_for_fn (saved_cfun)->exits)
7094 FOR_EACH_EDGE (e, ei, bb->succs)
7096 struct loops *l = loops_for_fn (saved_cfun);
7097 loop_exit **slot
7098 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7099 NO_INSERT);
7100 if (slot)
7101 l->exits->clear_slot (slot);
7106 /* Adjust the number of blocks in the tree root of the outlined part. */
7107 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7109 /* Setup a mapping to be used by move_block_to_fn. */
7110 loop->aux = current_loops->tree_root;
7111 loop0->aux = current_loops->tree_root;
7113 pop_cfun ();
7115 /* Move blocks from BBS into DEST_CFUN. */
7116 gcc_assert (bbs.length () >= 2);
7117 after = dest_cfun->cfg->x_entry_block_ptr;
7118 hash_map<tree, tree> vars_map;
7120 memset (&d, 0, sizeof (d));
7121 d.orig_block = orig_block;
7122 d.new_block = DECL_INITIAL (dest_cfun->decl);
7123 d.from_context = cfun->decl;
7124 d.to_context = dest_cfun->decl;
7125 d.vars_map = &vars_map;
7126 d.new_label_map = new_label_map;
7127 d.eh_map = eh_map;
7128 d.remap_decls_p = true;
7130 FOR_EACH_VEC_ELT (bbs, i, bb)
7132 /* No need to update edge counts on the last block. It has
7133 already been updated earlier when we detached the region from
7134 the original CFG. */
7135 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7136 after = bb;
7139 loop->aux = NULL;
7140 loop0->aux = NULL;
7141 /* Loop sizes are no longer correct, fix them up. */
7142 loop->num_nodes -= num_nodes;
7143 for (struct loop *outer = loop_outer (loop);
7144 outer; outer = loop_outer (outer))
7145 outer->num_nodes -= num_nodes;
7146 loop0->num_nodes -= bbs.length () - num_nodes;
7148 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7150 struct loop *aloop;
7151 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7152 if (aloop != NULL)
7154 if (aloop->simduid)
7156 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7157 d.to_context);
7158 dest_cfun->has_simduid_loops = true;
7160 if (aloop->force_vectorize)
7161 dest_cfun->has_force_vectorize_loops = true;
7165 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7166 if (orig_block)
7168 tree block;
7169 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7170 == NULL_TREE);
7171 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7172 = BLOCK_SUBBLOCKS (orig_block);
7173 for (block = BLOCK_SUBBLOCKS (orig_block);
7174 block; block = BLOCK_CHAIN (block))
7175 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7176 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7179 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7180 &vars_map, dest_cfun->decl);
7182 if (new_label_map)
7183 htab_delete (new_label_map);
7184 if (eh_map)
7185 delete eh_map;
7187 /* Rewire the entry and exit blocks. The successor to the entry
7188 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7189 the child function. Similarly, the predecessor of DEST_FN's
7190 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7191 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7192 various CFG manipulation functions get to the right CFG.
7194 FIXME, this is silly. The CFG ought to become a parameter to
7195 these helpers. */
7196 push_cfun (dest_cfun);
7197 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7198 if (exit_bb)
7199 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7200 pop_cfun ();
7202 /* Back in the original function, the SESE region has disappeared,
7203 create a new basic block in its place. */
7204 bb = create_empty_bb (entry_pred[0]);
7205 if (current_loops)
7206 add_bb_to_loop (bb, loop);
7207 for (i = 0; i < num_entry_edges; i++)
7209 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7210 e->probability = entry_prob[i];
7213 for (i = 0; i < num_exit_edges; i++)
7215 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7216 e->probability = exit_prob[i];
7219 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7220 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7221 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7222 dom_bbs.release ();
7224 if (exit_bb)
7226 free (exit_prob);
7227 free (exit_flag);
7228 free (exit_succ);
7230 free (entry_prob);
7231 free (entry_flag);
7232 free (entry_pred);
7233 bbs.release ();
7235 return bb;
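/* Illustrative usage sketch, not part of the original file: how an
   outlining pass (e.g. the OMP expander) might call the function above.
   CHILD_FN, ENTRY_BB, EXIT_BB and BLOCK are placeholders supplied by
   the caller.  */
#if 0
static basic_block
example_outline_region (tree child_fn, basic_block entry_bb,
			basic_block exit_bb, tree block)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* The returned block replaces the whole region in the parent CFG.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
}
#endif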
7239 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7242 void
7243 dump_function_to_file (tree fndecl, FILE *file, int flags)
7245 tree arg, var, old_current_fndecl = current_function_decl;
7246 struct function *dsf;
7247 bool ignore_topmost_bind = false, any_var = false;
7248 basic_block bb;
7249 tree chain;
7250 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7251 && decl_is_tm_clone (fndecl));
7252 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7254 current_function_decl = fndecl;
7255 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7257 arg = DECL_ARGUMENTS (fndecl);
7258 while (arg)
7260 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7261 fprintf (file, " ");
7262 print_generic_expr (file, arg, dump_flags);
7263 if (flags & TDF_VERBOSE)
7264 print_node (file, "", arg, 4);
7265 if (DECL_CHAIN (arg))
7266 fprintf (file, ", ");
7267 arg = DECL_CHAIN (arg);
7269 fprintf (file, ")\n");
7271 if (flags & TDF_VERBOSE)
7272 print_node (file, "", fndecl, 2);
7274 dsf = DECL_STRUCT_FUNCTION (fndecl);
7275 if (dsf && (flags & TDF_EH))
7276 dump_eh_tree (file, dsf);
7278 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7280 dump_node (fndecl, TDF_SLIM | flags, file);
7281 current_function_decl = old_current_fndecl;
7282 return;
7285 /* When GIMPLE is lowered, the variables are no longer available in
7286 BIND_EXPRs, so display them separately. */
7287 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7289 unsigned ix;
7290 ignore_topmost_bind = true;
7292 fprintf (file, "{\n");
7293 if (!vec_safe_is_empty (fun->local_decls))
7294 FOR_EACH_LOCAL_DECL (fun, ix, var)
7296 print_generic_decl (file, var, flags);
7297 if (flags & TDF_VERBOSE)
7298 print_node (file, "", var, 4);
7299 fprintf (file, "\n");
7301 any_var = true;
7303 if (gimple_in_ssa_p (cfun))
7304 for (ix = 1; ix < num_ssa_names; ++ix)
7306 tree name = ssa_name (ix);
7307 if (name && !SSA_NAME_VAR (name))
7309 fprintf (file, " ");
7310 print_generic_expr (file, TREE_TYPE (name), flags);
7311 fprintf (file, " ");
7312 print_generic_expr (file, name, flags);
7313 fprintf (file, ";\n");
7315 any_var = true;
7320 if (fun && fun->decl == fndecl
7321 && fun->cfg
7322 && basic_block_info_for_fn (fun))
7324 /* If the CFG has been built, emit a CFG-based dump. */
7325 if (!ignore_topmost_bind)
7326 fprintf (file, "{\n");
7328 if (any_var && n_basic_blocks_for_fn (fun))
7329 fprintf (file, "\n");
7331 FOR_EACH_BB_FN (bb, fun)
7332 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7334 fprintf (file, "}\n");
7336 else if (DECL_SAVED_TREE (fndecl) == NULL)
7338 /* The function is now in GIMPLE form but the CFG has not been
7339 built yet. Emit the single sequence of GIMPLE statements
7340 that make up its body. */
7341 gimple_seq body = gimple_body (fndecl);
7343 if (gimple_seq_first_stmt (body)
7344 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7345 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7346 print_gimple_seq (file, body, 0, flags);
7347 else
7349 if (!ignore_topmost_bind)
7350 fprintf (file, "{\n");
7352 if (any_var)
7353 fprintf (file, "\n");
7355 print_gimple_seq (file, body, 2, flags);
7356 fprintf (file, "}\n");
7359 else
7361 int indent;
7363 /* Make a tree based dump. */
7364 chain = DECL_SAVED_TREE (fndecl);
7365 if (chain && TREE_CODE (chain) == BIND_EXPR)
7367 if (ignore_topmost_bind)
7369 chain = BIND_EXPR_BODY (chain);
7370 indent = 2;
7372 else
7373 indent = 0;
7375 else
7377 if (!ignore_topmost_bind)
7378 fprintf (file, "{\n");
7379 indent = 2;
7382 if (any_var)
7383 fprintf (file, "\n");
7385 print_generic_stmt_indented (file, chain, flags, indent);
7386 if (ignore_topmost_bind)
7387 fprintf (file, "}\n");
7390 if (flags & TDF_ENUMERATE_LOCALS)
7391 dump_enumerated_decls (file, flags);
7392 fprintf (file, "\n\n");
7394 current_function_decl = old_current_fndecl;
7397 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7399 DEBUG_FUNCTION void
7400 debug_function (tree fn, int flags)
7402 dump_function_to_file (fn, stderr, flags);
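/* Illustrative usage sketch, not part of the original file: typical
   invocations from within a debugger session.  The TDF_* flags (see
   dumpfile.h) may be OR'ed together.  */
#if 0
debug_function (current_function_decl, TDF_DETAILS);
dump_function_to_file (current_function_decl, stderr,
		       TDF_VOPS | TDF_LINENO);
#endif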
7406 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7408 static void
7409 print_pred_bbs (FILE *file, basic_block bb)
7411 edge e;
7412 edge_iterator ei;
7414 FOR_EACH_EDGE (e, ei, bb->preds)
7415 fprintf (file, "bb_%d ", e->src->index);
7419 /* Print on FILE the indexes for the successors of basic_block BB. */
7421 static void
7422 print_succ_bbs (FILE *file, basic_block bb)
7424 edge e;
7425 edge_iterator ei;
7427 FOR_EACH_EDGE (e, ei, bb->succs)
7428 fprintf (file, "bb_%d ", e->dest->index);
7431 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7433 void
7434 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7436 char *s_indent = (char *) alloca ((size_t) indent + 1);
7437 memset ((void *) s_indent, ' ', (size_t) indent);
7438 s_indent[indent] = '\0';
7440 /* Print basic_block's header. */
7441 if (verbosity >= 2)
7443 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7444 print_pred_bbs (file, bb);
7445 fprintf (file, "}, succs = {");
7446 print_succ_bbs (file, bb);
7447 fprintf (file, "})\n");
7450 /* Print basic_block's body. */
7451 if (verbosity >= 3)
7453 fprintf (file, "%s {\n", s_indent);
7454 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7455 fprintf (file, "%s }\n", s_indent);
7459 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7461 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7462 VERBOSITY level, this outputs the contents of the loop or just its
7463 structure. */
7465 static void
7466 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7468 char *s_indent;
7469 basic_block bb;
7471 if (loop == NULL)
7472 return;
7474 s_indent = (char *) alloca ((size_t) indent + 1);
7475 memset ((void *) s_indent, ' ', (size_t) indent);
7476 s_indent[indent] = '\0';
7478 /* Print loop's header. */
7479 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7480 if (loop->header)
7481 fprintf (file, "header = %d", loop->header->index);
7482 else
7484 fprintf (file, "deleted)\n");
7485 return;
7487 if (loop->latch)
7488 fprintf (file, ", latch = %d", loop->latch->index);
7489 else
7490 fprintf (file, ", multiple latches");
7491 fprintf (file, ", niter = ");
7492 print_generic_expr (file, loop->nb_iterations, 0);
7494 if (loop->any_upper_bound)
7496 fprintf (file, ", upper_bound = ");
7497 print_decu (loop->nb_iterations_upper_bound, file);
7500 if (loop->any_estimate)
7502 fprintf (file, ", estimate = ");
7503 print_decu (loop->nb_iterations_estimate, file);
7505 fprintf (file, ")\n");
7507 /* Print loop's body. */
7508 if (verbosity >= 1)
7510 fprintf (file, "%s{\n", s_indent);
7511 FOR_EACH_BB_FN (bb, cfun)
7512 if (bb->loop_father == loop)
7513 print_loops_bb (file, bb, indent, verbosity);
7515 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7516 fprintf (file, "%s}\n", s_indent);
7520 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7521 spaces. Depending on the VERBOSITY level, this outputs the contents
7522 of the loop or just its structure. */
7524 static void
7525 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7526 int verbosity)
7528 if (loop == NULL)
7529 return;
7531 print_loop (file, loop, indent, verbosity);
7532 print_loop_and_siblings (file, loop->next, indent, verbosity);
7535 /* Starting from the entry point of the program, pretty print on FILE
7536 the structure of every loop in the current function. */
7538 void
7539 print_loops (FILE *file, int verbosity)
7541 basic_block bb;
7543 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7544 if (bb && bb->loop_father)
7545 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7548 /* Dump a loop. */
7550 DEBUG_FUNCTION void
7551 debug (struct loop &ref)
7553 print_loop (stderr, &ref, 0, /*verbosity*/0);
7556 DEBUG_FUNCTION void
7557 debug (struct loop *ptr)
7559 if (ptr)
7560 debug (*ptr);
7561 else
7562 fprintf (stderr, "<nil>\n");
7565 /* Dump a loop verbosely. */
7567 DEBUG_FUNCTION void
7568 debug_verbose (struct loop &ref)
7570 print_loop (stderr, &ref, 0, /*verbosity*/3);
7573 DEBUG_FUNCTION void
7574 debug_verbose (struct loop *ptr)
7576 if (ptr)
7577 debug_verbose (*ptr);
7578 else
7579 fprintf (stderr, "<nil>\n");
7583 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7585 DEBUG_FUNCTION void
7586 debug_loops (int verbosity)
7588 print_loops (stderr, verbosity);
7591 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7593 DEBUG_FUNCTION void
7594 debug_loop (struct loop *loop, int verbosity)
7596 print_loop (stderr, loop, 0, verbosity);
7599 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7600 level. */
7602 DEBUG_FUNCTION void
7603 debug_loop_num (unsigned num, int verbosity)
7605 debug_loop (get_loop (cfun, num), verbosity);
7608 /* Return true if BB ends with a call, possibly followed by some
7609 instructions that must stay with the call. Return false
7610 otherwise. */
7612 static bool
7613 gimple_block_ends_with_call_p (basic_block bb)
7615 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7616 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7620 /* Return true if BB ends with a conditional branch. Return false
7621 otherwise. */
7623 static bool
7624 gimple_block_ends_with_condjump_p (const_basic_block bb)
7626 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7627 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7631 /* Return true if we need to add fake edge to exit at statement T.
7632 Helper function for gimple_flow_call_edges_add. */
7634 static bool
7635 need_fake_edge_p (gimple t)
7637 tree fndecl = NULL_TREE;
7638 int call_flags = 0;
7640 /* NORETURN and LONGJMP calls already have an edge to exit.
7641 CONST and PURE calls do not need one.
7642 We don't currently check for CONST and PURE here, although
7643 it would be a good idea, because those attributes are
7644 figured out from the RTL in mark_constant_function, and
7645 the counter incrementation code from -fprofile-arcs
7646 leads to different results from -fbranch-probabilities. */
7647 if (is_gimple_call (t))
7649 fndecl = gimple_call_fndecl (t);
7650 call_flags = gimple_call_flags (t);
7653 if (is_gimple_call (t)
7654 && fndecl
7655 && DECL_BUILT_IN (fndecl)
7656 && (call_flags & ECF_NOTHROW)
7657 && !(call_flags & ECF_RETURNS_TWICE)
7658 /* fork() doesn't really return twice, but the effect of
7659 wrapping it in __gcov_fork() which calls __gcov_flush()
7660 and clears the counters before forking has the same
7661 effect as returning twice. Force a fake edge. */
7662 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7663 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7664 return false;
7666 if (is_gimple_call (t))
7668 edge_iterator ei;
7669 edge e;
7670 basic_block bb;
7672 if (!(call_flags & ECF_NORETURN))
7673 return true;
7675 bb = gimple_bb (t);
7676 FOR_EACH_EDGE (e, ei, bb->succs)
7677 if ((e->flags & EDGE_FAKE) == 0)
7678 return true;
7681 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7682 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7683 return true;
7685 return false;
7689 /* Add fake edges to the function exit for any non-constant and
7690 non-noreturn calls (or noreturn calls with EH/abnormal edges) and for
7691 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
7692 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7693 that were split.
7695 The goal is to expose cases in which entering a basic block does
7696 not imply that all subsequent instructions must be executed. */
7698 static int
7699 gimple_flow_call_edges_add (sbitmap blocks)
7701 int i;
7702 int blocks_split = 0;
7703 int last_bb = last_basic_block_for_fn (cfun);
7704 bool check_last_block = false;
7706 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7707 return 0;
7709 if (! blocks)
7710 check_last_block = true;
7711 else
7712 check_last_block = bitmap_bit_p (blocks,
7713 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7715 /* In the last basic block, before epilogue generation, there will be
7716 a fallthru edge to EXIT. Special care is required if the last insn
7717 of the last basic block is a call because make_edge folds duplicate
7718 edges, which would result in the fallthru edge also being marked
7719 fake, which would result in the fallthru edge being removed by
7720 remove_fake_edges, which would result in an invalid CFG.
7722 Moreover, we can't elide the outgoing fake edge, since the block
7723 profiler needs to take this into account in order to solve the minimal
7724 spanning tree in the case that the call doesn't return.
7726 Handle this by adding a dummy instruction in a new last basic block. */
7727 if (check_last_block)
7729 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7730 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7731 gimple t = NULL;
7733 if (!gsi_end_p (gsi))
7734 t = gsi_stmt (gsi);
7736 if (t && need_fake_edge_p (t))
7738 edge e;
7740 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7741 if (e)
7743 gsi_insert_on_edge (e, gimple_build_nop ());
7744 gsi_commit_edge_inserts ();
7749 /* Now add fake edges to the function exit for any non-constant
7750 calls since there is no way that we can determine if they will
7751 return or not... */
7752 for (i = 0; i < last_bb; i++)
7754 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7755 gimple_stmt_iterator gsi;
7756 gimple stmt, last_stmt;
7758 if (!bb)
7759 continue;
7761 if (blocks && !bitmap_bit_p (blocks, i))
7762 continue;
7764 gsi = gsi_last_nondebug_bb (bb);
7765 if (!gsi_end_p (gsi))
7767 last_stmt = gsi_stmt (gsi);
7770 stmt = gsi_stmt (gsi);
7771 if (need_fake_edge_p (stmt))
7773 edge e;
7775 /* The handling above of the final block before the
7776 epilogue should be enough to verify that there is
7777 no edge to the exit block in CFG already.
7778 Calling make_edge in such case would cause us to
7779 mark that edge as fake and remove it later. */
7780 #ifdef ENABLE_CHECKING
7781 if (stmt == last_stmt)
7783 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7784 gcc_assert (e == NULL);
7786 #endif
7788 /* Note that the following may create a new basic block
7789 and renumber the existing basic blocks. */
7790 if (stmt != last_stmt)
7792 e = split_block (bb, stmt);
7793 if (e)
7794 blocks_split++;
7796 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7798 gsi_prev (&gsi);
7800 while (!gsi_end_p (gsi));
7804 if (blocks_split)
7805 verify_flow_info ();
7807 return blocks_split;
7810 /* Removes edge E and all the blocks dominated by it, and updates dominance
7811 information. The IL in E->src needs to be updated separately.
7812 If dominance info is not available, only the edge E is removed. */
7814 void
7815 remove_edge_and_dominated_blocks (edge e)
7817 vec<basic_block> bbs_to_remove = vNULL;
7818 vec<basic_block> bbs_to_fix_dom = vNULL;
7819 bitmap df, df_idom;
7820 edge f;
7821 edge_iterator ei;
7822 bool none_removed = false;
7823 unsigned i;
7824 basic_block bb, dbb;
7825 bitmap_iterator bi;
7827 /* If we are removing a path inside a non-root loop, that may change
7828 loop ownership of blocks or remove loops. Mark loops for fixup. */
7829 if (current_loops
7830 && loop_outer (e->src->loop_father) != NULL
7831 && e->src->loop_father == e->dest->loop_father)
7832 loops_state_set (LOOPS_NEED_FIXUP);
7834 if (!dom_info_available_p (CDI_DOMINATORS))
7836 remove_edge (e);
7837 return;
7840 /* No updating is needed for edges to exit. */
7841 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7843 if (cfgcleanup_altered_bbs)
7844 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7845 remove_edge (e);
7846 return;
7849 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7850 that is not dominated by E->dest, then this set is empty. Otherwise,
7851 all the basic blocks dominated by E->dest are removed.
7853 Also, to DF_IDOM we store the immediate dominators of the blocks in
7854 the dominance frontier of E (i.e., of the successors of the
7855 removed blocks, if there are any, and of E->dest otherwise). */
7856 FOR_EACH_EDGE (f, ei, e->dest->preds)
7858 if (f == e)
7859 continue;
7861 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7863 none_removed = true;
7864 break;
7868 df = BITMAP_ALLOC (NULL);
7869 df_idom = BITMAP_ALLOC (NULL);
7871 if (none_removed)
7872 bitmap_set_bit (df_idom,
7873 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7874 else
7876 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7877 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7879 FOR_EACH_EDGE (f, ei, bb->succs)
7881 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7882 bitmap_set_bit (df, f->dest->index);
7885 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7886 bitmap_clear_bit (df, bb->index);
7888 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7890 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7891 bitmap_set_bit (df_idom,
7892 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7896 if (cfgcleanup_altered_bbs)
7898 /* Record the set of the altered basic blocks. */
7899 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7900 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7903 /* Remove E and the cancelled blocks. */
7904 if (none_removed)
7905 remove_edge (e);
7906 else
7908 /* Walk backwards so as to get a chance to substitute all
7909 released DEFs into debug stmts. See
7910 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7911 details. */
7912 for (i = bbs_to_remove.length (); i-- > 0; )
7913 delete_basic_block (bbs_to_remove[i]);
7916 /* Update the dominance information. The immediate dominator may change only
7917 for blocks whose immediate dominator belongs to DF_IDOM:
7919 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7920 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7921 Z dominates X after the removal. Before removal, there exists a path P
7922 from Y to X that avoids Z. Let F be the last edge on P that is
7923 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7924 dominates W, and because of P, Z does not dominate W), and W belongs to
7925 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7926 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7928 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7929 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7930 dbb;
7931 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7932 bbs_to_fix_dom.safe_push (dbb);
7935 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7937 BITMAP_FREE (df);
7938 BITMAP_FREE (df_idom);
7939 bbs_to_remove.release ();
7940 bbs_to_fix_dom.release ();
7943 /* Purge dead EH edges from basic block BB. */
7945 bool
7946 gimple_purge_dead_eh_edges (basic_block bb)
7948 bool changed = false;
7949 edge e;
7950 edge_iterator ei;
7951 gimple stmt = last_stmt (bb);
7953 if (stmt && stmt_can_throw_internal (stmt))
7954 return false;
7956 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7958 if (e->flags & EDGE_EH)
7960 remove_edge_and_dominated_blocks (e);
7961 changed = true;
7963 else
7964 ei_next (&ei);
7967 return changed;
7970 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7972 bool
7973 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7975 bool changed = false;
7976 unsigned i;
7977 bitmap_iterator bi;
7979 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7981 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7983 /* Earlier gimple_purge_dead_eh_edges could have removed
7984 this basic block already. */
7985 gcc_assert (bb || changed);
7986 if (bb != NULL)
7987 changed |= gimple_purge_dead_eh_edges (bb);
7990 return changed;
7993 /* Purge dead abnormal call edges from basic block BB. */
7995 bool
7996 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7998 bool changed = false;
7999 edge e;
8000 edge_iterator ei;
8001 gimple stmt = last_stmt (bb);
8003 if (!cfun->has_nonlocal_label
8004 && !cfun->calls_setjmp)
8005 return false;
8007 if (stmt && stmt_can_make_abnormal_goto (stmt))
8008 return false;
8010 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8012 if (e->flags & EDGE_ABNORMAL)
8014 if (e->flags & EDGE_FALLTHRU)
8015 e->flags &= ~EDGE_ABNORMAL;
8016 else
8017 remove_edge_and_dominated_blocks (e);
8018 changed = true;
8020 else
8021 ei_next (&ei);
8024 return changed;
8027 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8029 bool
8030 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8032 bool changed = false;
8033 unsigned i;
8034 bitmap_iterator bi;
8036 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8038 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8040 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8041 this basic block already. */
8042 gcc_assert (bb || changed);
8043 if (bb != NULL)
8044 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8047 return changed;
8050 /* This function is called whenever a new edge is created or
8051 redirected. */
8053 static void
8054 gimple_execute_on_growing_pred (edge e)
8056 basic_block bb = e->dest;
8058 if (!gimple_seq_empty_p (phi_nodes (bb)))
8059 reserve_phi_args_for_new_edge (bb);
8062 /* This function is called immediately before edge E is removed from
8063 the edge vector E->dest->preds. */
8065 static void
8066 gimple_execute_on_shrinking_pred (edge e)
8068 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8069 remove_phi_args (e);
8072 /*---------------------------------------------------------------------------
8073 Helper functions for Loop versioning
8074 ---------------------------------------------------------------------------*/
8076 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8077 of 'first'. Both of them are dominated by 'new_head' basic block. When
8078 'new_head' was created by splitting 'second's incoming edge, it received
8079 phi arguments on that edge from split_edge(). Later, the additional edge
8080 'e' was created to connect 'new_head' and 'first'. This routine now adds
8081 to edge 'e' the same phi args that the edge from 'new_head' to 'second'
8082 received as part of the edge splitting. */
8084 static void
8085 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8086 basic_block new_head, edge e)
8088 gphi *phi1, *phi2;
8089 gphi_iterator psi1, psi2;
8090 tree def;
8091 edge e2 = find_edge (new_head, second);
8093 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8094 edge, we should always have an edge from NEW_HEAD to SECOND. */
8095 gcc_assert (e2 != NULL);
8097 /* Browse all 'second' basic block phi nodes and add phi args to
8098 edge 'e' for 'first' head. PHI args are always in correct order. */
8100 for (psi2 = gsi_start_phis (second),
8101 psi1 = gsi_start_phis (first);
8102 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8103 gsi_next (&psi2), gsi_next (&psi1))
8105 phi1 = psi1.phi ();
8106 phi2 = psi2.phi ();
8107 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8108 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8113 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8114 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8115 the destination of the ELSE part. */
8117 static void
8118 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8119 basic_block second_head ATTRIBUTE_UNUSED,
8120 basic_block cond_bb, void *cond_e)
8122 gimple_stmt_iterator gsi;
8123 gimple new_cond_expr;
8124 tree cond_expr = (tree) cond_e;
8125 edge e0;
8127 /* Build new conditional expr */
8128 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8129 NULL_TREE, NULL_TREE);
8131 /* Add new cond in cond_bb. */
8132 gsi = gsi_last_bb (cond_bb);
8133 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8135 /* Adjust edges appropriately to connect new head with first head
8136 as well as second head. */
8137 e0 = single_succ_edge (cond_bb);
8138 e0->flags &= ~EDGE_FALLTHRU;
8139 e0->flags |= EDGE_FALSE_VALUE;
8143 /* Do book-keeping of basic block BB for the profile consistency checker.
8144 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8145 do post-pass accounting. Store the counts in RECORD. */
8146 static void
8147 gimple_account_profile_record (basic_block bb, int after_pass,
8148 struct profile_record *record)
8150 gimple_stmt_iterator i;
8151 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8153 record->size[after_pass]
8154 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8155 if (profile_status_for_fn (cfun) == PROFILE_READ)
8156 record->time[after_pass]
8157 += estimate_num_insns (gsi_stmt (i),
8158 &eni_time_weights) * bb->count;
8159 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8160 record->time[after_pass]
8161 += estimate_num_insns (gsi_stmt (i),
8162 &eni_time_weights) * bb->frequency;
8166 struct cfg_hooks gimple_cfg_hooks = {
8167 "gimple",
8168 gimple_verify_flow_info,
8169 gimple_dump_bb, /* dump_bb */
8170 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8171 create_bb, /* create_basic_block */
8172 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8173 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8174 gimple_can_remove_branch_p, /* can_remove_branch_p */
8175 remove_bb, /* delete_basic_block */
8176 gimple_split_block, /* split_block */
8177 gimple_move_block_after, /* move_block_after */
8178 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8179 gimple_merge_blocks, /* merge_blocks */
8180 gimple_predict_edge, /* predict_edge */
8181 gimple_predicted_by_p, /* predicted_by_p */
8182 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8183 gimple_duplicate_bb, /* duplicate_block */
8184 gimple_split_edge, /* split_edge */
8185 gimple_make_forwarder_block, /* make_forwarder_block */
8186 NULL, /* tidy_fallthru_edge */
8187 NULL, /* force_nonfallthru */
8188 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8189 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8190 gimple_flow_call_edges_add, /* flow_call_edges_add */
8191 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8192 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8193 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8194 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8195 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8196 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8197 flush_pending_stmts, /* flush_pending_stmts */
8198 gimple_empty_block_p, /* block_empty_p */
8199 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8200 gimple_account_profile_record,
8204 /* Split all critical edges. */
8206 unsigned int
8207 split_critical_edges (void)
8209 basic_block bb;
8210 edge e;
8211 edge_iterator ei;
8213 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8214 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8215 mappings around the calls to split_edge. */
8216 start_recording_case_labels ();
8217 FOR_ALL_BB_FN (bb, cfun)
8219 FOR_EACH_EDGE (e, ei, bb->succs)
8221 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8222 split_edge (e);
8223 /* PRE inserts statements to edges and expects that
8224 since split_critical_edges was done beforehand, committing edge
8225 insertions will not split more edges. In addition to critical
8226 edges we must split edges that have multiple successors and
8227 end by control flow statements, such as RESX.
8228 Go ahead and split them too. This matches the logic in
8229 gimple_find_edge_insert_loc. */
8230 else if ((!single_pred_p (e->dest)
8231 || !gimple_seq_empty_p (phi_nodes (e->dest))
8232 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8233 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8234 && !(e->flags & EDGE_ABNORMAL))
8236 gimple_stmt_iterator gsi;
8238 gsi = gsi_last_bb (e->src);
8239 if (!gsi_end_p (gsi)
8240 && stmt_ends_bb_p (gsi_stmt (gsi))
8241 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8242 && !gimple_call_builtin_p (gsi_stmt (gsi),
8243 BUILT_IN_RETURN)))
8244 split_edge (e);
8248 end_recording_case_labels ();
8249 return 0;
8252 namespace {
8254 const pass_data pass_data_split_crit_edges =
8256 GIMPLE_PASS, /* type */
8257 "crited", /* name */
8258 OPTGROUP_NONE, /* optinfo_flags */
8259 TV_TREE_SPLIT_EDGES, /* tv_id */
8260 PROP_cfg, /* properties_required */
8261 PROP_no_crit_edges, /* properties_provided */
8262 0, /* properties_destroyed */
8263 0, /* todo_flags_start */
8264 0, /* todo_flags_finish */
8267 class pass_split_crit_edges : public gimple_opt_pass
8269 public:
8270 pass_split_crit_edges (gcc::context *ctxt)
8271 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8274 /* opt_pass methods: */
8275 virtual unsigned int execute (function *) { return split_critical_edges (); }
8277 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8278 }; // class pass_split_crit_edges
8280 } // anon namespace
8282 gimple_opt_pass *
8283 make_pass_split_crit_edges (gcc::context *ctxt)
8285 return new pass_split_crit_edges (ctxt);
8289 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8290 in basic block BB, splitting the block as appropriate and
8291 creating a new conditionally executed basic block.
8292 Return the created basic block. */
8293 basic_block
8294 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8296 edge fall = split_block (bb, stmt);
8297 gimple_stmt_iterator iter = gsi_last_bb (bb);
8298 basic_block new_bb;
8300 /* Insert cond statement. */
8301 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8302 if (gsi_end_p (iter))
8303 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8304 else
8305 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8307 /* Create conditionally executed block. */
8308 new_bb = create_empty_bb (bb);
8309 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8310 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8312 /* Fix edge for split bb. */
8313 fall->flags = EDGE_FALSE_VALUE;
8315 /* Update dominance info. */
8316 if (dom_info_available_p (CDI_DOMINATORS))
8318 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8319 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8322 /* Update loop info. */
8323 if (current_loops)
8324 add_bb_to_loop (new_bb, bb->loop_father);
8326 return new_bb;
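/* Illustrative usage sketch, not part of the original file: split BB
   after STMT and create a block that executes only when VAR is nonzero.
   BB, STMT and VAR are placeholders supplied by the caller.  */
#if 0
static basic_block
example_guard_with_cond (basic_block bb, gimple stmt, tree var)
{
  gcond *cond = gimple_build_cond (NE_EXPR, var, integer_zero_node,
				   NULL_TREE, NULL_TREE);
  /* Statements added to the returned block run iff VAR != 0; the
     fallthrough edge out of BB carries EDGE_FALSE_VALUE.  */
  return insert_cond_bb (bb, stmt, cond);
}
#endif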
8329 /* Build a ternary operation and gimplify it. Emit code before GSI.
8330 Return the gimple_val holding the result. */
8332 tree
8333 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8334 tree type, tree a, tree b, tree c)
8336 tree ret;
8337 location_t loc = gimple_location (gsi_stmt (*gsi));
8339 ret = fold_build3_loc (loc, code, type, a, b, c);
8340 STRIP_NOPS (ret);
8342 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8343 GSI_SAME_STMT);
8346 /* Build a binary operation and gimplify it. Emit code before GSI.
8347 Return the gimple_val holding the result. */
8349 tree
8350 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8351 tree type, tree a, tree b)
8353 tree ret;
8355 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8356 STRIP_NOPS (ret);
8358 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8359 GSI_SAME_STMT);
8362 /* Build a unary operation and gimplify it. Emit code before GSI.
8363 Return the gimple_val holding the result. */
8365 tree
8366 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8367 tree a)
8369 tree ret;
8371 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8372 STRIP_NOPS (ret);
8374 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8375 GSI_SAME_STMT);
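/* Illustrative usage sketch, not part of the original file: emit
   -(A + B) before the statement at GSI.  Each helper above returns a
   gimple_val that is ready for use as an operand of the next one.  */
#if 0
static tree
example_build_negated_sum (gimple_stmt_iterator *gsi, tree type,
			   tree a, tree b)
{
  tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
  return gimplify_build1 (gsi, NEGATE_EXPR, type, sum);
}
#endif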
8380 /* Given a basic block B which ends with a conditional and has
8381 precisely two successors, determine which of the edges is taken if
8382 the conditional is true and which is taken if the conditional is
8383 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8385 void
8386 extract_true_false_edges_from_block (basic_block b,
8387 edge *true_edge,
8388 edge *false_edge)
8390 edge e = EDGE_SUCC (b, 0);
8392 if (e->flags & EDGE_TRUE_VALUE)
8394 *true_edge = e;
8395 *false_edge = EDGE_SUCC (b, 1);
8397 else
8399 *false_edge = e;
8400 *true_edge = EDGE_SUCC (b, 1);
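/* Illustrative usage sketch, not part of the original file: classify
   the two successors of a block that ends in a GIMPLE_COND.  */
#if 0
static void
example_classify_cond_edges (basic_block bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  /* TRUE_EDGE is taken when the condition holds, FALSE_EDGE otherwise.  */
}
#endif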
8404 /* Emit return warnings. */
8406 namespace {
8408 const pass_data pass_data_warn_function_return =
8409 {
8410   GIMPLE_PASS, /* type */
8411   "*warn_function_return", /* name */
8412   OPTGROUP_NONE, /* optinfo_flags */
8413   TV_NONE, /* tv_id */
8414   PROP_cfg, /* properties_required */
8415   0, /* properties_provided */
8416   0, /* properties_destroyed */
8417   0, /* todo_flags_start */
8418   0, /* todo_flags_finish */
8419 };
8421 class pass_warn_function_return : public gimple_opt_pass
8422 {
8423 public:
8424   pass_warn_function_return (gcc::context *ctxt)
8425     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8426   {}
8428   /* opt_pass methods: */
8429   virtual unsigned int execute (function *);
8431 }; // class pass_warn_function_return
8433 unsigned int
8434 pass_warn_function_return::execute (function *fun)
8435 {
8436   source_location location;
8437   gimple last;
8438   edge e;
8439   edge_iterator ei;
8441   if (!targetm.warn_func_return (fun->decl))
8442     return 0;
8444   /* If we have a path to EXIT, then we do return.  */
8445   if (TREE_THIS_VOLATILE (fun->decl)
8446       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8447     {
8448       location = UNKNOWN_LOCATION;
8449       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8450         {
8451           last = last_stmt (e->src);
8452           if ((gimple_code (last) == GIMPLE_RETURN
8453                || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8454               && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8455             break;
8456         }
8457       if (location == UNKNOWN_LOCATION)
8458         location = cfun->function_end_locus;
8459       warning_at (location, 0, "%<noreturn%> function does return");
8460     }
8462   /* If we see "return;" in some basic block, then we do reach the end
8463      without returning a value.  */
8464   else if (warn_return_type
8465            && !TREE_NO_WARNING (fun->decl)
8466            && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8467            && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8468     {
8469       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8470         {
8471           gimple last = last_stmt (e->src);
8472           greturn *return_stmt = dyn_cast <greturn *> (last);
8473           if (return_stmt
8474               && gimple_return_retval (return_stmt) == NULL
8475               && !gimple_no_warning_p (last))
8476             {
8477               location = gimple_location (last);
8478               if (location == UNKNOWN_LOCATION)
8479                 location = fun->function_end_locus;
8480               warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8481               TREE_NO_WARNING (fun->decl) = 1;
8482               break;
8483             }
8484         }
8485     }
8486   return 0;
8487 }
8489 } // anon namespace
8491 gimple_opt_pass *
8492 make_pass_warn_function_return (gcc::context *ctxt)
8493 {
8494   return new pass_warn_function_return (ctxt);
8495 }
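/* Two hypothetical functions this pass would diagnose.  The first warning
   is emitted whenever the target hook allows it; the second is gated on
   -Wreturn-type:

     __attribute__ ((noreturn)) void die (void)
     { return; }              // warning: 'noreturn' function does return

     int sign (int x)
     {
       if (x > 0)
         return 1;
     }                        // warning: control reaches end of
                              // non-void function
*/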
8497 /* Walk a gimplified function and warn about calls whose return value is
8498    ignored when the callee is declared with attribute((warn_unused_result)).
8499    This is done before inlining, so we don't have to worry about that.  */
8501 static void
8502 do_warn_unused_result (gimple_seq seq)
8503 {
8504   tree fdecl, ftype;
8505   gimple_stmt_iterator i;
8507   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8508     {
8509       gimple g = gsi_stmt (i);
8511       switch (gimple_code (g))
8512         {
8513         case GIMPLE_BIND:
8514           do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8515           break;
8516         case GIMPLE_TRY:
8517           do_warn_unused_result (gimple_try_eval (g));
8518           do_warn_unused_result (gimple_try_cleanup (g));
8519           break;
8520         case GIMPLE_CATCH:
8521           do_warn_unused_result (gimple_catch_handler (
8522                                    as_a <gcatch *> (g)));
8523           break;
8524         case GIMPLE_EH_FILTER:
8525           do_warn_unused_result (gimple_eh_filter_failure (g));
8526           break;
8528         case GIMPLE_CALL:
8529           if (gimple_call_lhs (g))
8530             break;
8531           if (gimple_call_internal_p (g))
8532             break;
8534           /* This is a naked call, as opposed to a GIMPLE_CALL with an
8535              LHS.  All calls whose value is ignored should be
8536              represented like this.  Look for the attribute.  */
8537           fdecl = gimple_call_fndecl (g);
8538           ftype = gimple_call_fntype (g);
8540           if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8541             {
8542               location_t loc = gimple_location (g);
8544               if (fdecl)
8545                 warning_at (loc, OPT_Wunused_result,
8546                             "ignoring return value of %qD, "
8547                             "declared with attribute warn_unused_result",
8548                             fdecl);
8549               else
8550                 warning_at (loc, OPT_Wunused_result,
8551                             "ignoring return value of function "
8552                             "declared with attribute warn_unused_result");
8553             }
8554           break;
8556         default:
8557           /* Not a container, not a call, or a call whose value is used.  */
8558           break;
8559         }
8560     }
8561 }
8563 namespace {
8565 const pass_data pass_data_warn_unused_result =
8566 {
8567   GIMPLE_PASS, /* type */
8568   "*warn_unused_result", /* name */
8569   OPTGROUP_NONE, /* optinfo_flags */
8570   TV_NONE, /* tv_id */
8571   PROP_gimple_any, /* properties_required */
8572   0, /* properties_provided */
8573   0, /* properties_destroyed */
8574   0, /* todo_flags_start */
8575   0, /* todo_flags_finish */
8576 };
8578 class pass_warn_unused_result : public gimple_opt_pass
8579 {
8580 public:
8581   pass_warn_unused_result (gcc::context *ctxt)
8582     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8583   {}
8585   /* opt_pass methods: */
8586   virtual bool gate (function *) { return flag_warn_unused_result; }
8587   virtual unsigned int execute (function *)
8588     {
8589       do_warn_unused_result (gimple_body (current_function_decl));
8590       return 0;
8591     }
8593 }; // class pass_warn_unused_result
8595 } // anon namespace
8597 gimple_opt_pass *
8598 make_pass_warn_unused_result (gcc::context *ctxt)
8599 {
8600   return new pass_warn_unused_result (ctxt);
8601 }
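/* A hypothetical example the pass diagnoses under -Wunused-result:

     __attribute__ ((warn_unused_result)) int try_lock (void);

     void f (void)
     {
       try_lock ();          // warning: ignoring return value of 'try_lock',
                             // declared with attribute warn_unused_result
       (void) try_lock ();   // still warns: the cast leaves a GIMPLE_CALL
                             // with no LHS, which is all the walk above
                             // inspects
     }
*/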
8603 /* IPA passes, compilation of earlier functions or inlining
8604    might have changed some properties, such as marking functions nothrow,
8605    pure, const or noreturn.
8606    Remove redundant edges and basic blocks, and create new ones if necessary.
8608    This pass can't be executed as a standalone pass from the pass manager,
8609    because between inlining and this fixup verify_flow_info would fail.  */
8611 unsigned int
8612 execute_fixup_cfg (void)
8613 {
8614   basic_block bb;
8615   gimple_stmt_iterator gsi;
8616   int todo = 0;
8617   gcov_type count_scale;
8618   edge e;
8619   edge_iterator ei;
8621   count_scale
8622       = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8623                             ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8625   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8626                             cgraph_node::get (current_function_decl)->count;
8627   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8628                             apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8629                                          count_scale);
8631   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8632     e->count = apply_scale (e->count, count_scale);
8634   FOR_EACH_BB_FN (bb, cfun)
8635     {
8636       bb->count = apply_scale (bb->count, count_scale);
8637       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8638         {
8639           gimple stmt = gsi_stmt (gsi);
8640           tree decl = is_gimple_call (stmt)
8641                       ? gimple_call_fndecl (stmt)
8642                       : NULL;
8643           if (decl)
8644             {
8645               int flags = gimple_call_flags (stmt);
8646               if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8647                 {
8648                   if (gimple_purge_dead_abnormal_call_edges (bb))
8649                     todo |= TODO_cleanup_cfg;
8651                   if (gimple_in_ssa_p (cfun))
8652                     {
8653                       todo |= TODO_update_ssa | TODO_cleanup_cfg;
8654                       update_stmt (stmt);
8655                     }
8656                 }
8658               if (flags & ECF_NORETURN
8659                   && fixup_noreturn_call (stmt))
8660                 todo |= TODO_cleanup_cfg;
8661             }
8663           /* Remove stores to variables we marked write-only.
8664              Keep the access when the store has a side effect, i.e. when
8665              the source is volatile.  */
8666           if (gimple_store_p (stmt)
8667               && !gimple_has_side_effects (stmt))
8668             {
8669               tree lhs = get_base_address (gimple_get_lhs (stmt));
8671               if (TREE_CODE (lhs) == VAR_DECL
8672                   && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8673                   && varpool_node::get (lhs)->writeonly)
8674                 {
8675                   unlink_stmt_vdef (stmt);
8676                   gsi_remove (&gsi, true);
8677                   release_defs (stmt);
8678                   todo |= TODO_update_ssa | TODO_cleanup_cfg;
8679                   continue;
8680                 }
8681             }
8682           /* For calls we can simply remove the LHS when it is known
8683              to be write-only.  */
8684           if (is_gimple_call (stmt)
8685               && gimple_get_lhs (stmt))
8686             {
8687               tree lhs = get_base_address (gimple_get_lhs (stmt));
8689               if (TREE_CODE (lhs) == VAR_DECL
8690                   && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8691                   && varpool_node::get (lhs)->writeonly)
8692                 {
8693                   gimple_call_set_lhs (stmt, NULL);
8694                   update_stmt (stmt);
8695                   todo |= TODO_update_ssa | TODO_cleanup_cfg;
8696                 }
8697             }
8699           if (maybe_clean_eh_stmt (stmt)
8700               && gimple_purge_dead_eh_edges (bb))
8701             todo |= TODO_cleanup_cfg;
8702           gsi_next (&gsi);
8703         }
8705       FOR_EACH_EDGE (e, ei, bb->succs)
8706         e->count = apply_scale (e->count, count_scale);
8708       /* If we have a basic block with no successors that does not
8709          end with a control statement or a noreturn call, end it with
8710          a call to __builtin_unreachable.  This situation can occur
8711          when inlining a noreturn call that does in fact return.  */
8712       if (EDGE_COUNT (bb->succs) == 0)
8713         {
8714           gimple stmt = last_stmt (bb);
8715           if (!stmt
8716               || (!is_ctrl_stmt (stmt)
8717                   && (!is_gimple_call (stmt)
8718                       || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8719             {
8720               if (stmt && is_gimple_call (stmt))
8721                 gimple_call_set_ctrl_altering (stmt, false);
8722               stmt = gimple_build_call
8723                   (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8724               gimple_stmt_iterator gsi = gsi_last_bb (bb);
8725               gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8726             }
8727         }
8728     }
8729   if (count_scale != REG_BR_PROB_BASE)
8730     compute_function_frequency ();
8732   if (current_loops
8733       && (todo & TODO_cleanup_cfg))
8734     loops_state_set (LOOPS_NEED_FIXUP);
8736   return todo;
8737 }
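/* An illustration of the __builtin_unreachable case (hypothetical source):
   if `fatal' is declared noreturn but in fact returns,

     __attribute__ ((noreturn)) void fatal (void) { }
     int f (void) { fatal (); }

   then after `fatal' is inlined into `f' the inlined body simply falls off
   its end: the resulting block has no successors yet ends in an ordinary
   statement, so the loop above appends a call to __builtin_unreachable to
   keep the CFG verifiable.  */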
8739 namespace {
8741 const pass_data pass_data_fixup_cfg =
8742 {
8743   GIMPLE_PASS, /* type */
8744   "fixup_cfg", /* name */
8745   OPTGROUP_NONE, /* optinfo_flags */
8746   TV_NONE, /* tv_id */
8747   PROP_cfg, /* properties_required */
8748   0, /* properties_provided */
8749   0, /* properties_destroyed */
8750   0, /* todo_flags_start */
8751   0, /* todo_flags_finish */
8752 };
8754 class pass_fixup_cfg : public gimple_opt_pass
8755 {
8756 public:
8757   pass_fixup_cfg (gcc::context *ctxt)
8758     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8759   {}
8761   /* opt_pass methods: */
8762   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8763   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8765 }; // class pass_fixup_cfg
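/* Note: unlike the two warning passes above, this pass overrides clone (),
   because fixup_cfg is scheduled at several points in the pass pipeline and
   the pass manager requires a fresh pass object for each occurrence.  */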
8767 } // anon namespace
8769 gimple_opt_pass *
8770 make_pass_fixup_cfg (gcc::context *ctxt)
8771 {
8772   return new pass_fixup_cfg (ctxt);
8773 }
8775 /* Garbage collection support for edge_def. */
8777 extern void gt_ggc_mx (tree&);
8778 extern void gt_ggc_mx (gimple&);
8779 extern void gt_ggc_mx (rtx&);
8780 extern void gt_ggc_mx (basic_block&);
8782 static void
8783 gt_ggc_mx (rtx_insn *& x)
8784 {
8785   if (x)
8786     gt_ggc_mx_rtx_def ((void *) x);
8787 }
8789 void
8790 gt_ggc_mx (edge_def *e)
8791 {
8792   tree block = LOCATION_BLOCK (e->goto_locus);
8793   gt_ggc_mx (e->src);
8794   gt_ggc_mx (e->dest);
8795   if (current_ir_type () == IR_GIMPLE)
8796     gt_ggc_mx (e->insns.g);
8797   else
8798     gt_ggc_mx (e->insns.r);
8799   gt_ggc_mx (block);
8800 }
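/* The dispatch on current_ir_type () above reflects that edge_def::insns
   is a union: pending edge insertions are a gimple_seq (insns.g) while the
   function is still in GIMPLE form and an rtx_insn chain (insns.r) once it
   has been expanded to RTL, so only the active member is valid to mark.  */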
8802 /* PCH support for edge_def. */
8804 extern void gt_pch_nx (tree&);
8805 extern void gt_pch_nx (gimple&);
8806 extern void gt_pch_nx (rtx&);
8807 extern void gt_pch_nx (basic_block&);
8809 static void
8810 gt_pch_nx (rtx_insn *& x)
8811 {
8812   if (x)
8813     gt_pch_nx_rtx_def ((void *) x);
8814 }
8816 void
8817 gt_pch_nx (edge_def *e)
8818 {
8819   tree block = LOCATION_BLOCK (e->goto_locus);
8820   gt_pch_nx (e->src);
8821   gt_pch_nx (e->dest);
8822   if (current_ir_type () == IR_GIMPLE)
8823     gt_pch_nx (e->insns.g);
8824   else
8825     gt_pch_nx (e->insns.r);
8826   gt_pch_nx (block);
8827 }
8829 void
8830 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8831 {
8832   tree block = LOCATION_BLOCK (e->goto_locus);
8833   op (&(e->src), cookie);
8834   op (&(e->dest), cookie);
8835   if (current_ir_type () == IR_GIMPLE)
8836     op (&(e->insns.g), cookie);
8837   else
8838     op (&(e->insns.r), cookie);
8839   op (&(block), cookie);
8840 }
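/* This operator form is, to the best of our understanding, used by the PCH
   machinery when stored pointer values must be rewritten (e.g. when the PCH
   image is relocated): OP is applied to the address of each pointer field
   in the edge, with COOKIE passed through unchanged.  */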