Update ChangeLog and version files for release
[official-gcc.git] / gcc / tree-cfg.c
blob64bdc92e681026ea0d6e47208e35e3212c684722
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "trans-mem.h"
39 #include "stor-layout.h"
40 #include "print-tree.h"
41 #include "tm_p.h"
42 #include "predict.h"
43 #include "hard-reg-set.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "flags.h"
50 #include "gimple-pretty-print.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-fold.h"
54 #include "tree-eh.h"
55 #include "gimple-expr.h"
56 #include "is-a.h"
57 #include "gimple.h"
58 #include "gimple-iterator.h"
59 #include "gimplify-me.h"
60 #include "gimple-walk.h"
61 #include "gimple-ssa.h"
62 #include "plugin-api.h"
63 #include "ipa-ref.h"
64 #include "cgraph.h"
65 #include "tree-cfg.h"
66 #include "tree-phinodes.h"
67 #include "ssa-iterators.h"
68 #include "stringpool.h"
69 #include "tree-ssanames.h"
70 #include "tree-ssa-loop-manip.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "tree-into-ssa.h"
73 #include "hashtab.h"
74 #include "rtl.h"
75 #include "statistics.h"
76 #include "real.h"
77 #include "fixed-value.h"
78 #include "insn-config.h"
79 #include "expmed.h"
80 #include "dojump.h"
81 #include "explow.h"
82 #include "calls.h"
83 #include "emit-rtl.h"
84 #include "varasm.h"
85 #include "stmt.h"
86 #include "expr.h"
87 #include "tree-dfa.h"
88 #include "tree-ssa.h"
89 #include "tree-dump.h"
90 #include "tree-pass.h"
91 #include "diagnostic-core.h"
92 #include "except.h"
93 #include "cfgloop.h"
94 #include "tree-ssa-propagate.h"
95 #include "value-prof.h"
96 #include "tree-inline.h"
97 #include "target.h"
98 #include "tree-ssa-live.h"
99 #include "omp-low.h"
100 #include "tree-cfgcleanup.h"
101 #include "wide-int-print.h"
103 /* This file contains functions for building the Control Flow Graph (CFG)
104 for a function tree. */
106 /* Local declarations. */
108 /* Initial capacity for the basic block array. */
109 static const int initial_cfg_capacity = 20;
111 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
112 which use a particular edge. The CASE_LABEL_EXPRs are chained together
113 via their CASE_CHAIN field, which we clear after we're done with the
114 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
116 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
117 update the case vector in response to edge redirections.
119 Right now this table is set up and torn down at key points in the
120 compilation process. It would be nice if we could make the table
121 more persistent. The key is getting notification of changes to
122 the CFG (particularly edge removal, creation and redirection). */
/* Allocated by start_recording_case_labels and destroyed by
   end_recording_case_labels below; NULL while recording is inactive
   (recording_case_labels_p tests exactly that).  */
124 static hash_map<edge, tree> *edge_to_cases;
126 /* If we record edge_to_cases, this bitmap will hold indexes
127 of basic blocks that end in a GIMPLE_SWITCH which we touched
128 due to edge manipulations. */
/* Allocated alongside edge_to_cases in start_recording_case_labels.  */
130 static bitmap touched_switch_bbs;
132 /* CFG statistics. */
133 struct cfg_stats_d
/* Count of labels folded into a surviving label.  NOTE(review): the
   increments happen outside this chunk — presumably in the label
   merging/cleanup code; confirm before relying on the exact meaning.  */
135 long num_merged_labels;
/* Zeroed at the start of each build_gimple_cfg invocation.  */
138 static struct cfg_stats_d cfg_stats;
140 /* Hash table to store last discriminator assigned for each locus. */
141 struct locus_discrim_map
143 location_t locus;
144 int discriminator;
147 /* Hashtable helpers. */
149 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
151 typedef locus_discrim_map value_type;
152 typedef locus_discrim_map compare_type;
153 static inline hashval_t hash (const value_type *);
154 static inline bool equal (const value_type *, const compare_type *);
157 /* Trivial hash function for a location_t. ITEM is a pointer to
158 a hash table entry that maps a location_t to a discriminator. */
160 inline hashval_t
161 locus_discrim_hasher::hash (const value_type *item)
/* Only the source LINE participates; columns are deliberately ignored,
   matching equal () below, so all loci on one line share an entry.  */
163 return LOCATION_LINE (item->locus);
166 /* Equality function for the locus-to-discriminator map. A and B
167 point to the two hash table entries to compare. */
169 inline bool
170 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
173 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
175 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
177 /* Basic blocks and flowgraphs. */
178 static void make_blocks (gimple_seq);
180 /* Edges. */
181 static void make_edges (void);
182 static void assign_discriminators (void);
183 static void make_cond_expr_edges (basic_block);
184 static void make_gimple_switch_edges (gswitch *, basic_block);
185 static bool make_goto_expr_edges (basic_block);
186 static void make_gimple_asm_edges (basic_block);
/* NOTE(review): the gimple_* entries below are presumably installed as
   CFG hooks by gimple_register_cfg_hooks (called from build_gimple_cfg);
   the hook table itself is outside this chunk — confirm there.  */
187 static edge gimple_redirect_edge_and_branch (edge, basic_block);
188 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
190 /* Various helpers. */
191 static inline bool stmt_starts_bb_p (gimple, gimple);
192 static int gimple_verify_flow_info (void);
193 static void gimple_make_forwarder_block (edge);
194 static gimple first_non_label_stmt (basic_block);
195 static bool verify_gimple_transaction (gtransaction *);
196 static bool call_can_make_abnormal_goto (gimple);
198 /* Flowgraph optimization and cleanup. */
199 static void gimple_merge_blocks (basic_block, basic_block);
200 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
201 static void remove_bb (basic_block);
202 static edge find_taken_edge_computed_goto (basic_block, tree);
203 static edge find_taken_edge_cond_expr (basic_block, tree);
204 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
205 static tree find_case_label_for_value (gswitch *, tree);
207 void
208 init_empty_tree_cfg_for_function (struct function *fn)
210 /* Initialize the basic block array. */
211 init_flow (fn);
212 profile_status_for_fn (fn) = PROFILE_ABSENT;
213 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
214 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
215 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
216 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
217 initial_cfg_capacity);
219 /* Build a mapping of labels to their associated blocks. */
220 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
221 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
222 initial_cfg_capacity);
224 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
225 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
227 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
228 = EXIT_BLOCK_PTR_FOR_FN (fn);
229 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
230 = ENTRY_BLOCK_PTR_FOR_FN (fn);
233 void
234 init_empty_tree_cfg (void)
236 init_empty_tree_cfg_for_function (cfun);
239 /*---------------------------------------------------------------------------
240 Create basic blocks
241 ---------------------------------------------------------------------------*/
243 /* Entry point to the CFG builder for trees. SEQ is the sequence of
244 statements to be added to the flowgraph. */
246 static void
247 build_gimple_cfg (gimple_seq seq)
249 /* Register specific gimple functions. */
250 gimple_register_cfg_hooks ();
252 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
254 init_empty_tree_cfg ();
256 make_blocks (seq);
258 /* Make sure there is always at least one block, even if it's empty. */
259 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
260 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
262 /* Adjust the size of the array. */
263 if (basic_block_info_for_fn (cfun)->length ()
264 < (size_t) n_basic_blocks_for_fn (cfun))
265 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
266 n_basic_blocks_for_fn (cfun));
268 /* To speed up statement iterator walks, we first purge dead labels. */
269 cleanup_dead_labels ();
271 /* Group case nodes to reduce the number of edges.
272 We do this after cleaning up dead labels because otherwise we miss
273 a lot of obvious case merging opportunities. */
274 group_case_labels ();
276 /* Create the edges of the flowgraph. */
277 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
278 make_edges ();
279 assign_discriminators ();
280 cleanup_dead_labels ();
281 delete discriminator_per_locus;
282 discriminator_per_locus = NULL;
285 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
286 them and propagate the information to LOOP. We assume that the annotations
287 come immediately before the condition in BB, if any. */
289 static void
290 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
292 gimple_stmt_iterator gsi = gsi_last_bb (bb);
293 gimple stmt = gsi_stmt (gsi);
/* Only blocks ending in a GIMPLE_COND carry loop annotations here.  */
295 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
296 return;
/* Walk backwards from the statement just before the condition; stop at
   the first statement that is not an IFN_ANNOTATE internal call.  */
298 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
300 stmt = gsi_stmt (gsi);
301 if (gimple_code (stmt) != GIMPLE_CALL)
302 break;
303 if (!gimple_call_internal_p (stmt)
304 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
305 break;
/* Argument 1 of the annotation call encodes the annotation kind;
   record its effect on LOOP.  */
307 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
309 case annot_expr_ivdep_kind:
310 loop->safelen = INT_MAX;
311 break;
312 case annot_expr_no_vector_kind:
313 loop->dont_vectorize = true;
314 break;
315 case annot_expr_vector_kind:
316 loop->force_vectorize = true;
317 cfun->has_force_vectorize_loops = true;
318 break;
319 default:
320 gcc_unreachable ();
/* Consume the annotation: replace "lhs = ANNOTATE (val, kind)" with the
   plain copy "lhs = val" so later passes never see the internal call.  */
323 stmt = gimple_build_assign (gimple_call_lhs (stmt),
324 gimple_call_arg (stmt, 0));
325 gsi_replace (&gsi, stmt, true);
329 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
330 them and propagate the information to the loop. We assume that the
331 annotations come immediately before the condition of the loop. */
333 static void
334 replace_loop_annotate (void)
336 struct loop *loop;
337 basic_block bb;
338 gimple_stmt_iterator gsi;
339 gimple stmt;
341 FOR_EACH_LOOP (loop, 0)
343 /* First look into the header. */
344 replace_loop_annotate_in_block (loop->header, loop);
346 /* Then look into the latch, if any. */
347 if (loop->latch)
348 replace_loop_annotate_in_block (loop->latch, loop);
/* Second sweep: any IFN_ANNOTATE still left (e.g. in loops whose latch
   is NULL, or annotations not attached to a recognized loop) is dropped
   with a warning rather than leaking into later passes.  */
351 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
352 FOR_EACH_BB_FN (bb, cfun)
354 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
356 stmt = gsi_stmt (gsi);
357 if (gimple_code (stmt) != GIMPLE_CALL)
358 continue;
359 if (!gimple_call_internal_p (stmt)
360 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
361 continue;
363 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
365 case annot_expr_ivdep_kind:
366 case annot_expr_no_vector_kind:
367 case annot_expr_vector_kind:
368 break;
369 default:
370 gcc_unreachable ();
373 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
/* As in replace_loop_annotate_in_block: rewrite the call into a plain
   copy of its first argument.  */
374 stmt = gimple_build_assign (gimple_call_lhs (stmt),
375 gimple_call_arg (stmt, 0));
376 gsi_replace (&gsi, stmt, true);
382 static unsigned int
383 execute_build_cfg (void)
385 gimple_seq body = gimple_body (current_function_decl);
387 build_gimple_cfg (body);
388 gimple_set_body (current_function_decl, NULL);
389 if (dump_file && (dump_flags & TDF_DETAILS))
391 fprintf (dump_file, "Scope blocks:\n");
392 dump_scope_blocks (dump_file, dump_flags);
394 cleanup_tree_cfg ();
395 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
396 replace_loop_annotate ();
397 return 0;
400 namespace {
402 const pass_data pass_data_build_cfg =
404 GIMPLE_PASS, /* type */
405 "cfg", /* name */
406 OPTGROUP_NONE, /* optinfo_flags */
407 TV_TREE_CFG, /* tv_id */
408 PROP_gimple_leh, /* properties_required */
409 ( PROP_cfg | PROP_loops ), /* properties_provided */
410 0, /* properties_destroyed */
411 0, /* todo_flags_start */
412 0, /* todo_flags_finish */
415 class pass_build_cfg : public gimple_opt_pass
417 public:
418 pass_build_cfg (gcc::context *ctxt)
419 : gimple_opt_pass (pass_data_build_cfg, ctxt)
422 /* opt_pass methods: */
423 virtual unsigned int execute (function *) { return execute_build_cfg (); }
425 }; // class pass_build_cfg
427 } // anon namespace
429 gimple_opt_pass *
430 make_pass_build_cfg (gcc::context *ctxt)
432 return new pass_build_cfg (ctxt);
436 /* Return true if T is a computed goto. */
438 bool
439 computed_goto_p (gimple t)
441 return (gimple_code (t) == GIMPLE_GOTO
442 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
445 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
446 the other edge points to a bb with just __builtin_unreachable ().
447 I.e. return true for C->M edge in:
448 <bb C>:
450 if (something)
451 goto <bb N>;
452 else
453 goto <bb M>;
454 <bb N>:
455 __builtin_unreachable ();
456 <bb M>: */
458 bool
459 assert_unreachable_fallthru_edge_p (edge e)
461 basic_block pred_bb = e->src;
462 gimple last = last_stmt (pred_bb);
463 if (last && gimple_code (last) == GIMPLE_COND)
/* Pick the successor that is NOT E's destination.  */
465 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
466 if (other_bb == e->dest)
467 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
/* Only a dead-end block can consist of just __builtin_unreachable.  */
468 if (EDGE_COUNT (other_bb->succs) == 0)
470 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
471 gimple stmt;
473 if (gsi_end_p (gsi))
474 return false;
475 stmt = gsi_stmt (gsi);
/* Skip over debug statements and clobbers before checking for the
   unreachable call.  */
476 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
478 gsi_next (&gsi);
479 if (gsi_end_p (gsi))
480 return false;
481 stmt = gsi_stmt (gsi);
483 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
486 return false;
490 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
491 could alter control flow except via eh. We initialize the flag at
492 CFG build time and only ever clear it later. */
494 static void
495 gimple_call_initialize_ctrl_altering (gimple stmt)
497 int flags = gimple_call_flags (stmt);
499 /* A call alters control flow if it can make an abnormal goto. */
500 if (call_can_make_abnormal_goto (stmt)
501 /* A call also alters control flow if it does not return. */
502 || flags & ECF_NORETURN
503 /* TM ending statements have backedges out of the transaction.
504 Return true so we split the basic block containing them.
505 Note that the TM_BUILTIN test is merely an optimization. */
506 || ((flags & ECF_TM_BUILTIN)
507 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
508 /* BUILT_IN_RETURN call is same as return statement. */
509 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
510 gimple_call_set_ctrl_altering (stmt, true);
511 else
512 gimple_call_set_ctrl_altering (stmt, false);
516 /* Build a flowgraph for the sequence of stmts SEQ. */
518 static void
519 make_blocks (gimple_seq seq)
521 gimple_stmt_iterator i = gsi_start (seq);
522 gimple stmt = NULL;
523 bool start_new_block = true;
524 bool first_stmt_of_seq = true;
525 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
527 while (!gsi_end_p (i))
529 gimple prev_stmt;
531 prev_stmt = stmt;
532 stmt = gsi_stmt (i);
/* Precompute the ctrl-altering flag for calls before deciding block
   boundaries, since stmt_ends_bb_p consults it.  */
534 if (stmt && is_gimple_call (stmt))
535 gimple_call_initialize_ctrl_altering (stmt);
537 /* If the statement starts a new basic block or if we have determined
538 in a previous pass that we need to create a new block for STMT, do
539 so now. */
540 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
542 if (!first_stmt_of_seq)
543 gsi_split_seq_before (&i, &seq);
544 bb = create_basic_block (seq, NULL, bb);
545 start_new_block = false;
548 /* Now add STMT to BB and create the subgraphs for special statement
549 codes. */
550 gimple_set_bb (stmt, bb);
552 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
553 next iteration. */
554 if (stmt_ends_bb_p (stmt))
556 /* If the stmt can make abnormal goto use a new temporary
557 for the assignment to the LHS. This makes sure the old value
558 of the LHS is available on the abnormal edge. Otherwise
559 we will end up with overlapping life-ranges for abnormal
560 SSA names. */
561 if (gimple_has_lhs (stmt)
562 && stmt_can_make_abnormal_goto (stmt)
563 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
565 tree lhs = gimple_get_lhs (stmt);
566 tree tmp = create_tmp_var (TREE_TYPE (lhs));
/* Rewrite "lhs = call" into "tmp = call; lhs = tmp", inheriting the
   original statement's location and lexical block.  */
567 gimple s = gimple_build_assign (lhs, tmp);
568 gimple_set_location (s, gimple_location (stmt));
569 gimple_set_block (s, gimple_block (stmt));
570 gimple_set_lhs (stmt, tmp);
571 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
572 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
573 DECL_GIMPLE_REG_P (tmp) = 1;
574 gsi_insert_after (&i, s, GSI_SAME_STMT);
576 start_new_block = true;
579 gsi_next (&i);
580 first_stmt_of_seq = false;
585 /* Create and return a new empty basic block after bb AFTER. */
587 static basic_block
588 create_bb (void *h, void *e, basic_block after)
590 basic_block bb;
592 gcc_assert (!e);
594 /* Create and initialize a new basic block. Since alloc_block uses
595 GC allocation that clears memory to allocate a basic block, we do
596 not have to clear the newly allocated basic block here. */
597 bb = alloc_block ();
599 bb->index = last_basic_block_for_fn (cfun);
600 bb->flags = BB_NEW;
601 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
603 /* Add the new block to the linked list of blocks. */
604 link_block (bb, after);
606 /* Grow the basic block array if needed. */
607 if ((size_t) last_basic_block_for_fn (cfun)
608 == basic_block_info_for_fn (cfun)->length ())
610 size_t new_size =
611 (last_basic_block_for_fn (cfun)
612 + (last_basic_block_for_fn (cfun) + 3) / 4);
613 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
616 /* Add the newly created block to the array. */
617 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
619 n_basic_blocks_for_fn (cfun)++;
620 last_basic_block_for_fn (cfun)++;
622 return bb;
626 /*---------------------------------------------------------------------------
627 Edge creation
628 ---------------------------------------------------------------------------*/
630 /* Fold COND_EXPR_COND of each COND_EXPR. */
632 void
633 fold_cond_expr_cond (void)
635 basic_block bb;
637 FOR_EACH_BB_FN (bb, cfun)
639 gimple stmt = last_stmt (bb);
641 if (stmt && gimple_code (stmt) == GIMPLE_COND)
643 gcond *cond_stmt = as_a <gcond *> (stmt);
644 location_t loc = gimple_location (stmt);
645 tree cond;
646 bool zerop, onep;
/* Defer overflow warnings around the fold; they are only emitted
   (via fold_undefer below) when the condition actually folded to a
   constant.  */
648 fold_defer_overflow_warnings ();
649 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
650 boolean_type_node,
651 gimple_cond_lhs (cond_stmt),
652 gimple_cond_rhs (cond_stmt));
653 if (cond)
655 zerop = integer_zerop (cond);
656 onep = integer_onep (cond);
658 else
659 zerop = onep = false;
661 fold_undefer_overflow_warnings (zerop || onep,
662 stmt,
663 WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Canonicalize a constant condition in place; edges are cleaned up
   elsewhere.  */
664 if (zerop)
665 gimple_cond_make_false (cond_stmt);
666 else if (onep)
667 gimple_cond_make_true (cond_stmt);
672 /* If basic block BB has an abnormal edge to a basic block
673 containing IFN_ABNORMAL_DISPATCHER internal call, return
674 that the dispatcher's basic block, otherwise return NULL. */
676 basic_block
677 get_abnormal_succ_dispatcher (basic_block bb)
679 edge e;
680 edge_iterator ei;
682 FOR_EACH_EDGE (e, ei, bb->succs)
683 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
685 gimple_stmt_iterator gsi
686 = gsi_start_nondebug_after_labels_bb (e->dest);
687 gimple g = gsi_stmt (gsi);
688 if (g
689 && is_gimple_call (g)
690 && gimple_call_internal_p (g)
691 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
692 return e->dest;
694 return NULL;
697 /* Helper function for make_edges. Create a basic block with
698 with ABNORMAL_DISPATCHER internal call in it if needed, and
699 create abnormal edges from BBS to it and from it to FOR_BB
700 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
702 static void
703 handle_abnormal_edges (basic_block *dispatcher_bbs,
704 basic_block for_bb, int *bb_to_omp_idx,
705 auto_vec<basic_block> *bbs, bool computed_goto)
/* DISPATCHER_BBS holds pairs: slot 0 is the call/non-local-goto
   dispatcher, slot 1 the computed-goto one (per OMP region when
   BB_TO_OMP_IDX is non-NULL).  */
707 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
708 unsigned int idx = 0;
709 basic_block bb;
710 bool inner = false;
712 if (bb_to_omp_idx)
714 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
715 if (bb_to_omp_idx[for_bb->index] != 0)
716 inner = true;
719 /* If the dispatcher has been created already, then there are basic
720 blocks with abnormal edges to it, so just make a new edge to
721 for_bb. */
722 if (*dispatcher == NULL)
724 /* Check if there are any basic blocks that need to have
725 abnormal edges to this dispatcher. If there are none, return
726 early. */
727 if (bb_to_omp_idx == NULL)
729 if (bbs->is_empty ())
730 return;
732 else
734 FOR_EACH_VEC_ELT (*bbs, idx, bb)
735 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
736 break;
737 if (bb == NULL)
738 return;
741 /* Create the dispatcher bb. */
742 *dispatcher = create_basic_block (NULL, NULL, for_bb)
743 if (computed_goto)
745 /* Factor computed gotos into a common computed goto site. Also
746 record the location of that site so that we can un-factor the
747 gotos after we have converted back to normal form. */
748 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
750 /* Create the destination of the factored goto. Each original
751 computed goto will put its desired destination into this
752 variable and jump to the label we create immediately below. */
753 tree var = create_tmp_var (ptr_type_node, "gotovar");
755 /* Build a label for the new block which will contain the
756 factored computed goto. */
757 tree factored_label_decl
758 = create_artificial_label (UNKNOWN_LOCATION);
759 gimple factored_computed_goto_label
760 = gimple_build_label (factored_label_decl);
761 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
763 /* Build our new computed goto. */
764 gimple factored_computed_goto = gimple_build_goto (var);
765 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
/* Rewrite each original computed goto as "var = dest; fallthru to
   dispatcher", deleting the original goto statement.  */
767 FOR_EACH_VEC_ELT (*bbs, idx, bb)
769 if (bb_to_omp_idx
770 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
771 continue;
773 gsi = gsi_last_bb (bb);
774 gimple last = gsi_stmt (gsi);
776 gcc_assert (computed_goto_p (last));
778 /* Copy the original computed goto's destination into VAR. */
779 gimple assignment
780 = gimple_build_assign (var, gimple_goto_dest (last));
781 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
783 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
784 e->goto_locus = gimple_location (last);
785 gsi_remove (&gsi, true);
788 else
/* Non-computed-goto case: plant an IFN_ABNORMAL_DISPATCHER call;
   its argument records whether this is an inner OMP region.  */
790 tree arg = inner ? boolean_true_node : boolean_false_node;
791 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
792 1, arg);
793 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
794 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
796 /* Create predecessor edges of the dispatcher. */
797 FOR_EACH_VEC_ELT (*bbs, idx, bb)
799 if (bb_to_omp_idx
800 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
801 continue;
802 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
807 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
810 /* Join all the blocks in the flowgraph. */
812 static void
813 make_edges (void)
815 basic_block bb;
816 struct omp_region *cur_region = NULL;
/* Blocks ending in a computed goto / in a call needing abnormal edges;
   collected here and factored through dispatcher blocks below.  */
817 auto_vec<basic_block> ab_edge_goto;
818 auto_vec<basic_block> ab_edge_call;
819 int *bb_to_omp_idx = NULL;
820 int cur_omp_region_idx = 0;
822 /* Create an edge from entry to the first block with executable
823 statements in it. */
824 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
825 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
826 EDGE_FALLTHRU);
828 /* Traverse the basic block array placing edges. */
829 FOR_EACH_BB_FN (bb, cfun)
831 gimple last = last_stmt (bb);
832 bool fallthru;
834 if (bb_to_omp_idx)
835 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
837 if (last)
839 enum gimple_code code = gimple_code (last);
840 switch (code)
842 case GIMPLE_GOTO:
843 if (make_goto_expr_edges (bb))
844 ab_edge_goto.safe_push (bb);
845 fallthru = false;
846 break;
847 case GIMPLE_RETURN:
849 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
850 e->goto_locus = gimple_location (last);
851 fallthru = false;
853 break;
854 case GIMPLE_COND:
855 make_cond_expr_edges (bb);
856 fallthru = false;
857 break;
858 case GIMPLE_SWITCH:
859 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
860 fallthru = false;
861 break;
862 case GIMPLE_RESX:
863 make_eh_edges (last);
864 fallthru = false;
865 break;
866 case GIMPLE_EH_DISPATCH:
867 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
868 break;
870 case GIMPLE_CALL:
871 /* If this function receives a nonlocal goto, then we need to
872 make edges from this call site to all the nonlocal goto
873 handlers. */
874 if (stmt_can_make_abnormal_goto (last))
875 ab_edge_call.safe_push (bb);
877 /* If this statement has reachable exception handlers, then
878 create abnormal edges to them. */
879 make_eh_edges (last);
881 /* BUILTIN_RETURN is really a return statement. */
882 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
884 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
885 fallthru = false;
887 /* Some calls are known not to return. */
888 else
889 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
890 break;
892 case GIMPLE_ASSIGN:
893 /* A GIMPLE_ASSIGN may throw internally and thus be considered
894 control-altering. */
895 if (is_ctrl_altering_stmt (last))
896 make_eh_edges (last);
897 fallthru = true;
898 break;
900 case GIMPLE_ASM:
901 make_gimple_asm_edges (bb);
902 fallthru = true;
903 break;
905 CASE_GIMPLE_OMP:
906 fallthru = make_gimple_omp_edges (bb, &cur_region,
907 &cur_omp_region_idx);
/* Lazily allocate the block -> OMP-region-index map the first time
   we actually enter an OMP region.  */
908 if (cur_region && bb_to_omp_idx == NULL)
909 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
910 break;
912 case GIMPLE_TRANSACTION:
914 tree abort_label
915 = gimple_transaction_label (as_a <gtransaction *> (last));
916 if (abort_label)
917 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
918 fallthru = true;
920 break;
922 default:
923 gcc_assert (!stmt_ends_bb_p (last));
924 fallthru = true;
927 else
928 fallthru = true;
930 if (fallthru)
931 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
934 /* Computed gotos are hell to deal with, especially if there are
935 lots of them with a large number of destinations. So we factor
936 them to a common computed goto location before we build the
937 edge list. After we convert back to normal form, we will un-factor
938 the computed gotos since factoring introduces an unwanted jump.
939 For non-local gotos and abnormal edges from calls to calls that return
940 twice or forced labels, factor the abnormal edges too, by having all
941 abnormal edges from the calls go to a common artificial basic block
942 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
943 basic block to all forced labels and calls returning twice.
944 We do this per-OpenMP structured block, because those regions
945 are guaranteed to be single entry single exit by the standard,
946 so it is not allowed to enter or exit such regions abnormally this way,
947 thus all computed gotos, non-local gotos and setjmp/longjmp calls
948 must not transfer control across SESE region boundaries. */
949 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
951 gimple_stmt_iterator gsi;
/* Two dispatcher slots (call-style, computed-goto-style); with OMP
   regions there is one such pair per region index.  */
952 basic_block dispatcher_bb_array[2] = { NULL, NULL };
953 basic_block *dispatcher_bbs = dispatcher_bb_array;
954 int count = n_basic_blocks_for_fn (cfun);
956 if (bb_to_omp_idx)
957 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
959 FOR_EACH_BB_FN (bb, cfun)
961 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
963 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
964 tree target;
966 if (!label_stmt)
967 break;
969 target = gimple_label_label (label_stmt);
971 /* Make an edge to every label block that has been marked as a
972 potential target for a computed goto or a non-local goto. */
973 if (FORCED_LABEL (target))
974 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
975 &ab_edge_goto, true);
976 if (DECL_NONLOCAL (target))
978 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
979 &ab_edge_call, false);
980 break;
/* GSI now rests on the first non-label statement (possibly after
   skipping one debug stmt) — check for setjmp-like calls there.  */
984 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
985 gsi_next_nondebug (&gsi);
986 if (!gsi_end_p (gsi))
988 /* Make an edge to every setjmp-like call. */
989 gimple call_stmt = gsi_stmt (gsi);
990 if (is_gimple_call (call_stmt)
991 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
992 || gimple_call_builtin_p (call_stmt,
993 BUILT_IN_SETJMP_RECEIVER)))
994 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
995 &ab_edge_call, false);
999 if (bb_to_omp_idx)
1000 XDELETE (dispatcher_bbs);
1003 XDELETE (bb_to_omp_idx);
1005 free_omp_regions ();
1007 /* Fold COND_EXPR_COND of each COND_EXPR. */
1008 fold_cond_expr_cond ();
1011 /* Find the next available discriminator value for LOCUS. The
1012 discriminator distinguishes among several basic blocks that
1013 share a common locus, allowing for more accurate sample-based
1014 profiling. */
1016 static int
1017 next_discriminator_for_locus (location_t locus)
1019 struct locus_discrim_map item;
1020 struct locus_discrim_map **slot;
1022 item.locus = locus;
1023 item.discriminator = 0;
/* Look up (or reserve) the per-line slot; the hasher keys on
   LOCATION_LINE only, so all loci on the same line share one entry.  */
1024 slot = discriminator_per_locus->find_slot_with_hash (
1025 &item, LOCATION_LINE (locus), INSERT);
1026 gcc_assert (slot);
/* First time we see this line: allocate and zero-init the entry
   (freed later via the typed_free_remove base of the hasher).  */
1027 if (*slot == HTAB_EMPTY_ENTRY)
1029 *slot = XNEW (struct locus_discrim_map);
1030 gcc_assert (*slot);
1031 (*slot)->locus = locus;
1032 (*slot)->discriminator = 0;
/* Hand out discriminators 1, 2, 3, ... per source line.  */
1034 (*slot)->discriminator++;
1035 return (*slot)->discriminator;
1038 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1040 static bool
1041 same_line_p (location_t locus1, location_t locus2)
1043 expanded_location from, to;
1045 if (locus1 == locus2)
1046 return true;
1048 from = expand_location (locus1);
1049 to = expand_location (locus2);
1051 if (from.line != to.line)
1052 return false;
1053 if (from.file == to.file)
1054 return true;
1055 return (from.file != NULL
1056 && to.file != NULL
1057 && filename_cmp (from.file, to.file) == 0);
1060 /* Assign discriminators to each basic block. */
1062 static void
1063 assign_discriminators (void)
1065 basic_block bb;
1067 FOR_EACH_BB_FN (bb, cfun)
1069 edge e;
1070 edge_iterator ei;
1071 gimple last = last_stmt (bb);
1072 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1074 if (locus == UNKNOWN_LOCATION)
1075 continue;
/* For each successor sharing BB's source line, make the pair
   distinguishable: give BB a discriminator if the successor already
   has one, otherwise give the successor a fresh one.  */
1077 FOR_EACH_EDGE (e, ei, bb->succs)
1079 gimple first = first_non_label_stmt (e->dest);
1080 gimple last = last_stmt (e->dest);
1081 if ((first && same_line_p (locus, gimple_location (first)))
1082 || (last && same_line_p (locus, gimple_location (last))))
1084 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1085 bb->discriminator = next_discriminator_for_locus (locus);
1086 else
1087 e->dest->discriminator = next_discriminator_for_locus (locus);
1093 /* Create the edges for a GIMPLE_COND starting at block BB. */
1095 static void
1096 make_cond_expr_edges (basic_block bb)
1098 gcond *entry = as_a <gcond *> (last_stmt (bb));
1099 gimple then_stmt, else_stmt;
1100 basic_block then_bb, else_bb;
1101 tree then_label, else_label;
1102 edge e;
1104 gcc_assert (entry);
1105 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1107 /* Entry basic blocks for each component. */
1108 then_label = gimple_cond_true_label (entry);
1109 else_label = gimple_cond_false_label (entry);
1110 then_bb = label_to_block (then_label);
1111 else_bb = label_to_block (else_label);
1112 then_stmt = first_stmt (then_bb);
1113 else_stmt = first_stmt (else_bb);
1115 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1116 e->goto_locus = gimple_location (then_stmt);
1117 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
/* NOTE(review): only the second make_edge result is NULL-checked —
   presumably the false edge can duplicate the true edge when both
   labels map to the same block; confirm make_edge's NULL-on-duplicate
   behavior before changing this asymmetry.  */
1118 if (e)
1119 e->goto_locus = gimple_location (else_stmt);
1121 /* We do not need the labels anymore. */
1122 gimple_cond_set_true_label (entry, NULL_TREE);
1123 gimple_cond_set_false_label (entry, NULL_TREE);
1127 /* Called for each element in the hash table (P) as we delete the
1128 edge to cases hash table.
1130 Clear all the TREE_CHAINs to prevent problems with copying of
1131 SWITCH_EXPRs and structure sharing rules, then free the hash table
1132 element. */
1134 bool
1135 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1137 tree t, next;
1139 for (t = value; t; t = next)
1141 next = CASE_CHAIN (t);
1142 CASE_CHAIN (t) = NULL;
1145 return true;
/* Start recording information mapping edges to case labels.
   Allocates the edge_to_cases map (must not already be active) and the
   bitmap of switch-containing blocks touched while recording.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1158 /* Return nonzero if we are recording information for case labels. */
1160 static bool
1161 recording_case_labels_p (void)
1163 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Also re-groups the case
   labels of every switch whose block was touched while recording.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;

  /* Clear the CASE_CHAIN links before destroying the map.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;

  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      /* The block may have been removed since it was recorded.  */
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }

  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Each case label is pushed onto the front of the
	 chain for the edge it targets.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* After populating the table above, E must be present.  */
  return *edge_to_cases->get (e);
}
1231 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1233 static void
1234 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1236 size_t i, n;
1238 n = gimple_switch_num_labels (entry);
1240 for (i = 0; i < n; ++i)
1242 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1243 basic_block label_bb = label_to_block (lab);
1244 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST within function IFUN, or
   NULL if DEST has no (valid) entry in the label-to-block map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned DEST a fresh UID.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created (i.e. for a computed goto,
   whose targets are not statically known here).  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The goto is now fully represented by the edge; delete it.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
1298 /* Create edges for an asm statement with labels at block BB. */
1300 static void
1301 make_gimple_asm_edges (basic_block bb)
1303 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1304 int i, n = gimple_asm_nlabels (stmt);
1306 for (i = 0; i < n; ++i)
1308 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1309 basic_block label_bb = label_to_block (label);
1310 make_edge (bb, label_bb, 0);
1314 /*---------------------------------------------------------------------------
1315 Flowgraph analysis
1316 ---------------------------------------------------------------------------*/
1318 /* Cleanup useless labels in basic blocks. This is something we wish
1319 to do early because it allows us to group case labels before creating
1320 the edges for the CFG, and it speeds up block statement iterators in
1321 all passes later on.
1322 We rerun this pass after CFG is created, to get rid of the labels that
1323 are no longer referenced. After then we do not run it any more, since
1324 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block,
   allocated/freed by cleanup_dead_labels.  */
static struct label_record
{
  /* The label chosen as the block's leader (first user label if any,
     else the first label seen).  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1336 /* Given LABEL return the first label in the same basic block. */
1338 static tree
1339 main_block_label (tree label)
1341 basic_block bb = label_to_block (label);
1342 tree main_label = label_for_bb[bb->index].label;
1344 /* label_to_block possibly inserted undefined label into the chain. */
1345 if (!main_label)
1347 label_for_bb[bb->index].label = label;
1348 main_label = label;
1351 label_for_bb[bb->index].used = true;
1352 return main_label;
/* Clean up redundant labels within the exception tree: redirect every
   landing-pad and catch/allowed-exceptions label to the leading label
   of its block, keeping EH_LANDING_PAD_NR bookkeeping consistent.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label; move the
     landing-pad number over to the replacement label.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no labels to redirect.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:.  Cases
   targeting the default destination are dropped entirely.  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  Index 0 is the
     default label, so start at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      /* The merged entry is cleared and compacted away below.  */
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  Two-finger walk: J scans past the NULL
     slots left by merging, I writes the surviving labels densely.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1670 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1671 and scan the sorted vector of cases. Combine the ones jumping to the
1672 same label. */
1674 void
1675 group_case_labels (void)
1677 basic_block bb;
1679 FOR_EACH_BB_FN (bb, cfun)
1681 gimple stmt = last_stmt (bb);
1682 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1683 group_case_labels_stmt (as_a <gswitch *> (stmt));
/* Checks whether we can merge block B into block A, i.e. whether A's
   single fallthrough successor is B, B has no other predecessors, and
   nothing in either block (labels, PHIs, loop structure, locations)
   forbids the merge.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if an adjacent statement carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL, folding and updating each touched
   non-PHI statement afterwards.  On exit NAME has no remaining uses.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have made the statement unable to throw.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  Caller is expected to have checked
   gimple_can_merge_blocks_p first.  B's PHIs are eliminated, its labels
   removed or relocated, and its statement sequence appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label leaves the pad without one.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2000 /* Return the one of two successors of BB that is not reachable by a
2001 complex edge, if there is one. Else, return BB. We use
2002 this in optimizations that use post-dominators for their heuristics,
2003 to catch the cases in C++ where function calls are involved. */
2005 basic_block
2006 single_noncomplex_succ (basic_block bb)
2008 edge e0, e1;
2009 if (EDGE_COUNT (bb->succs) != 2)
2010 return bb;
2012 e0 = EDGE_SUCC (bb, 0);
2013 e1 = EDGE_SUCC (bb, 1);
2014 if (e0->flags & EDGE_COMPLEX)
2015 return e1->dest;
2016 if (e1->flags & EDGE_COMPLEX)
2017 return e0->dest;
2019 return bb;
2022 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2024 void
2025 notice_special_calls (gcall *call)
2027 int flags = gimple_call_flags (call);
2029 if (flags & ECF_MAY_BE_ALLOCA)
2030 cfun->calls_alloca = true;
2031 if (flags & ECF_RETURNS_TWICE)
2032 cfun->calls_setjmp = true;
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags (they are re-derived by rescanning the
   surviving calls).  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
2046 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2048 static void
2049 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2051 /* Since this block is no longer reachable, we can just delete all
2052 of its PHI nodes. */
2053 remove_phi_nodes (bb);
2055 /* Remove edges to BB's successors. */
2056 while (EDGE_COUNT (bb->succs) > 0)
2057 remove_edge (EDGE_SUCC (bb, 0));
/* Remove statements of basic block BB, then the block's PHIs and
   outgoing edges.  Forced/non-local labels are rescued into the
   previous block rather than deleted.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Relocate the label into the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* Removal may have invalidated the iterator; restart from the
	     (new) last statement in that case, else step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate selects a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* All control statements reaching here were handled above.  */
  gcc_unreachable ();
}
2189 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2190 statement, determine which of the outgoing edges will be taken out of the
2191 block. Return NULL if either edge may be taken. */
2193 static edge
2194 find_taken_edge_computed_goto (basic_block bb, tree val)
2196 basic_block dest;
2197 edge e = NULL;
2199 dest = label_to_block (val);
2200 if (dest)
2202 e = find_edge (bb, dest);
2203 gcc_assert (e != NULL);
2206 return e;
2209 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2210 statement, determine which of the two edges will be taken out of the
2211 block. Return NULL if either edge may be taken. */
2213 static edge
2214 find_taken_edge_cond_expr (basic_block bb, tree val)
2216 edge true_edge, false_edge;
2218 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2220 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2221 return (integer_zerop (val) ? false_edge : true_edge);
2224 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2225 statement, determine which edge will be taken out of the block. Return
2226 NULL if any edge may be taken. */
2228 static edge
2229 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2230 tree val)
2232 basic_block dest_bb;
2233 edge e;
2234 tree taken_case;
2236 taken_case = find_case_label_for_value (switch_stmt, val);
2237 dest_bb = label_to_block (CASE_LABEL (taken_case));
2239 e = find_edge (bb, dest_bb);
2240 gcc_assert (e);
2241 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over labels [1, n); index 0 is the default label.
     LOW starts at 0 so the default is never directly examined.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No case matched VAL; the default label is taken.  */
  return default_case;
}
/* Dump a basic block on stderr (debugger convenience wrapper).  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
/* Dump basic block with index N on stderr and return it, so the block
   can be further inspected from a debugger.  */

basic_block
gimple_debug_bb_n (int n)
{
  gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
  return BASIC_BLOCK_FOR_FN (cfun, n);
}
2306 /* Dump the CFG on stderr.
2308 FLAGS are the same used by the tree dumping functions
2309 (see TDF_* in dumpfile.h). */
2311 void
2312 gimple_debug_cfg (int flags)
2314 gimple_dump_cfg (stderr, flags);
2318 /* Dump the program showing basic block boundaries on the given FILE.
2320 FLAGS are the same used by the tree dumping functions (see TDF_* in
2321 tree.h). */
2323 void
2324 gimple_dump_cfg (FILE *file, int flags)
2326 if (flags & TDF_DETAILS)
2328 dump_function_header (file, current_function_decl, flags);
2329 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2330 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2331 last_basic_block_for_fn (cfun));
2333 brief_dump_cfg (file, flags | TDF_COMMENT);
2334 fprintf (file, "\n");
2337 if (flags & TDF_STATS)
2338 dump_cfg_stats (file);
2340 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2344 /* Dump CFG statistics on FILE. */
2346 void
2347 dump_cfg_stats (FILE *file)
2349 static long max_num_merged_labels = 0;
2350 unsigned long size, total = 0;
2351 long num_edges;
2352 basic_block bb;
2353 const char * const fmt_str = "%-30s%-13s%12s\n";
2354 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2355 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2356 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2357 const char *funcname = current_function_name ();
2359 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2361 fprintf (file, "---------------------------------------------------------\n");
2362 fprintf (file, fmt_str, "", " Number of ", "Memory");
2363 fprintf (file, fmt_str, "", " instances ", "used ");
2364 fprintf (file, "---------------------------------------------------------\n");
2366 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2367 total += size;
2368 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2369 SCALE (size), LABEL (size));
2371 num_edges = 0;
2372 FOR_EACH_BB_FN (bb, cfun)
2373 num_edges += EDGE_COUNT (bb->succs);
2374 size = num_edges * sizeof (struct edge_def);
2375 total += size;
2376 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2378 fprintf (file, "---------------------------------------------------------\n");
2379 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2380 LABEL (total));
2381 fprintf (file, "---------------------------------------------------------\n");
2382 fprintf (file, "\n");
2384 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2385 max_num_merged_labels = cfg_stats.num_merged_labels;
2387 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2388 cfg_stats.num_merged_labels, max_num_merged_labels);
2390 fprintf (file, "\n");
2394 /* Dump CFG statistics on stderr. Keep extern so that it's always
2395 linked in the final executable. */
2397 DEBUG_FUNCTION void
2398 debug_cfg_stats (void)
2400 dump_cfg_stats (stderr);
2403 /*---------------------------------------------------------------------------
2404 Miscellaneous helpers
2405 ---------------------------------------------------------------------------*/
2407 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2408 flow. Transfers of control flow associated with EH are excluded. */
2410 static bool
2411 call_can_make_abnormal_goto (gimple t)
2413 /* If the function has no non-local labels, then a call cannot make an
2414 abnormal transfer of control. */
2415 if (!cfun->has_nonlocal_label
2416 && !cfun->calls_setjmp)
2417 return false;
2419 /* Likewise if the call has no side effects. */
2420 if (!gimple_has_side_effects (t))
2421 return false;
2423 /* Likewise if the called function is leaf. */
2424 if (gimple_call_flags (t) & ECF_LEAF)
2425 return false;
2427 return true;
2431 /* Return true if T can make an abnormal transfer of control flow.
2432 Transfers of control flow associated with EH are excluded. */
2434 bool
2435 stmt_can_make_abnormal_goto (gimple t)
2437 if (computed_goto_p (t))
2438 return true;
2439 if (is_gimple_call (t))
2440 return call_can_make_abnormal_goto (t);
2441 return false;
2445 /* Return true if T represents a stmt that always transfers control. */
2447 bool
2448 is_ctrl_stmt (gimple t)
2450 switch (gimple_code (t))
2452 case GIMPLE_COND:
2453 case GIMPLE_SWITCH:
2454 case GIMPLE_GOTO:
2455 case GIMPLE_RETURN:
2456 case GIMPLE_RESX:
2457 return true;
2458 default:
2459 return false;
2464 /* Return true if T is a statement that may alter the flow of control
2465 (e.g., a call to a non-returning function). */
2467 bool
2468 is_ctrl_altering_stmt (gimple t)
2470 gcc_assert (t);
2472 switch (gimple_code (t))
2474 case GIMPLE_CALL:
2475 /* Per stmt call flag indicates whether the call could alter
2476 controlflow. */
2477 if (gimple_call_ctrl_altering_p (t))
2478 return true;
2479 break;
2481 case GIMPLE_EH_DISPATCH:
2482 /* EH_DISPATCH branches to the individual catch handlers at
2483 this level of a try or allowed-exceptions region. It can
2484 fallthru to the next statement as well. */
2485 return true;
2487 case GIMPLE_ASM:
2488 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2489 return true;
2490 break;
2492 CASE_GIMPLE_OMP:
2493 /* OpenMP directives alter control flow. */
2494 return true;
2496 case GIMPLE_TRANSACTION:
2497 /* A transaction start alters control flow. */
2498 return true;
2500 default:
2501 break;
2504 /* If a statement can throw, it alters control flow. */
2505 return stmt_can_throw_internal (t);
2509 /* Return true if T is a simple local goto. */
2511 bool
2512 simple_goto_p (gimple t)
2514 return (gimple_code (t) == GIMPLE_GOTO
2515 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2519 /* Return true if STMT should start a new basic block. PREV_STMT is
2520 the statement preceding STMT. It is used when STMT is a label or a
2521 case label. Labels should only start a new basic block if their
2522 previous statement wasn't a label. Otherwise, sequence of labels
2523 would generate unnecessary basic blocks that only contain a single
2524 label. */
2526 static inline bool
2527 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2529 if (stmt == NULL)
2530 return false;
2532 /* Labels start a new basic block only if the preceding statement
2533 wasn't a label of the same type. This prevents the creation of
2534 consecutive blocks that have nothing but a single label. */
2535 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2537 /* Nonlocal and computed GOTO targets always start a new block. */
2538 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2539 || FORCED_LABEL (gimple_label_label (label_stmt)))
2540 return true;
2542 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2544 if (DECL_NONLOCAL (gimple_label_label (
2545 as_a <glabel *> (prev_stmt))))
2546 return true;
2548 cfg_stats.num_merged_labels++;
2549 return false;
2551 else
2552 return true;
2554 else if (gimple_code (stmt) == GIMPLE_CALL
2555 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2556 /* setjmp acts similar to a nonlocal GOTO target and thus should
2557 start a new block. */
2558 return true;
2560 return false;
2564 /* Return true if T should end a basic block. */
2566 bool
2567 stmt_ends_bb_p (gimple t)
2569 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2572 /* Remove block annotations and other data structures. */
2574 void
2575 delete_tree_cfg_annotations (void)
2577 vec_free (label_to_block_map_for_fn (cfun));
2581 /* Return the first statement in basic block BB. */
2583 gimple
2584 first_stmt (basic_block bb)
2586 gimple_stmt_iterator i = gsi_start_bb (bb);
2587 gimple stmt = NULL;
2589 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2591 gsi_next (&i);
2592 stmt = NULL;
2594 return stmt;
2597 /* Return the first non-label statement in basic block BB. */
2599 static gimple
2600 first_non_label_stmt (basic_block bb)
2602 gimple_stmt_iterator i = gsi_start_bb (bb);
2603 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2604 gsi_next (&i);
2605 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2608 /* Return the last statement in basic block BB. */
2610 gimple
2611 last_stmt (basic_block bb)
2613 gimple_stmt_iterator i = gsi_last_bb (bb);
2614 gimple stmt = NULL;
2616 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2618 gsi_prev (&i);
2619 stmt = NULL;
2621 return stmt;
2624 /* Return the last statement of an otherwise empty block. Return NULL
2625 if the block is totally empty, or if it contains more than one
2626 statement. */
2628 gimple
2629 last_and_only_stmt (basic_block bb)
2631 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2632 gimple last, prev;
2634 if (gsi_end_p (i))
2635 return NULL;
2637 last = gsi_stmt (i);
2638 gsi_prev_nondebug (&i);
2639 if (gsi_end_p (i))
2640 return last;
2642 /* Empty statements should no longer appear in the instruction stream.
2643 Everything that might have appeared before should be deleted by
2644 remove_useless_stmts, and the optimizers should just gsi_remove
2645 instead of smashing with build_empty_stmt.
2647 Thus the only thing that should appear here in a block containing
2648 one executable statement is a label. */
2649 prev = gsi_stmt (i);
2650 if (gimple_code (prev) == GIMPLE_LABEL)
2651 return last;
2652 else
2653 return NULL;
2656 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2658 static void
2659 reinstall_phi_args (edge new_edge, edge old_edge)
2661 edge_var_map *vm;
2662 int i;
2663 gphi_iterator phis;
2665 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2666 if (!v)
2667 return;
2669 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2670 v->iterate (i, &vm) && !gsi_end_p (phis);
2671 i++, gsi_next (&phis))
2673 gphi *phi = phis.phi ();
2674 tree result = redirect_edge_var_map_result (vm);
2675 tree arg = redirect_edge_var_map_def (vm);
2677 gcc_assert (result == gimple_phi_result (phi));
2679 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2682 redirect_edge_var_map_clear (old_edge);
2685 /* Returns the basic block after which the new basic block created
2686 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2687 near its "logical" location. This is of most help to humans looking
2688 at debugging dumps. */
2690 basic_block
2691 split_edge_bb_loc (edge edge_in)
2693 basic_block dest = edge_in->dest;
2694 basic_block dest_prev = dest->prev_bb;
2696 if (dest_prev)
2698 edge e = find_edge (dest_prev, dest);
2699 if (e && !(e->flags & EDGE_COMPLEX))
2700 return edge_in->src;
2702 return dest_prev;
2705 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2706 Abort on abnormal edges. */
2708 static basic_block
2709 gimple_split_edge (edge edge_in)
2711 basic_block new_bb, after_bb, dest;
2712 edge new_edge, e;
2714 /* Abnormal edges cannot be split. */
2715 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2717 dest = edge_in->dest;
2719 after_bb = split_edge_bb_loc (edge_in);
2721 new_bb = create_empty_bb (after_bb);
2722 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2723 new_bb->count = edge_in->count;
2724 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2725 new_edge->probability = REG_BR_PROB_BASE;
2726 new_edge->count = edge_in->count;
2728 e = redirect_edge_and_branch (edge_in, new_bb);
2729 gcc_assert (e == edge_in);
2730 reinstall_phi_args (new_edge, e);
2732 return new_bb;
2736 /* Verify properties of the address expression T with base object BASE. */
2738 static tree
2739 verify_address (tree t, tree base)
2741 bool old_constant;
2742 bool old_side_effects;
2743 bool new_constant;
2744 bool new_side_effects;
2746 old_constant = TREE_CONSTANT (t);
2747 old_side_effects = TREE_SIDE_EFFECTS (t);
2749 recompute_tree_invariant_for_addr_expr (t);
2750 new_side_effects = TREE_SIDE_EFFECTS (t);
2751 new_constant = TREE_CONSTANT (t);
2753 if (old_constant != new_constant)
2755 error ("constant not recomputed when ADDR_EXPR changed");
2756 return t;
2758 if (old_side_effects != new_side_effects)
2760 error ("side effects not recomputed when ADDR_EXPR changed");
2761 return t;
2764 if (!(TREE_CODE (base) == VAR_DECL
2765 || TREE_CODE (base) == PARM_DECL
2766 || TREE_CODE (base) == RESULT_DECL))
2767 return NULL_TREE;
2769 if (DECL_GIMPLE_REG_P (base))
2771 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2772 return base;
2775 return NULL_TREE;
2778 /* Callback for walk_tree, check that all elements with address taken are
2779 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2780 inside a PHI node. */
2782 static tree
2783 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2785 tree t = *tp, x;
2787 if (TYPE_P (t))
2788 *walk_subtrees = 0;
2790 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2791 #define CHECK_OP(N, MSG) \
2792 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2793 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2795 switch (TREE_CODE (t))
2797 case SSA_NAME:
2798 if (SSA_NAME_IN_FREE_LIST (t))
2800 error ("SSA name in freelist but still referenced");
2801 return *tp;
2803 break;
2805 case INDIRECT_REF:
2806 error ("INDIRECT_REF in gimple IL");
2807 return t;
2809 case MEM_REF:
2810 x = TREE_OPERAND (t, 0);
2811 if (!POINTER_TYPE_P (TREE_TYPE (x))
2812 || !is_gimple_mem_ref_addr (x))
2814 error ("invalid first operand of MEM_REF");
2815 return x;
2817 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2818 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2820 error ("invalid offset operand of MEM_REF");
2821 return TREE_OPERAND (t, 1);
2823 if (TREE_CODE (x) == ADDR_EXPR
2824 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2825 return x;
2826 *walk_subtrees = 0;
2827 break;
2829 case ASSERT_EXPR:
2830 x = fold (ASSERT_EXPR_COND (t));
2831 if (x == boolean_false_node)
2833 error ("ASSERT_EXPR with an always-false condition");
2834 return *tp;
2836 break;
2838 case MODIFY_EXPR:
2839 error ("MODIFY_EXPR not expected while having tuples");
2840 return *tp;
2842 case ADDR_EXPR:
2844 tree tem;
2846 gcc_assert (is_gimple_address (t));
2848 /* Skip any references (they will be checked when we recurse down the
2849 tree) and ensure that any variable used as a prefix is marked
2850 addressable. */
2851 for (x = TREE_OPERAND (t, 0);
2852 handled_component_p (x);
2853 x = TREE_OPERAND (x, 0))
2856 if ((tem = verify_address (t, x)))
2857 return tem;
2859 if (!(TREE_CODE (x) == VAR_DECL
2860 || TREE_CODE (x) == PARM_DECL
2861 || TREE_CODE (x) == RESULT_DECL))
2862 return NULL;
2864 if (!TREE_ADDRESSABLE (x))
2866 error ("address taken, but ADDRESSABLE bit not set");
2867 return x;
2870 break;
2873 case COND_EXPR:
2874 x = COND_EXPR_COND (t);
2875 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2877 error ("non-integral used in condition");
2878 return x;
2880 if (!is_gimple_condexpr (x))
2882 error ("invalid conditional operand");
2883 return x;
2885 break;
2887 case NON_LVALUE_EXPR:
2888 case TRUTH_NOT_EXPR:
2889 gcc_unreachable ();
2891 CASE_CONVERT:
2892 case FIX_TRUNC_EXPR:
2893 case FLOAT_EXPR:
2894 case NEGATE_EXPR:
2895 case ABS_EXPR:
2896 case BIT_NOT_EXPR:
2897 CHECK_OP (0, "invalid operand to unary operator");
2898 break;
2900 case REALPART_EXPR:
2901 case IMAGPART_EXPR:
2902 case BIT_FIELD_REF:
2903 if (!is_gimple_reg_type (TREE_TYPE (t)))
2905 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2906 return t;
2909 if (TREE_CODE (t) == BIT_FIELD_REF)
2911 tree t0 = TREE_OPERAND (t, 0);
2912 tree t1 = TREE_OPERAND (t, 1);
2913 tree t2 = TREE_OPERAND (t, 2);
2914 if (!tree_fits_uhwi_p (t1)
2915 || !tree_fits_uhwi_p (t2))
2917 error ("invalid position or size operand to BIT_FIELD_REF");
2918 return t;
2920 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2921 && (TYPE_PRECISION (TREE_TYPE (t))
2922 != tree_to_uhwi (t1)))
2924 error ("integral result type precision does not match "
2925 "field size of BIT_FIELD_REF");
2926 return t;
2928 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2929 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2930 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2931 != tree_to_uhwi (t1)))
2933 error ("mode precision of non-integral result does not "
2934 "match field size of BIT_FIELD_REF");
2935 return t;
2937 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2938 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2939 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2941 error ("position plus size exceeds size of referenced object in "
2942 "BIT_FIELD_REF");
2943 return t;
2946 t = TREE_OPERAND (t, 0);
2948 /* Fall-through. */
2949 case COMPONENT_REF:
2950 case ARRAY_REF:
2951 case ARRAY_RANGE_REF:
2952 case VIEW_CONVERT_EXPR:
2953 /* We have a nest of references. Verify that each of the operands
2954 that determine where to reference is either a constant or a variable,
2955 verify that the base is valid, and then show we've already checked
2956 the subtrees. */
2957 while (handled_component_p (t))
2959 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2960 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2961 else if (TREE_CODE (t) == ARRAY_REF
2962 || TREE_CODE (t) == ARRAY_RANGE_REF)
2964 CHECK_OP (1, "invalid array index");
2965 if (TREE_OPERAND (t, 2))
2966 CHECK_OP (2, "invalid array lower bound");
2967 if (TREE_OPERAND (t, 3))
2968 CHECK_OP (3, "invalid array stride");
2970 else if (TREE_CODE (t) == BIT_FIELD_REF
2971 || TREE_CODE (t) == REALPART_EXPR
2972 || TREE_CODE (t) == IMAGPART_EXPR)
2974 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2975 "REALPART_EXPR");
2976 return t;
2979 t = TREE_OPERAND (t, 0);
2982 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2984 error ("invalid reference prefix");
2985 return t;
2987 *walk_subtrees = 0;
2988 break;
2989 case PLUS_EXPR:
2990 case MINUS_EXPR:
2991 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2992 POINTER_PLUS_EXPR. */
2993 if (POINTER_TYPE_P (TREE_TYPE (t)))
2995 error ("invalid operand to plus/minus, type is a pointer");
2996 return t;
2998 CHECK_OP (0, "invalid operand to binary operator");
2999 CHECK_OP (1, "invalid operand to binary operator");
3000 break;
3002 case POINTER_PLUS_EXPR:
3003 /* Check to make sure the first operand is a pointer or reference type. */
3004 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3006 error ("invalid operand to pointer plus, first operand is not a pointer");
3007 return t;
3009 /* Check to make sure the second operand is a ptrofftype. */
3010 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3012 error ("invalid operand to pointer plus, second operand is not an "
3013 "integer type of appropriate width");
3014 return t;
3016 /* FALLTHROUGH */
3017 case LT_EXPR:
3018 case LE_EXPR:
3019 case GT_EXPR:
3020 case GE_EXPR:
3021 case EQ_EXPR:
3022 case NE_EXPR:
3023 case UNORDERED_EXPR:
3024 case ORDERED_EXPR:
3025 case UNLT_EXPR:
3026 case UNLE_EXPR:
3027 case UNGT_EXPR:
3028 case UNGE_EXPR:
3029 case UNEQ_EXPR:
3030 case LTGT_EXPR:
3031 case MULT_EXPR:
3032 case TRUNC_DIV_EXPR:
3033 case CEIL_DIV_EXPR:
3034 case FLOOR_DIV_EXPR:
3035 case ROUND_DIV_EXPR:
3036 case TRUNC_MOD_EXPR:
3037 case CEIL_MOD_EXPR:
3038 case FLOOR_MOD_EXPR:
3039 case ROUND_MOD_EXPR:
3040 case RDIV_EXPR:
3041 case EXACT_DIV_EXPR:
3042 case MIN_EXPR:
3043 case MAX_EXPR:
3044 case LSHIFT_EXPR:
3045 case RSHIFT_EXPR:
3046 case LROTATE_EXPR:
3047 case RROTATE_EXPR:
3048 case BIT_IOR_EXPR:
3049 case BIT_XOR_EXPR:
3050 case BIT_AND_EXPR:
3051 CHECK_OP (0, "invalid operand to binary operator");
3052 CHECK_OP (1, "invalid operand to binary operator");
3053 break;
3055 case CONSTRUCTOR:
3056 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3057 *walk_subtrees = 0;
3058 break;
3060 case CASE_LABEL_EXPR:
3061 if (CASE_CHAIN (t))
3063 error ("invalid CASE_CHAIN");
3064 return t;
3066 break;
3068 default:
3069 break;
3071 return NULL;
3073 #undef CHECK_OP
3077 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3078 Returns true if there is an error, otherwise false. */
3080 static bool
3081 verify_types_in_gimple_min_lval (tree expr)
3083 tree op;
3085 if (is_gimple_id (expr))
3086 return false;
3088 if (TREE_CODE (expr) != TARGET_MEM_REF
3089 && TREE_CODE (expr) != MEM_REF)
3091 error ("invalid expression for min lvalue");
3092 return true;
3095 /* TARGET_MEM_REFs are strange beasts. */
3096 if (TREE_CODE (expr) == TARGET_MEM_REF)
3097 return false;
3099 op = TREE_OPERAND (expr, 0);
3100 if (!is_gimple_val (op))
3102 error ("invalid operand in indirect reference");
3103 debug_generic_stmt (op);
3104 return true;
3106 /* Memory references now generally can involve a value conversion. */
3108 return false;
3111 /* Verify if EXPR is a valid GIMPLE reference expression. If
3112 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3113 if there is an error, otherwise false. */
3115 static bool
3116 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3118 while (handled_component_p (expr))
3120 tree op = TREE_OPERAND (expr, 0);
3122 if (TREE_CODE (expr) == ARRAY_REF
3123 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3125 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3126 || (TREE_OPERAND (expr, 2)
3127 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3128 || (TREE_OPERAND (expr, 3)
3129 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3131 error ("invalid operands to array reference");
3132 debug_generic_stmt (expr);
3133 return true;
3137 /* Verify if the reference array element types are compatible. */
3138 if (TREE_CODE (expr) == ARRAY_REF
3139 && !useless_type_conversion_p (TREE_TYPE (expr),
3140 TREE_TYPE (TREE_TYPE (op))))
3142 error ("type mismatch in array reference");
3143 debug_generic_stmt (TREE_TYPE (expr));
3144 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3145 return true;
3147 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3148 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3149 TREE_TYPE (TREE_TYPE (op))))
3151 error ("type mismatch in array range reference");
3152 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3153 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3154 return true;
3157 if ((TREE_CODE (expr) == REALPART_EXPR
3158 || TREE_CODE (expr) == IMAGPART_EXPR)
3159 && !useless_type_conversion_p (TREE_TYPE (expr),
3160 TREE_TYPE (TREE_TYPE (op))))
3162 error ("type mismatch in real/imagpart reference");
3163 debug_generic_stmt (TREE_TYPE (expr));
3164 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165 return true;
3168 if (TREE_CODE (expr) == COMPONENT_REF
3169 && !useless_type_conversion_p (TREE_TYPE (expr),
3170 TREE_TYPE (TREE_OPERAND (expr, 1))))
3172 error ("type mismatch in component reference");
3173 debug_generic_stmt (TREE_TYPE (expr));
3174 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3175 return true;
3178 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3180 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3181 that their operand is not an SSA name or an invariant when
3182 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3183 bug). Otherwise there is nothing to verify, gross mismatches at
3184 most invoke undefined behavior. */
3185 if (require_lvalue
3186 && (TREE_CODE (op) == SSA_NAME
3187 || is_gimple_min_invariant (op)))
3189 error ("conversion of an SSA_NAME on the left hand side");
3190 debug_generic_stmt (expr);
3191 return true;
3193 else if (TREE_CODE (op) == SSA_NAME
3194 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3196 error ("conversion of register to a different size");
3197 debug_generic_stmt (expr);
3198 return true;
3200 else if (!handled_component_p (op))
3201 return false;
3204 expr = op;
3207 if (TREE_CODE (expr) == MEM_REF)
3209 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3211 error ("invalid address operand in MEM_REF");
3212 debug_generic_stmt (expr);
3213 return true;
3215 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3216 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3218 error ("invalid offset operand in MEM_REF");
3219 debug_generic_stmt (expr);
3220 return true;
3223 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3225 if (!TMR_BASE (expr)
3226 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3228 error ("invalid address operand in TARGET_MEM_REF");
3229 return true;
3231 if (!TMR_OFFSET (expr)
3232 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3233 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3235 error ("invalid offset operand in TARGET_MEM_REF");
3236 debug_generic_stmt (expr);
3237 return true;
3241 return ((require_lvalue || !is_gimple_min_invariant (expr))
3242 && verify_types_in_gimple_min_lval (expr));
3245 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3246 list of pointer-to types that is trivially convertible to DEST. */
3248 static bool
3249 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3251 tree src;
3253 if (!TYPE_POINTER_TO (src_obj))
3254 return true;
3256 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3257 if (useless_type_conversion_p (dest, src))
3258 return true;
3260 return false;
3263 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3264 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3266 static bool
3267 valid_fixed_convert_types_p (tree type1, tree type2)
3269 return (FIXED_POINT_TYPE_P (type1)
3270 && (INTEGRAL_TYPE_P (type2)
3271 || SCALAR_FLOAT_TYPE_P (type2)
3272 || FIXED_POINT_TYPE_P (type2)));
3275 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3276 is a problem, otherwise false. */
3278 static bool
3279 verify_gimple_call (gcall *stmt)
3281 tree fn = gimple_call_fn (stmt);
3282 tree fntype, fndecl;
3283 unsigned i;
3285 if (gimple_call_internal_p (stmt))
3287 if (fn)
3289 error ("gimple call has two targets");
3290 debug_generic_stmt (fn);
3291 return true;
3294 else
3296 if (!fn)
3298 error ("gimple call has no target");
3299 return true;
3303 if (fn && !is_gimple_call_addr (fn))
3305 error ("invalid function in gimple call");
3306 debug_generic_stmt (fn);
3307 return true;
3310 if (fn
3311 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3312 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3313 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3315 error ("non-function in gimple call");
3316 return true;
3319 fndecl = gimple_call_fndecl (stmt);
3320 if (fndecl
3321 && TREE_CODE (fndecl) == FUNCTION_DECL
3322 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3323 && !DECL_PURE_P (fndecl)
3324 && !TREE_READONLY (fndecl))
3326 error ("invalid pure const state for function");
3327 return true;
3330 if (gimple_call_lhs (stmt)
3331 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3332 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3334 error ("invalid LHS in gimple call");
3335 return true;
3338 if (gimple_call_ctrl_altering_p (stmt)
3339 && gimple_call_lhs (stmt)
3340 && gimple_call_noreturn_p (stmt))
3342 error ("LHS in noreturn call");
3343 return true;
3346 fntype = gimple_call_fntype (stmt);
3347 if (fntype
3348 && gimple_call_lhs (stmt)
3349 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3350 TREE_TYPE (fntype))
3351 /* ??? At least C++ misses conversions at assignments from
3352 void * call results.
3353 ??? Java is completely off. Especially with functions
3354 returning java.lang.Object.
3355 For now simply allow arbitrary pointer type conversions. */
3356 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3357 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3359 error ("invalid conversion in gimple call");
3360 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3361 debug_generic_stmt (TREE_TYPE (fntype));
3362 return true;
3365 if (gimple_call_chain (stmt)
3366 && !is_gimple_val (gimple_call_chain (stmt)))
3368 error ("invalid static chain in gimple call");
3369 debug_generic_stmt (gimple_call_chain (stmt));
3370 return true;
3373 /* If there is a static chain argument, the call should either be
3374 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3375 if (gimple_call_chain (stmt)
3376 && fndecl
3377 && !DECL_STATIC_CHAIN (fndecl))
3379 error ("static chain with function that doesn%'t use one");
3380 return true;
3383 /* ??? The C frontend passes unpromoted arguments in case it
3384 didn't see a function declaration before the call. So for now
3385 leave the call arguments mostly unverified. Once we gimplify
3386 unit-at-a-time we have a chance to fix this. */
3388 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3390 tree arg = gimple_call_arg (stmt, i);
3391 if ((is_gimple_reg_type (TREE_TYPE (arg))
3392 && !is_gimple_val (arg))
3393 || (!is_gimple_reg_type (TREE_TYPE (arg))
3394 && !is_gimple_lvalue (arg)))
3396 error ("invalid argument to gimple call");
3397 debug_generic_expr (arg);
3398 return true;
3402 return false;
3405 /* Verifies the gimple comparison with the result type TYPE and
3406 the operands OP0 and OP1. */
3408 static bool
3409 verify_gimple_comparison (tree type, tree op0, tree op1)
3411 tree op0_type = TREE_TYPE (op0);
3412 tree op1_type = TREE_TYPE (op1);
3414 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3416 error ("invalid operands in gimple comparison");
3417 return true;
3420 /* For comparisons we do not have the operations type as the
3421 effective type the comparison is carried out in. Instead
3422 we require that either the first operand is trivially
3423 convertible into the second, or the other way around.
3424 Because we special-case pointers to void we allow
3425 comparisons of pointers with the same mode as well. */
3426 if (!useless_type_conversion_p (op0_type, op1_type)
3427 && !useless_type_conversion_p (op1_type, op0_type)
3428 && (!POINTER_TYPE_P (op0_type)
3429 || !POINTER_TYPE_P (op1_type)
3430 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3432 error ("mismatching comparison operand types");
3433 debug_generic_expr (op0_type);
3434 debug_generic_expr (op1_type);
3435 return true;
3438 /* The resulting type of a comparison may be an effective boolean type. */
3439 if (INTEGRAL_TYPE_P (type)
3440 && (TREE_CODE (type) == BOOLEAN_TYPE
3441 || TYPE_PRECISION (type) == 1))
3443 if (TREE_CODE (op0_type) == VECTOR_TYPE
3444 || TREE_CODE (op1_type) == VECTOR_TYPE)
3446 error ("vector comparison returning a boolean");
3447 debug_generic_expr (op0_type);
3448 debug_generic_expr (op1_type);
3449 return true;
3452 /* Or an integer vector type with the same size and element count
3453 as the comparison operand types. */
3454 else if (TREE_CODE (type) == VECTOR_TYPE
3455 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3457 if (TREE_CODE (op0_type) != VECTOR_TYPE
3458 || TREE_CODE (op1_type) != VECTOR_TYPE)
3460 error ("non-vector operands in vector comparison");
3461 debug_generic_expr (op0_type);
3462 debug_generic_expr (op1_type);
3463 return true;
3466 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3467 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3468 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3469 /* The result of a vector comparison is of signed
3470 integral type. */
3471 || TYPE_UNSIGNED (TREE_TYPE (type)))
3473 error ("invalid vector comparison resulting type");
3474 debug_generic_expr (type);
3475 return true;
3478 else
3480 error ("bogus comparison result type");
3481 debug_generic_expr (type);
3482 return true;
3485 return false;
3488 /* Verify a gimple assignment statement STMT with an unary rhs.
3489 Returns true if anything is wrong. */
3491 static bool
3492 verify_gimple_assign_unary (gassign *stmt)
3494 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3495 tree lhs = gimple_assign_lhs (stmt);
3496 tree lhs_type = TREE_TYPE (lhs);
3497 tree rhs1 = gimple_assign_rhs1 (stmt);
3498 tree rhs1_type = TREE_TYPE (rhs1);
/* The LHS of a unary operation must be an SSA register and the
   single operand a GIMPLE value.  */
3500 if (!is_gimple_reg (lhs))
3502 error ("non-register as LHS of unary operation");
3503 return true;
3506 if (!is_gimple_val (rhs1))
3508 error ("invalid operand in unary operation");
3509 return true;
3512 /* First handle conversions. */
3513 switch (rhs_code)
3515 CASE_CONVERT:
3517 /* Allow conversions from pointer type to integral type only if
3518 there is no sign or zero extension involved.
3519 For targets were the precision of ptrofftype doesn't match that
3520 of pointers we need to allow arbitrary conversions to ptrofftype. */
3521 if ((POINTER_TYPE_P (lhs_type)
3522 && INTEGRAL_TYPE_P (rhs1_type))
3523 || (POINTER_TYPE_P (rhs1_type)
3524 && INTEGRAL_TYPE_P (lhs_type)
3525 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3526 || ptrofftype_p (sizetype))))
3527 return false;
3529 /* Allow conversion from integral to offset type and vice versa. */
3530 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3531 && INTEGRAL_TYPE_P (rhs1_type))
3532 || (INTEGRAL_TYPE_P (lhs_type)
3533 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3534 return false;
3536 /* Otherwise assert we are converting between types of the
3537 same kind. */
3538 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3540 error ("invalid types in nop conversion");
3541 debug_generic_expr (lhs_type);
3542 debug_generic_expr (rhs1_type);
3543 return true;
3546 return false;
3549 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers and the pointed-to address spaces
   must actually differ, otherwise a plain conversion suffices.  */
3551 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3552 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3553 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3555 error ("invalid types in address space conversion");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 return true;
3561 return false;
3564 case FIXED_CONVERT_EXPR:
3566 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3567 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3569 error ("invalid types in fixed-point conversion");
3570 debug_generic_expr (lhs_type);
3571 debug_generic_expr (rhs1_type);
3572 return true;
3575 return false;
3578 case FLOAT_EXPR:
/* integer -> float, either as scalars or element-wise on vectors.  */
3580 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3581 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3582 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3584 error ("invalid types in conversion to floating point");
3585 debug_generic_expr (lhs_type);
3586 debug_generic_expr (rhs1_type);
3587 return true;
3590 return false;
3593 case FIX_TRUNC_EXPR:
/* float -> integer, the mirror image of FLOAT_EXPR above.  */
3595 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3596 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3597 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3599 error ("invalid types in conversion to integer");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 return true;
3605 return false;
3607 case REDUC_MAX_EXPR:
3608 case REDUC_MIN_EXPR:
3609 case REDUC_PLUS_EXPR:
/* A whole-vector reduction yields a single element value.  */
3610 if (!VECTOR_TYPE_P (rhs1_type)
3611 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3613 error ("reduction should convert from vector to element type");
3614 debug_generic_expr (lhs_type);
3615 debug_generic_expr (rhs1_type);
3616 return true;
3618 return false;
3620 case VEC_UNPACK_HI_EXPR:
3621 case VEC_UNPACK_LO_EXPR:
3622 case VEC_UNPACK_FLOAT_HI_EXPR:
3623 case VEC_UNPACK_FLOAT_LO_EXPR:
3624 /* FIXME. */
3625 return false;
/* These codes keep the operand type; they fall through to the
   common no-conversion check below.  */
3627 case NEGATE_EXPR:
3628 case ABS_EXPR:
3629 case BIT_NOT_EXPR:
3630 case PAREN_EXPR:
3631 case CONJ_EXPR:
3632 break;
3634 default:
3635 gcc_unreachable ();
3638 /* For the remaining codes assert there is no conversion involved. */
3639 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3641 error ("non-trivial conversion in unary operation");
3642 debug_generic_expr (lhs_type);
3643 debug_generic_expr (rhs1_type);
3644 return true;
3647 return false;
3650 /* Verify a gimple assignment statement STMT with a binary rhs.
3651 Returns true if anything is wrong. */
3653 static bool
3654 verify_gimple_assign_binary (gassign *stmt)
3656 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3657 tree lhs = gimple_assign_lhs (stmt);
3658 tree lhs_type = TREE_TYPE (lhs);
3659 tree rhs1 = gimple_assign_rhs1 (stmt);
3660 tree rhs1_type = TREE_TYPE (rhs1);
3661 tree rhs2 = gimple_assign_rhs2 (stmt);
3662 tree rhs2_type = TREE_TYPE (rhs2);
/* The LHS must be a register and both operands GIMPLE values.  */
3664 if (!is_gimple_reg (lhs))
3666 error ("non-register as LHS of binary operation");
3667 return true;
3670 if (!is_gimple_val (rhs1)
3671 || !is_gimple_val (rhs2))
3673 error ("invalid operands in binary operation");
3674 return true;
3677 /* First handle operations that involve different types. */
3678 switch (rhs_code)
3680 case COMPLEX_EXPR:
/* A complex value is built from two scalar (integral or float)
   parts; the result must be a COMPLEX_TYPE.  */
3682 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3683 || !(INTEGRAL_TYPE_P (rhs1_type)
3684 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3685 || !(INTEGRAL_TYPE_P (rhs2_type)
3686 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3688 error ("type mismatch in complex expression");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 debug_generic_expr (rhs2_type);
3692 return true;
3695 return false;
3698 case LSHIFT_EXPR:
3699 case RSHIFT_EXPR:
3700 case LROTATE_EXPR:
3701 case RROTATE_EXPR:
3703 /* Shifts and rotates are ok on integral types, fixed point
3704 types and integer vector types. */
3705 if ((!INTEGRAL_TYPE_P (rhs1_type)
3706 && !FIXED_POINT_TYPE_P (rhs1_type)
3707 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3708 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3709 || (!INTEGRAL_TYPE_P (rhs2_type)
3710 /* Vector shifts of vectors are also ok. */
3711 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3712 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3713 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3714 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3715 || !useless_type_conversion_p (lhs_type, rhs1_type))
3717 error ("type mismatch in shift expression");
3718 debug_generic_expr (lhs_type);
3719 debug_generic_expr (rhs1_type);
3720 debug_generic_expr (rhs2_type);
3721 return true;
3724 return false;
3727 case WIDEN_LSHIFT_EXPR:
/* Scalar widening shift: result precision must be at least twice
   the operand precision; shift count must be constant.  */
3729 if (!INTEGRAL_TYPE_P (lhs_type)
3730 || !INTEGRAL_TYPE_P (rhs1_type)
3731 || TREE_CODE (rhs2) != INTEGER_CST
3732 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3734 error ("type mismatch in widening vector shift expression");
3735 debug_generic_expr (lhs_type);
3736 debug_generic_expr (rhs1_type);
3737 debug_generic_expr (rhs2_type);
3738 return true;
3741 return false;
3744 case VEC_WIDEN_LSHIFT_HI_EXPR:
3745 case VEC_WIDEN_LSHIFT_LO_EXPR:
/* Same as above, element-wise on integer vectors.  */
3747 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3748 || TREE_CODE (lhs_type) != VECTOR_TYPE
3749 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3750 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3751 || TREE_CODE (rhs2) != INTEGER_CST
3752 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3753 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3755 error ("type mismatch in widening vector shift expression");
3756 debug_generic_expr (lhs_type);
3757 debug_generic_expr (rhs1_type);
3758 debug_generic_expr (rhs2_type);
3759 return true;
3762 return false;
3765 case PLUS_EXPR:
3766 case MINUS_EXPR:
/* For vector plus/minus compare the element types; pointers may
   never appear (POINTER_PLUS_EXPR is the pointer form).  */
3768 tree lhs_etype = lhs_type;
3769 tree rhs1_etype = rhs1_type;
3770 tree rhs2_etype = rhs2_type;
3771 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3773 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3774 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3776 error ("invalid non-vector operands to vector valued plus");
3777 return true;
3779 lhs_etype = TREE_TYPE (lhs_type);
3780 rhs1_etype = TREE_TYPE (rhs1_type);
3781 rhs2_etype = TREE_TYPE (rhs2_type);
3783 if (POINTER_TYPE_P (lhs_etype)
3784 || POINTER_TYPE_P (rhs1_etype)
3785 || POINTER_TYPE_P (rhs2_etype))
3787 error ("invalid (pointer) operands to plus/minus");
3788 return true;
3791 /* Continue with generic binary expression handling. */
3792 break;
3795 case POINTER_PLUS_EXPR:
3797 if (!POINTER_TYPE_P (rhs1_type)
3798 || !useless_type_conversion_p (lhs_type, rhs1_type)
3799 || !ptrofftype_p (rhs2_type))
3801 error ("type mismatch in pointer plus expression");
3802 debug_generic_stmt (lhs_type);
3803 debug_generic_stmt (rhs1_type);
3804 debug_generic_stmt (rhs2_type);
3805 return true;
3808 return false;
/* TRUTH_* codes are lowered before gimplification and must not
   appear in the IL.  */
3811 case TRUTH_ANDIF_EXPR:
3812 case TRUTH_ORIF_EXPR:
3813 case TRUTH_AND_EXPR:
3814 case TRUTH_OR_EXPR:
3815 case TRUTH_XOR_EXPR:
3817 gcc_unreachable ();
3819 case LT_EXPR:
3820 case LE_EXPR:
3821 case GT_EXPR:
3822 case GE_EXPR:
3823 case EQ_EXPR:
3824 case NE_EXPR:
3825 case UNORDERED_EXPR:
3826 case ORDERED_EXPR:
3827 case UNLT_EXPR:
3828 case UNLE_EXPR:
3829 case UNGT_EXPR:
3830 case UNGE_EXPR:
3831 case UNEQ_EXPR:
3832 case LTGT_EXPR:
3833 /* Comparisons are also binary, but the result type is not
3834 connected to the operand types. */
3835 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3837 case WIDEN_MULT_EXPR:
3838 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3839 return true;
3840 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3841 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3843 case WIDEN_SUM_EXPR:
3844 case VEC_WIDEN_MULT_HI_EXPR:
3845 case VEC_WIDEN_MULT_LO_EXPR:
3846 case VEC_WIDEN_MULT_EVEN_EXPR:
3847 case VEC_WIDEN_MULT_ODD_EXPR:
3848 case VEC_PACK_TRUNC_EXPR:
3849 case VEC_PACK_SAT_EXPR:
3850 case VEC_PACK_FIX_TRUNC_EXPR:
3851 /* FIXME. */
3852 return false;
3854 case MULT_EXPR:
3855 case MULT_HIGHPART_EXPR:
3856 case TRUNC_DIV_EXPR:
3857 case CEIL_DIV_EXPR:
3858 case FLOOR_DIV_EXPR:
3859 case ROUND_DIV_EXPR:
3860 case TRUNC_MOD_EXPR:
3861 case CEIL_MOD_EXPR:
3862 case FLOOR_MOD_EXPR:
3863 case ROUND_MOD_EXPR:
3864 case RDIV_EXPR:
3865 case EXACT_DIV_EXPR:
3866 case MIN_EXPR:
3867 case MAX_EXPR:
3868 case BIT_IOR_EXPR:
3869 case BIT_XOR_EXPR:
3870 case BIT_AND_EXPR:
3871 /* Continue with generic binary expression handling. */
3872 break;
3874 default:
3875 gcc_unreachable ();
/* Generic handling: both operands must trivially convert to the
   result type.  */
3878 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3879 || !useless_type_conversion_p (lhs_type, rhs2_type))
3881 error ("type mismatch in binary expression");
3882 debug_generic_stmt (lhs_type);
3883 debug_generic_stmt (rhs1_type);
3884 debug_generic_stmt (rhs2_type);
3885 return true;
3888 return false;
3891 /* Verify a gimple assignment statement STMT with a ternary rhs.
3892 Returns true if anything is wrong. */
3894 static bool
3895 verify_gimple_assign_ternary (gassign *stmt)
3897 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3898 tree lhs = gimple_assign_lhs (stmt);
3899 tree lhs_type = TREE_TYPE (lhs);
3900 tree rhs1 = gimple_assign_rhs1 (stmt);
3901 tree rhs1_type = TREE_TYPE (rhs1);
3902 tree rhs2 = gimple_assign_rhs2 (stmt);
3903 tree rhs2_type = TREE_TYPE (rhs2);
3904 tree rhs3 = gimple_assign_rhs3 (stmt);
3905 tree rhs3_type = TREE_TYPE (rhs3);
3907 if (!is_gimple_reg (lhs))
3909 error ("non-register as LHS of ternary operation");
3910 return true;
/* rhs1 of a (VEC_)COND_EXPR may be a condition expression; every
   other operand must be a plain GIMPLE value.  */
3913 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3914 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3915 || !is_gimple_val (rhs2)
3916 || !is_gimple_val (rhs3))
3918 error ("invalid operands in ternary operation");
3919 return true;
3922 /* First handle operations that involve different types. */
3923 switch (rhs_code)
3925 case WIDEN_MULT_PLUS_EXPR:
3926 case WIDEN_MULT_MINUS_EXPR:
/* rhs1 * rhs2 widened and accumulated into rhs3/lhs: factors must
   share a type, the accumulator must match the result, and the
   result must be at least twice as wide as the factors.  */
3927 if ((!INTEGRAL_TYPE_P (rhs1_type)
3928 && !FIXED_POINT_TYPE_P (rhs1_type))
3929 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3930 || !useless_type_conversion_p (lhs_type, rhs3_type)
3931 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3932 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3934 error ("type mismatch in widening multiply-accumulate expression");
3935 debug_generic_expr (lhs_type);
3936 debug_generic_expr (rhs1_type);
3937 debug_generic_expr (rhs2_type);
3938 debug_generic_expr (rhs3_type);
3939 return true;
3941 break;
3943 case FMA_EXPR:
/* Fused multiply-add: all three operands and the result share
   one type.  */
3944 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3945 || !useless_type_conversion_p (lhs_type, rhs2_type)
3946 || !useless_type_conversion_p (lhs_type, rhs3_type))
3948 error ("type mismatch in fused multiply-add expression");
3949 debug_generic_expr (lhs_type);
3950 debug_generic_expr (rhs1_type);
3951 debug_generic_expr (rhs2_type);
3952 debug_generic_expr (rhs3_type);
3953 return true;
3955 break;
3957 case COND_EXPR:
3958 case VEC_COND_EXPR:
/* Both select arms must match the result type.  */
3959 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3960 || !useless_type_conversion_p (lhs_type, rhs3_type))
3962 error ("type mismatch in conditional expression");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs2_type);
3965 debug_generic_expr (rhs3_type);
3966 return true;
3968 break;
3970 case VEC_PERM_EXPR:
3971 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3972 || !useless_type_conversion_p (lhs_type, rhs2_type))
3974 error ("type mismatch in vector permute expression");
3975 debug_generic_expr (lhs_type);
3976 debug_generic_expr (rhs1_type);
3977 debug_generic_expr (rhs2_type);
3978 debug_generic_expr (rhs3_type);
3979 return true;
3982 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3983 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3984 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3986 error ("vector types expected in vector permute expression");
3987 debug_generic_expr (lhs_type);
3988 debug_generic_expr (rhs1_type);
3989 debug_generic_expr (rhs2_type);
3990 debug_generic_expr (rhs3_type);
3991 return true;
/* All four vectors must agree in element count.  */
3994 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3995 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3996 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3997 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3998 != TYPE_VECTOR_SUBPARTS (lhs_type))
4000 error ("vectors with different element number found "
4001 "in vector permute expression")
4002 debug_generic_expr (lhs_type);
4003 debug_generic_expr (rhs1_type);
4004 debug_generic_expr (rhs2_type);
4005 debug_generic_expr (rhs3_type);
4006 return true;
/* The selector (rhs3) must be an integer vector whose elements
   match the data element width.  */
4009 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4010 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4011 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4013 error ("invalid mask type in vector permute expression");
4014 debug_generic_expr (lhs_type);
4015 debug_generic_expr (rhs1_type);
4016 debug_generic_expr (rhs2_type);
4017 debug_generic_expr (rhs3_type);
4018 return true;
4021 return false;
4023 case SAD_EXPR:
4024 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4025 || !useless_type_conversion_p (lhs_type, rhs3_type)
4026 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4027 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4028 > GET_MODE_BITSIZE (GET_MODE_INNER
4029 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4031 error ("type mismatch in sad expression");
4032 debug_generic_expr (lhs_type);
4033 debug_generic_expr (rhs1_type);
4034 debug_generic_expr (rhs2_type);
4035 debug_generic_expr (rhs3_type);
4036 return true;
4039 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4041 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4043 error ("vector types expected in sad expression");
4044 debug_generic_expr (lhs_type);
4045 debug_generic_expr (rhs1_type);
4046 debug_generic_expr (rhs2_type);
4047 debug_generic_expr (rhs3_type);
4048 return true;
4051 return false;
4053 case DOT_PROD_EXPR:
4054 case REALIGN_LOAD_EXPR:
4055 /* FIXME. */
4056 return false;
4058 default:
4059 gcc_unreachable ();
4061 return false;
4064 /* Verify a gimple assignment statement STMT with a single rhs.
4065 Returns true if anything is wrong. */
4067 static bool
4068 verify_gimple_assign_single (gassign *stmt)
4070 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4071 tree lhs = gimple_assign_lhs (stmt);
4072 tree lhs_type = TREE_TYPE (lhs);
4073 tree rhs1 = gimple_assign_rhs1 (stmt);
4074 tree rhs1_type = TREE_TYPE (rhs1);
4075 bool res = false;
/* A plain copy must not involve any implicit conversion.  */
4077 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4079 error ("non-trivial conversion at assignment");
4080 debug_generic_expr (lhs_type);
4081 debug_generic_expr (rhs1_type);
4082 return true;
4085 if (gimple_clobber_p (stmt)
4086 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4088 error ("non-decl/MEM_REF LHS in clobber statement");
4089 debug_generic_expr (lhs);
4090 return true;
/* A memory LHS gets its reference operands checked; remember any
   failure in RES and keep verifying the RHS.  */
4093 if (handled_component_p (lhs)
4094 || TREE_CODE (lhs) == MEM_REF
4095 || TREE_CODE (lhs) == TARGET_MEM_REF)
4096 res |= verify_types_in_gimple_reference (lhs, true);
4098 /* Special codes we cannot handle via their class. */
4099 switch (rhs_code)
4101 case ADDR_EXPR:
4103 tree op = TREE_OPERAND (rhs1, 0);
4104 if (!is_gimple_addressable (op))
4106 error ("invalid operand in unary expression");
4107 return true;
4110 /* Technically there is no longer a need for matching types, but
4111 gimple hygiene asks for this check. In LTO we can end up
4112 combining incompatible units and thus end up with addresses
4113 of globals that change their type to a common one. */
4114 if (!in_lto_p
4115 && !types_compatible_p (TREE_TYPE (op),
4116 TREE_TYPE (TREE_TYPE (rhs1)))
4117 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4118 TREE_TYPE (op)))
4120 error ("type mismatch in address expression");
4121 debug_generic_stmt (TREE_TYPE (rhs1));
4122 debug_generic_stmt (TREE_TYPE (op));
4123 return true;
4126 return verify_types_in_gimple_reference (op, true);
4129 /* tcc_reference */
4130 case INDIRECT_REF:
4131 error ("INDIRECT_REF in gimple IL");
4132 return true;
4134 case COMPONENT_REF:
4135 case BIT_FIELD_REF:
4136 case ARRAY_REF:
4137 case ARRAY_RANGE_REF:
4138 case VIEW_CONVERT_EXPR:
4139 case REALPART_EXPR:
4140 case IMAGPART_EXPR:
4141 case TARGET_MEM_REF:
4142 case MEM_REF:
/* A memory store of a register-typed value must have a register
   RHS, so memory-to-memory copies of register types are invalid.  */
4143 if (!is_gimple_reg (lhs)
4144 && is_gimple_reg_type (TREE_TYPE (lhs)))
4146 error ("invalid rhs for gimple memory store");
4147 debug_generic_stmt (lhs);
4148 debug_generic_stmt (rhs1);
4149 return true;
4151 return res || verify_types_in_gimple_reference (rhs1, false);
4153 /* tcc_constant */
4154 case SSA_NAME:
4155 case INTEGER_CST:
4156 case REAL_CST:
4157 case FIXED_CST:
4158 case COMPLEX_CST:
4159 case VECTOR_CST:
4160 case STRING_CST:
4161 return res;
4163 /* tcc_declaration */
4164 case CONST_DECL:
4165 return res;
4166 case VAR_DECL:
4167 case PARM_DECL:
4168 if (!is_gimple_reg (lhs)
4169 && !is_gimple_reg (rhs1)
4170 && is_gimple_reg_type (TREE_TYPE (lhs)))
4172 error ("invalid rhs for gimple memory store");
4173 debug_generic_stmt (lhs);
4174 debug_generic_stmt (rhs1);
4175 return true;
4177 return res;
4179 case CONSTRUCTOR:
4180 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4182 unsigned int i;
4183 tree elt_i, elt_v, elt_t = NULL_TREE;
4185 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4186 return res;
4187 /* For vector CONSTRUCTORs we require that either it is empty
4188 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4189 (then the element count must be correct to cover the whole
4190 outer vector and index must be NULL on all elements, or it is
4191 a CONSTRUCTOR of scalar elements, where we as an exception allow
4192 smaller number of elements (assuming zero filling) and
4193 consecutive indexes as compared to NULL indexes (such
4194 CONSTRUCTORs can appear in the IL from FEs). */
4195 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
/* ELT_T latches the type of the first element; all later elements
   must agree with it.  NOTE(review): the inner `tree elt_t` below
   deliberately shadows the outer one within the vector-element
   branch.  */
4197 if (elt_t == NULL_TREE)
4199 elt_t = TREE_TYPE (elt_v);
4200 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4202 tree elt_t = TREE_TYPE (elt_v);
4203 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4204 TREE_TYPE (elt_t)))
4206 error ("incorrect type of vector CONSTRUCTOR"
4207 " elements");
4208 debug_generic_stmt (rhs1);
4209 return true;
4211 else if (CONSTRUCTOR_NELTS (rhs1)
4212 * TYPE_VECTOR_SUBPARTS (elt_t)
4213 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4215 error ("incorrect number of vector CONSTRUCTOR"
4216 " elements");
4217 debug_generic_stmt (rhs1);
4218 return true;
4221 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4222 elt_t))
4224 error ("incorrect type of vector CONSTRUCTOR elements");
4225 debug_generic_stmt (rhs1);
4226 return true;
4228 else if (CONSTRUCTOR_NELTS (rhs1)
4229 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4231 error ("incorrect number of vector CONSTRUCTOR elements");
4232 debug_generic_stmt (rhs1);
4233 return true;
4236 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4238 error ("incorrect type of vector CONSTRUCTOR elements");
4239 debug_generic_stmt (rhs1);
4240 return true;
4242 if (elt_i != NULL_TREE
4243 && (TREE_CODE (elt_t) == VECTOR_TYPE
4244 || TREE_CODE (elt_i) != INTEGER_CST
4245 || compare_tree_int (elt_i, i) != 0))
4247 error ("vector CONSTRUCTOR with non-NULL element index");
4248 debug_generic_stmt (rhs1);
4249 return true;
4251 if (!is_gimple_val (elt_v))
4253 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4254 debug_generic_stmt (rhs1);
4255 return true;
/* Non-vector CONSTRUCTORs must be empty in GIMPLE.  */
4259 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4261 error ("non-vector CONSTRUCTOR with elements");
4262 debug_generic_stmt (rhs1);
4263 return true;
4265 return res;
4266 case OBJ_TYPE_REF:
4267 case ASSERT_EXPR:
4268 case WITH_SIZE_EXPR:
4269 /* FIXME. */
4270 return res;
4272 default:;
4275 return res;
4278 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4279 is a problem, otherwise false. */
4281 static bool
4282 verify_gimple_assign (gassign *stmt)
/* Dispatch on the arity of the right-hand side.  */
4284 switch (gimple_assign_rhs_class (stmt))
4286 case GIMPLE_SINGLE_RHS:
4287 return verify_gimple_assign_single (stmt);
4289 case GIMPLE_UNARY_RHS:
4290 return verify_gimple_assign_unary (stmt);
4292 case GIMPLE_BINARY_RHS:
4293 return verify_gimple_assign_binary (stmt);
4295 case GIMPLE_TERNARY_RHS:
4296 return verify_gimple_assign_ternary (stmt);
4298 default:
4299 gcc_unreachable ();
4303 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4304 is a problem, otherwise false. */
4306 static bool
4307 verify_gimple_return (greturn *stmt)
4309 tree op = gimple_return_retval (stmt);
4310 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4312 /* We cannot test for present return values as we do not fix up missing
4313 return values from the original source. */
4314 if (op == NULL)
4315 return false;
4317 if (!is_gimple_val (op)
4318 && TREE_CODE (op) != RESULT_DECL)
4320 error ("invalid operand in return statement");
4321 debug_generic_stmt (op);
4322 return true;
/* For a DECL_BY_REFERENCE result the IL carries a pointer to the
   declared result; OP is replaced by its type here so the
   TREE_TYPE (op) below yields the pointed-to type for the
   comparison against RESTYPE.  */
4325 if ((TREE_CODE (op) == RESULT_DECL
4326 && DECL_BY_REFERENCE (op))
4327 || (TREE_CODE (op) == SSA_NAME
4328 && SSA_NAME_VAR (op)
4329 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4330 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4331 op = TREE_TYPE (op);
4333 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4335 error ("invalid conversion in return statement");
4336 debug_generic_stmt (restype);
4337 debug_generic_stmt (TREE_TYPE (op));
4338 return true;
4341 return false;
4345 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4346 is a problem, otherwise false. */
4348 static bool
4349 verify_gimple_goto (ggoto *stmt)
4351 tree dest = gimple_goto_dest (stmt);
4353 /* ??? We have two canonical forms of direct goto destinations, a
4354 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4355 if (TREE_CODE (dest) != LABEL_DECL
4356 && (!is_gimple_val (dest)
4357 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4359 error ("goto destination is neither a label nor a pointer");
4360 return true;
4363 return false;
4366 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4367 is a problem, otherwise false. */
4369 static bool
4370 verify_gimple_switch (gswitch *stmt)
4372 unsigned int i, n;
4373 tree elt, prev_upper_bound = NULL_TREE;
4374 tree index_type, elt_type = NULL_TREE;
4376 if (!is_gimple_val (gimple_switch_index (stmt)))
4378 error ("invalid operand to switch statement");
4379 debug_generic_stmt (gimple_switch_index (stmt));
4380 return true;
4383 index_type = TREE_TYPE (gimple_switch_index (stmt));
4384 if (! INTEGRAL_TYPE_P (index_type))
4386 error ("non-integral type switch statement");
4387 debug_generic_expr (index_type);
4388 return true;
/* Label 0 is the default case and must not carry a value or range.  */
4391 elt = gimple_switch_label (stmt, 0);
4392 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4394 error ("invalid default case label in switch statement");
4395 debug_generic_expr (elt);
4396 return true;
/* Walk the remaining labels checking: each has a CASE_LOW, ranges
   are non-empty, all labels share one type whose precision fits the
   index type, and labels appear in strictly increasing order.  */
4399 n = gimple_switch_num_labels (stmt);
4400 for (i = 1; i < n; i++)
4402 elt = gimple_switch_label (stmt, i);
4404 if (! CASE_LOW (elt))
4406 error ("invalid case label in switch statement");
4407 debug_generic_expr (elt);
4408 return true;
4410 if (CASE_HIGH (elt)
4411 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4413 error ("invalid case range in switch statement");
4414 debug_generic_expr (elt);
4415 return true;
4418 if (elt_type)
4420 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4421 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type)
4423 error ("type mismatch for case label in switch statement");
4424 debug_generic_expr (elt);
4425 return true;
4428 else
4430 elt_type = TREE_TYPE (CASE_LOW (elt));
4431 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4433 error ("type precision mismatch in switch statement");
4434 return true;
4438 if (prev_upper_bound)
4440 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4442 error ("case labels not sorted in switch statement");
4443 return true;
4447 prev_upper_bound = CASE_HIGH (elt);
4448 if (! prev_upper_bound)
4449 prev_upper_bound = CASE_LOW (elt);
4452 return false;
4455 /* Verify a gimple debug statement STMT.
4456 Returns true if anything is wrong. */
4458 static bool
4459 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4461 /* There isn't much that could be wrong in a gimple debug stmt. A
4462 gimple debug bind stmt, for example, maps a tree, that's usually
4463 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4464 component or member of an aggregate type, to another tree, that
4465 can be an arbitrary expression. These stmts expand into debug
4466 insns, and are converted to debug notes by var-tracking.c. */
/* Hence: always accept -- no diagnosable constraints.  */
4467 return false;
4470 /* Verify a gimple label statement STMT.
4471 Returns true if anything is wrong. */
4473 static bool
4474 verify_gimple_label (glabel *stmt)
4476 tree decl = gimple_label_label (stmt);
4477 int uid;
4478 bool err = false;
4480 if (TREE_CODE (decl) != LABEL_DECL)
4481 return true;
/* Ordinary labels must belong to the current function; non-local
   and forced labels are exempt.  */
4482 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4483 && DECL_CONTEXT (decl) != current_function_decl)
4485 error ("label's context is not the current function decl");
4486 err |= true;
/* The label's uid must map back to the basic block containing this
   statement.  */
4489 uid = LABEL_DECL_UID (decl);
4490 if (cfun->cfg
4491 && (uid == -1
4492 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4494 error ("incorrect entry in label_to_block_map");
4495 err |= true;
/* If the label is an EH landing pad, the pad must point back at it.  */
4498 uid = EH_LANDING_PAD_NR (decl);
4499 if (uid)
4501 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4502 if (decl != lp->post_landing_pad)
4504 error ("incorrect setting of landing pad number");
4505 err |= true;
4509 return err;
4512 /* Verify a gimple cond statement STMT.
4513 Returns true if anything is wrong. */
4515 static bool
4516 verify_gimple_cond (gcond *stmt)
4518 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4520 error ("invalid comparison code in gimple cond");
4521 return true;
/* Each branch label, when present, must be a LABEL_DECL.  */
4523 if (!(!gimple_cond_true_label (stmt)
4524 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4525 || !(!gimple_cond_false_label (stmt)
4526 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4528 error ("invalid labels in gimple cond");
4529 return true;
/* The operands are checked as a comparison producing a boolean.  */
4532 return verify_gimple_comparison (boolean_type_node,
4533 gimple_cond_lhs (stmt),
4534 gimple_cond_rhs (stmt));
4537 /* Verify the GIMPLE statement STMT. Returns true if there is an
4538 error, otherwise false. */
4540 static bool
4541 verify_gimple_stmt (gimple stmt)
/* Dispatch to the per-code verifier.  */
4543 switch (gimple_code (stmt))
4545 case GIMPLE_ASSIGN:
4546 return verify_gimple_assign (as_a <gassign *> (stmt));
4548 case GIMPLE_LABEL:
4549 return verify_gimple_label (as_a <glabel *> (stmt));
4551 case GIMPLE_CALL:
4552 return verify_gimple_call (as_a <gcall *> (stmt));
4554 case GIMPLE_COND:
4555 return verify_gimple_cond (as_a <gcond *> (stmt));
4557 case GIMPLE_GOTO:
4558 return verify_gimple_goto (as_a <ggoto *> (stmt));
4560 case GIMPLE_SWITCH:
4561 return verify_gimple_switch (as_a <gswitch *> (stmt));
4563 case GIMPLE_RETURN:
4564 return verify_gimple_return (as_a <greturn *> (stmt));
4566 case GIMPLE_ASM:
/* Inline asm operands are not verified further here.  */
4567 return false;
4569 case GIMPLE_TRANSACTION:
4570 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4572 /* Tuples that do not have tree operands. */
4573 case GIMPLE_NOP:
4574 case GIMPLE_PREDICT:
4575 case GIMPLE_RESX:
4576 case GIMPLE_EH_DISPATCH:
4577 case GIMPLE_EH_MUST_NOT_THROW:
4578 return false;
4580 CASE_GIMPLE_OMP:
4581 /* OpenMP directives are validated by the FE and never operated
4582 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4583 non-gimple expressions when the main index variable has had
4584 its address taken. This does not affect the loop itself
4585 because the header of an GIMPLE_OMP_FOR is merely used to determine
4586 how to setup the parallel iteration. */
4587 return false;
4589 case GIMPLE_DEBUG:
4590 return verify_gimple_debug (stmt);
4592 default:
4593 gcc_unreachable ();
4597 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4598 and false otherwise. */
4600 static bool
4601 verify_gimple_phi (gimple phi)
4603 bool err = false;
4604 unsigned i;
4605 tree phi_result = gimple_phi_result (phi);
4606 bool virtual_p;
4608 if (!phi_result)
4610 error ("invalid PHI result");
4611 return true;
/* The result must be an SSA name; a virtual result must be based on
   the function's single virtual operand.  */
4614 virtual_p = virtual_operand_p (phi_result);
4615 if (TREE_CODE (phi_result) != SSA_NAME
4616 || (virtual_p
4617 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4619 error ("invalid PHI result");
4620 err = true;
/* Each argument must exist and agree with the result in
   virtual-ness; non-virtual arguments must be GIMPLE values.  */
4623 for (i = 0; i < gimple_phi_num_args (phi); i++)
4625 tree t = gimple_phi_arg_def (phi, i);
4627 if (!t)
4629 error ("missing PHI def");
4630 err |= true;
4631 continue;
4633 /* Addressable variables do have SSA_NAMEs but they
4634 are not considered gimple values. */
4635 else if ((TREE_CODE (t) == SSA_NAME
4636 && virtual_p != virtual_operand_p (t))
4637 || (virtual_p
4638 && (TREE_CODE (t) != SSA_NAME
4639 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4640 || (!virtual_p
4641 && !is_gimple_val (t)))
4643 error ("invalid PHI argument");
4644 debug_generic_expr (t);
4645 err |= true;
4647 #ifdef ENABLE_TYPES_CHECKING
4648 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4650 error ("incompatible types in PHI argument %u", i);
4651 debug_generic_stmt (TREE_TYPE (phi_result));
4652 debug_generic_stmt (TREE_TYPE (t));
4653 err |= true;
4655 #endif
4658 return err;
4661 /* Verify the GIMPLE statements inside the sequence STMTS. */
4663 static bool
4664 verify_gimple_in_seq_2 (gimple_seq stmts)
4666 gimple_stmt_iterator ittr;
4667 bool err = false;
/* Recurse into the sub-sequences of compound statements; leaf
   statements go through verify_gimple_stmt.  */
4669 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4671 gimple stmt = gsi_stmt (ittr);
4673 switch (gimple_code (stmt))
4675 case GIMPLE_BIND:
4676 err |= verify_gimple_in_seq_2 (
4677 gimple_bind_body (as_a <gbind *> (stmt)));
4678 break;
4680 case GIMPLE_TRY:
4681 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4682 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4683 break;
4685 case GIMPLE_EH_FILTER:
4686 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4687 break;
4689 case GIMPLE_EH_ELSE:
4691 geh_else *eh_else = as_a <geh_else *> (stmt);
4692 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4693 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4695 break;
4697 case GIMPLE_CATCH:
4698 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4699 as_a <gcatch *> (stmt)));
4700 break;
4702 case GIMPLE_TRANSACTION:
4703 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4704 break;
4706 default:
/* Leaf statement: verify it and dump the offender on failure.  */
4708 bool err2 = verify_gimple_stmt (stmt);
4709 if (err2)
4710 debug_gimple_stmt (stmt);
4711 err |= err2;
4716 return err;
4719 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4720 is a problem, otherwise false. */
4722 static bool
4723 verify_gimple_transaction (gtransaction *stmt)
4725 tree lab = gimple_transaction_label (stmt);
4726 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4727 return true;
4728 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4732 /* Verify the GIMPLE statements inside the statement list STMTS. */
4734 DEBUG_FUNCTION void
4735 verify_gimple_in_seq (gimple_seq stmts)
4737 timevar_push (TV_TREE_STMT_VERIFY);
4738 if (verify_gimple_in_seq_2 (stmts))
4739 internal_error ("verify_gimple failed");
4740 timevar_pop (TV_TREE_STMT_VERIFY);
4743 /* Return true when the T can be shared. */
4745 static bool
4746 tree_node_can_be_shared (tree t)
4748 if (IS_TYPE_OR_DECL_P (t)
4749 || is_gimple_min_invariant (t)
4750 || TREE_CODE (t) == SSA_NAME
4751 || t == error_mark_node
4752 || TREE_CODE (t) == IDENTIFIER_NODE)
4753 return true;
4755 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4756 return true;
4758 if (DECL_P (t))
4759 return true;
4761 return false;
4764 /* Called via walk_tree. Verify tree sharing. */
4766 static tree
4767 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4769 hash_set<void *> *visited = (hash_set<void *> *) data;
4771 if (tree_node_can_be_shared (*tp))
4773 *walk_subtrees = false;
4774 return NULL;
4777 if (visited->add (*tp))
4778 return *tp;
4780 return NULL;
4783 /* Called via walk_gimple_stmt. Verify tree sharing. */
4785 static tree
4786 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4788 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4789 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4792 static bool eh_error_found;
4793 bool
4794 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4795 hash_set<gimple> *visited)
4797 if (!visited->contains (stmt))
4799 error ("dead STMT in EH table");
4800 debug_gimple_stmt (stmt);
4801 eh_error_found = true;
4803 return true;
4806 /* Verify if the location LOCs block is in BLOCKS. */
4808 static bool
4809 verify_location (hash_set<tree> *blocks, location_t loc)
4811 tree block = LOCATION_BLOCK (loc);
4812 if (block != NULL_TREE
4813 && !blocks->contains (block))
4815 error ("location references block not in block tree");
4816 return true;
4818 if (block != NULL_TREE)
4819 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4820 return false;
4823 /* Called via walk_tree. Verify that expressions have no blocks. */
4825 static tree
4826 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4828 if (!EXPR_P (*tp))
4830 *walk_subtrees = false;
4831 return NULL;
4834 location_t loc = EXPR_LOCATION (*tp);
4835 if (LOCATION_BLOCK (loc) != NULL)
4836 return *tp;
4838 return NULL;
4841 /* Called via walk_tree. Verify locations of expressions. */
4843 static tree
4844 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4846 hash_set<tree> *blocks = (hash_set<tree> *) data;
/* Debug expressions hanging off a VAR_DECL must not carry location
   blocks themselves.  */
4848 if (TREE_CODE (*tp) == VAR_DECL
4849 && DECL_HAS_DEBUG_EXPR_P (*tp))
4851 tree t = DECL_DEBUG_EXPR (*tp);
4852 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4853 if (addr)
4854 return addr;
/* Likewise for value expressions of variables, parameters and
   results.  */
4856 if ((TREE_CODE (*tp) == VAR_DECL
4857 || TREE_CODE (*tp) == PARM_DECL
4858 || TREE_CODE (*tp) == RESULT_DECL)
4859 && DECL_HAS_VALUE_EXPR_P (*tp))
4861 tree t = DECL_VALUE_EXPR (*tp);
4862 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4863 if (addr)
4864 return addr;
/* Non-expressions carry no location; prune the walk below them.  */
4867 if (!EXPR_P (*tp))
4869 *walk_subtrees = false;
4870 return NULL;
/* Check the expression's own location against the block tree.  */
4873 location_t loc = EXPR_LOCATION (*tp);
4874 if (verify_location (blocks, loc))
4875 return *tp;
4877 return NULL;
4880 /* Called via walk_gimple_op. Verify locations of expressions. */
4882 static tree
4883 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4885 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4886 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4889 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4891 static void
4892 collect_subblocks (hash_set<tree> *blocks, tree block)
4894 tree t;
4895 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4897 blocks->add (t);
4898 collect_subblocks (blocks, t);
4902 /* Verify the GIMPLE statements in the CFG of FN. */
4904 DEBUG_FUNCTION void
4905 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4907 basic_block bb;
4908 bool err = false;
4910 timevar_push (TV_TREE_STMT_VERIFY);
4911 hash_set<void *> visited;
4912 hash_set<gimple> visited_stmts;
4914 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4915 hash_set<tree> blocks;
4916 if (DECL_INITIAL (fn->decl))
4918 blocks.add (DECL_INITIAL (fn->decl));
4919 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4922 FOR_EACH_BB_FN (bb, fn)
4924 gimple_stmt_iterator gsi;
/* First verify the PHI nodes of this block.  */
4926 for (gphi_iterator gpi = gsi_start_phis (bb);
4927 !gsi_end_p (gpi);
4928 gsi_next (&gpi))
4930 gphi *phi = gpi.phi ();
4931 bool err2 = false;
4932 unsigned i;
4934 visited_stmts.add (phi);
4936 if (gimple_bb (phi) != bb)
4938 error ("gimple_bb (phi) is set to a wrong basic block");
4939 err2 = true;
4942 err2 |= verify_gimple_phi (phi);
4944 /* Only PHI arguments have locations. */
4945 if (gimple_location (phi) != UNKNOWN_LOCATION)
4947 error ("PHI node with location");
4948 err2 = true;
/* Check each PHI argument for node sharing and location sanity.  */
4951 for (i = 0; i < gimple_phi_num_args (phi); i++)
4953 tree arg = gimple_phi_arg_def (phi, i);
4954 tree addr = walk_tree (&arg, verify_node_sharing_1,
4955 &visited, NULL);
4956 if (addr)
4958 error ("incorrect sharing of tree nodes");
4959 debug_generic_expr (addr);
4960 err2 |= true;
4962 location_t loc = gimple_phi_arg_location (phi, i);
4963 if (virtual_operand_p (gimple_phi_result (phi))
4964 && loc != UNKNOWN_LOCATION)
4966 error ("virtual PHI with argument locations");
4967 err2 = true;
4969 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4970 if (addr)
4972 debug_generic_expr (addr);
4973 err2 = true;
4975 err2 |= verify_location (&blocks, loc);
4978 if (err2)
4979 debug_gimple_stmt (phi);
4980 err |= err2;
/* Then verify the ordinary statements of the block.  */
4983 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4985 gimple stmt = gsi_stmt (gsi);
4986 bool err2 = false;
4987 struct walk_stmt_info wi;
4988 tree addr;
4989 int lp_nr;
4991 visited_stmts.add (stmt);
4993 if (gimple_bb (stmt) != bb)
4995 error ("gimple_bb (stmt) is set to a wrong basic block");
4996 err2 = true;
4999 err2 |= verify_gimple_stmt (stmt);
5000 err2 |= verify_location (&blocks, gimple_location (stmt));
5002 memset (&wi, 0, sizeof (wi));
5003 wi.info = (void *) &visited;
5004 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5005 if (addr)
5007 error ("incorrect sharing of tree nodes");
5008 debug_generic_expr (addr);
5009 err2 |= true;
5012 memset (&wi, 0, sizeof (wi));
5013 wi.info = (void *) &blocks;
5014 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5015 if (addr)
5017 debug_generic_expr (addr);
5018 err2 |= true;
5021 /* ??? Instead of not checking these stmts at all the walker
5022 should know its context via wi. */
5023 if (!is_gimple_debug (stmt)
5024 && !is_gimple_omp (stmt))
5026 memset (&wi, 0, sizeof (wi));
5027 addr = walk_gimple_op (stmt, verify_expr, &wi);
5028 if (addr)
5030 debug_generic_expr (addr);
5031 inform (gimple_location (stmt), "in statement");
5032 err2 |= true;
5036 /* If the statement is marked as part of an EH region, then it is
5037 expected that the statement could throw. Verify that when we
5038 have optimizations that simplify statements such that we prove
5039 that they cannot throw, that we update other data structures
5040 to match. */
5041 lp_nr = lookup_stmt_eh_lp (stmt);
5042 if (lp_nr > 0)
5044 if (!stmt_could_throw_p (stmt))
5046 if (verify_nothrow)
5048 error ("statement marked for throw, but doesn%'t");
5049 err2 |= true;
5052 else if (!gsi_one_before_end_p (gsi))
5054 error ("statement marked for throw in middle of block");
5055 err2 |= true;
5059 if (err2)
5060 debug_gimple_stmt (stmt);
5061 err |= err2;
/* Finally, verify that every EH-table entry refers to a statement
   actually seen above.  */
5065 eh_error_found = false;
5066 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5067 if (eh_table)
5068 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5069 (&visited_stmts);
5071 if (err || eh_error_found)
5072 internal_error ("verify_gimple failed");
5074 verify_histograms ();
5075 timevar_pop (TV_TREE_STMT_VERIFY);
5079 /* Verifies that the flow information is OK. */
5081 static int
5082 gimple_verify_flow_info (void)
5084 int err = 0;
5085 basic_block bb;
5086 gimple_stmt_iterator gsi;
5087 gimple stmt;
5088 edge e;
5089 edge_iterator ei;
/* ENTRY and EXIT must never carry statements or PHI nodes.  */
5091 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5092 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5094 error ("ENTRY_BLOCK has IL associated with it");
5095 err = 1;
5098 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5099 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5101 error ("EXIT_BLOCK has IL associated with it");
5102 err = 1;
5105 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5106 if (e->flags & EDGE_FALLTHRU)
5108 error ("fallthru to exit from bb %d", e->src->index);
5109 err = 1;
5112 FOR_EACH_BB_FN (bb, cfun)
5114 bool found_ctrl_stmt = false;
5116 stmt = NULL;
5118 /* Skip labels on the start of basic block. */
5119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5121 tree label;
5122 gimple prev_stmt = stmt;
5124 stmt = gsi_stmt (gsi);
5126 if (gimple_code (stmt) != GIMPLE_LABEL)
5127 break;
5129 label = gimple_label_label (as_a <glabel *> (stmt));
5130 if (prev_stmt && DECL_NONLOCAL (label))
5132 error ("nonlocal label ");
5133 print_generic_expr (stderr, label, 0);
5134 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5135 bb->index);
5136 err = 1;
5139 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5141 error ("EH landing pad label ");
5142 print_generic_expr (stderr, label, 0);
5143 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5144 bb->index);
5145 err = 1;
5148 if (label_to_block (label) != bb)
5150 error ("label ");
5151 print_generic_expr (stderr, label, 0);
5152 fprintf (stderr, " to block does not match in bb %d",
5153 bb->index);
5154 err = 1;
5157 if (decl_function_context (label) != current_function_decl)
5159 error ("label ");
5160 print_generic_expr (stderr, label, 0);
5161 fprintf (stderr, " has incorrect context in bb %d",
5162 bb->index);
5163 err = 1;
5167 /* Verify that body of basic block BB is free of control flow. */
5168 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5170 gimple stmt = gsi_stmt (gsi);
5172 if (found_ctrl_stmt)
5174 error ("control flow in the middle of basic block %d",
5175 bb->index);
5176 err = 1;
5179 if (stmt_ends_bb_p (stmt))
5180 found_ctrl_stmt = true;
5182 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5184 error ("label ");
5185 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5186 fprintf (stderr, " in the middle of basic block %d", bb->index);
5187 err = 1;
/* Now check the shape of the outgoing edges against the kind of
   statement that terminates the block.  */
5191 gsi = gsi_last_bb (bb);
5192 if (gsi_end_p (gsi))
5193 continue;
5195 stmt = gsi_stmt (gsi);
5197 if (gimple_code (stmt) == GIMPLE_LABEL)
5198 continue;
5200 err |= verify_eh_edges (stmt);
5202 if (is_ctrl_stmt (stmt))
5204 FOR_EACH_EDGE (e, ei, bb->succs)
5205 if (e->flags & EDGE_FALLTHRU)
5207 error ("fallthru edge after a control statement in bb %d",
5208 bb->index);
5209 err = 1;
5213 if (gimple_code (stmt) != GIMPLE_COND)
5215 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5216 after anything else but if statement. */
5217 FOR_EACH_EDGE (e, ei, bb->succs)
5218 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5220 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5221 bb->index);
5222 err = 1;
5226 switch (gimple_code (stmt))
5228 case GIMPLE_COND:
5230 edge true_edge;
5231 edge false_edge;
5233 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5235 if (!true_edge
5236 || !false_edge
5237 || !(true_edge->flags & EDGE_TRUE_VALUE)
5238 || !(false_edge->flags & EDGE_FALSE_VALUE)
5239 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5240 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5241 || EDGE_COUNT (bb->succs) >= 3)
5243 error ("wrong outgoing edge flags at end of bb %d",
5244 bb->index);
5245 err = 1;
5248 break;
5250 case GIMPLE_GOTO:
5251 if (simple_goto_p (stmt))
5253 error ("explicit goto at end of bb %d", bb->index);
5254 err = 1;
5256 else
5258 /* FIXME. We should double check that the labels in the
5259 destination blocks have their address taken. */
5260 FOR_EACH_EDGE (e, ei, bb->succs)
5261 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5262 | EDGE_FALSE_VALUE))
5263 || !(e->flags & EDGE_ABNORMAL))
5265 error ("wrong outgoing edge flags at end of bb %d",
5266 bb->index);
5267 err = 1;
5270 break;
5272 case GIMPLE_CALL:
5273 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5274 break;
5275 /* ... fallthru ... */
5276 case GIMPLE_RETURN:
5277 if (!single_succ_p (bb)
5278 || (single_succ_edge (bb)->flags
5279 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5280 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5282 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5283 err = 1;
5285 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5287 error ("return edge does not point to exit in bb %d",
5288 bb->index);
5289 err = 1;
5291 break;
5293 case GIMPLE_SWITCH:
5295 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5296 tree prev;
5297 edge e;
5298 size_t i, n;
5300 n = gimple_switch_num_labels (switch_stmt);
5302 /* Mark all the destination basic blocks. */
5303 for (i = 0; i < n; ++i)
5305 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5306 basic_block label_bb = label_to_block (lab);
5307 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5308 label_bb->aux = (void *)1;
5311 /* Verify that the case labels are sorted. */
5312 prev = gimple_switch_label (switch_stmt, 0);
5313 for (i = 1; i < n; ++i)
5315 tree c = gimple_switch_label (switch_stmt, i);
5316 if (!CASE_LOW (c))
5318 error ("found default case not at the start of "
5319 "case vector");
5320 err = 1;
5321 continue;
5323 if (CASE_LOW (prev)
5324 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5326 error ("case labels not sorted: ");
5327 print_generic_expr (stderr, prev, 0);
5328 fprintf (stderr," is greater than ");
5329 print_generic_expr (stderr, c, 0);
5330 fprintf (stderr," but comes before it.\n");
5331 err = 1;
5333 prev = c;
5335 /* VRP will remove the default case if it can prove it will
5336 never be executed. So do not verify there always exists
5337 a default case here. */
/* Every successor edge must lead to a marked case destination;
   remark them so missing edges can be detected below.  */
5339 FOR_EACH_EDGE (e, ei, bb->succs)
5341 if (!e->dest->aux)
5343 error ("extra outgoing edge %d->%d",
5344 bb->index, e->dest->index);
5345 err = 1;
5348 e->dest->aux = (void *)2;
5349 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5350 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5352 error ("wrong outgoing edge flags at end of bb %d",
5353 bb->index);
5354 err = 1;
5358 /* Check that we have all of them. */
5359 for (i = 0; i < n; ++i)
5361 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5362 basic_block label_bb = label_to_block (lab);
5364 if (label_bb->aux != (void *)2)
5366 error ("missing edge %i->%i", bb->index, label_bb->index);
5367 err = 1;
/* Clear the aux markers used above.  */
5371 FOR_EACH_EDGE (e, ei, bb->succs)
5372 e->dest->aux = (void *)0;
5374 break;
5376 case GIMPLE_EH_DISPATCH:
5377 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5378 break;
5380 default:
5381 break;
5385 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5386 verify_dominators (CDI_DOMINATORS);
5388 return err;
5392 /* Updates phi nodes after creating a forwarder block joined
5393 by edge FALLTHRU. */
5395 static void
5396 gimple_make_forwarder_block (edge fallthru)
5398 edge e;
5399 edge_iterator ei;
5400 basic_block dummy, bb;
5401 tree var;
5402 gphi_iterator gsi;
5404 dummy = fallthru->src;
5405 bb = fallthru->dest;
/* With a single predecessor BB's PHIs are already correct.  */
5407 if (single_pred_p (bb))
5408 return;
5410 /* If we redirected a branch we must create new PHI nodes at the
5411 start of BB. */
5412 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5414 gphi *phi, *new_phi;
5416 phi = gsi.phi ();
5417 var = gimple_phi_result (phi);
5418 new_phi = create_phi_node (var, bb);
/* The forwarder's PHI gets a fresh SSA name; its original result is
   now produced by the new PHI in BB and flows in over FALLTHRU.  */
5419 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5420 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5421 UNKNOWN_LOCATION);
5424 /* Add the arguments we have stored on edges. */
5425 FOR_EACH_EDGE (e, ei, bb->preds)
5427 if (e == fallthru)
5428 continue;
5430 flush_pending_stmts (e);
5435 /* Return a non-special label in the head of basic block BLOCK.
5436 Create one if it doesn't exist. */
5438 tree
5439 gimple_block_label (basic_block bb)
5441 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5442 bool first = true;
5443 tree label;
5444 glabel *stmt;
/* Scan the leading labels; reuse the first non-special (non-NONLOCAL)
   one, moving it to the front of the block so later lookups are
   cheap.  */
5446 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5448 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5449 if (!stmt)
5450 break;
5451 label = gimple_label_label (stmt);
5452 if (!DECL_NONLOCAL (label))
5454 if (!first)
5455 gsi_move_before (&i, &s);
5456 return label;
/* No usable label found; create an artificial one at the head.  */
5460 label = create_artificial_label (UNKNOWN_LOCATION);
5461 stmt = gimple_build_label (label);
5462 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5463 return label;
5467 /* Attempt to perform edge redirection by replacing a possibly complex
5468 jump instruction by a goto or by removing the jump completely.
5469 This can apply only if all edges now point to the same block. The
5470 parameters and return values are equivalent to
5471 redirect_edge_and_branch. */
5473 static edge
5474 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5476 basic_block src = e->src;
5477 gimple_stmt_iterator i;
5478 gimple stmt;
5480 /* We can replace or remove a complex jump only when we have exactly
5481 two edges. */
5482 if (EDGE_COUNT (src->succs) != 2
5483 /* Verify that all targets will be TARGET. Specifically, the
5484 edge that is not E must also go to TARGET. */
/* EDGE_SUCC (src, 0) == e evaluates to 0 or 1, selecting the "other"
   successor edge.  */
5485 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5486 return NULL;
5488 i = gsi_last_bb (src);
5489 if (gsi_end_p (i))
5490 return NULL;
5492 stmt = gsi_stmt (i);
/* Only a conditional or a switch can be dropped and replaced by a
   plain fallthru edge.  */
5494 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5496 gsi_remove (&i, true);
5497 e = ssa_redirect_edge (e, target);
5498 e->flags = EDGE_FALLTHRU;
5499 return e;
5502 return NULL;
5506 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5507 edge representing the redirected branch. */
5509 static edge
5510 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5512 basic_block bb = e->src;
5513 gimple_stmt_iterator gsi;
5514 edge ret;
5515 gimple stmt;
/* Abnormal edges cannot be redirected.  */
5517 if (e->flags & EDGE_ABNORMAL)
5518 return NULL;
/* Already pointing at DEST: nothing to do.  */
5520 if (e->dest == dest)
5521 return NULL;
5523 if (e->flags & EDGE_EH)
5524 return redirect_eh_edge (e, dest);
/* Try the cheap route first: drop the terminator entirely when all
   successors would collapse onto DEST.  */
5526 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5528 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5529 if (ret)
5530 return ret;
5533 gsi = gsi_last_bb (bb);
5534 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5536 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5538 case GIMPLE_COND:
5539 /* For COND_EXPR, we only need to redirect the edge. */
5540 break;
5542 case GIMPLE_GOTO:
5543 /* No non-abnormal edges should lead from a non-simple goto, and
5544 simple ones should be represented implicitly. */
5545 gcc_unreachable ();
5547 case GIMPLE_SWITCH:
5549 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5550 tree label = gimple_block_label (dest);
5551 tree cases = get_cases_for_edge (e, switch_stmt);
5553 /* If we have a list of cases associated with E, then use it
5554 as it's a lot faster than walking the entire case vector. */
5555 if (cases)
5557 edge e2 = find_edge (e->src, dest);
5558 tree last, first;
5560 first = cases;
5561 while (cases)
5563 last = cases;
5564 CASE_LABEL (cases) = label;
5565 cases = CASE_CHAIN (cases);
5568 /* If there was already an edge in the CFG, then we need
5569 to move all the cases associated with E to E2. */
5570 if (e2)
5572 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5574 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5575 CASE_CHAIN (cases2) = first;
5577 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5579 else
/* Slow path: rewrite every case label targeting E->dest.  */
5581 size_t i, n = gimple_switch_num_labels (switch_stmt);
5583 for (i = 0; i < n; i++)
5585 tree elt = gimple_switch_label (switch_stmt, i);
5586 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5587 CASE_LABEL (elt) = label;
5591 break;
5593 case GIMPLE_ASM:
5595 gasm *asm_stmt = as_a <gasm *> (stmt);
5596 int i, n = gimple_asm_nlabels (asm_stmt);
5597 tree label = NULL;
5599 for (i = 0; i < n; ++i)
5601 tree cons = gimple_asm_label_op (asm_stmt, i);
5602 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5604 if (!label)
5605 label = gimple_block_label (dest);
5606 TREE_VALUE (cons) = label;
5610 /* If we didn't find any label matching the former edge in the
5611 asm labels, we must be redirecting the fallthrough
5612 edge. */
5613 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5615 break;
5617 case GIMPLE_RETURN:
5618 gsi_remove (&gsi, true);
5619 e->flags |= EDGE_FALLTHRU;
5620 break;
5622 case GIMPLE_OMP_RETURN:
5623 case GIMPLE_OMP_CONTINUE:
5624 case GIMPLE_OMP_SECTIONS_SWITCH:
5625 case GIMPLE_OMP_FOR:
5626 /* The edges from OMP constructs can be simply redirected. */
5627 break;
5629 case GIMPLE_EH_DISPATCH:
5630 if (!(e->flags & EDGE_FALLTHRU))
5631 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5632 break;
5634 case GIMPLE_TRANSACTION:
5635 /* The ABORT edge has a stored label associated with it, otherwise
5636 the edges are simply redirectable. */
5637 if (e->flags == 0)
5638 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5639 gimple_block_label (dest));
5640 break;
5642 default:
5643 /* Otherwise it must be a fallthru edge, and we don't need to
5644 do anything besides redirecting it. */
5645 gcc_assert (e->flags & EDGE_FALLTHRU);
5646 break;
5649 /* Update/insert PHI nodes as necessary. */
5651 /* Now update the edges in the CFG. */
5652 e = ssa_redirect_edge (e, dest);
5654 return e;
5657 /* Returns true if it is possible to remove edge E by redirecting
5658 it to the destination of the other edge from E->src. */
5660 static bool
5661 gimple_can_remove_branch_p (const_edge e)
5663 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5664 return false;
5666 return true;
5669 /* Simple wrapper, as we can always redirect fallthru edges. */
5671 static basic_block
5672 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5674 e = gimple_redirect_edge_and_branch (e, dest);
5675 gcc_assert (e);
5677 return NULL;
5681 /* Splits basic block BB after statement STMT (but at least after the
5682 labels). If STMT is NULL, BB is split just after the labels. */
5684 static basic_block
5685 gimple_split_block (basic_block bb, void *stmt)
5687 gimple_stmt_iterator gsi;
5688 gimple_stmt_iterator gsi_tgt;
5689 gimple_seq list;
5690 basic_block new_bb;
5691 edge e;
5692 edge_iterator ei;
5694 new_bb = create_empty_bb (bb);
5696 /* Redirect the outgoing edges. */
5697 new_bb->succs = bb->succs;
5698 bb->succs = NULL;
5699 FOR_EACH_EDGE (e, ei, new_bb->succs)
5700 e->src = new_bb;
5702 /* Get a stmt iterator pointing to the first stmt to move. */
5703 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5704 gsi = gsi_after_labels (bb);
5705 else
5707 gsi = gsi_for_stmt ((gimple) stmt);
5708 gsi_next (&gsi);
5711 /* Move everything from GSI to the new basic block. */
5712 if (gsi_end_p (gsi))
5713 return new_bb;
5715 /* Split the statement list - avoid re-creating new containers as this
5716 brings ugly quadratic memory consumption in the inliner.
5717 (We are still quadratic since we need to update stmt BB pointers,
5718 sadly.) */
5719 gsi_split_seq_before (&gsi, &list);
5720 set_bb_seq (new_bb, list);
5721 for (gsi_tgt = gsi_start (list);
5722 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5723 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5725 return new_bb;
5729 /* Moves basic block BB after block AFTER. */
5731 static bool
5732 gimple_move_block_after (basic_block bb, basic_block after)
5734 if (bb->prev_bb == after)
5735 return true;
5737 unlink_block (bb);
5738 link_block (bb, after);
5740 return true;
5744 /* Return TRUE if block BB has no executable statements, otherwise return
5745 FALSE. */
5747 static bool
5748 gimple_empty_block_p (basic_block bb)
5750 /* BB must have no executable statements. */
5751 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5752 if (phi_nodes (bb))
5753 return false;
5754 if (gsi_end_p (gsi))
5755 return true;
5756 if (is_gimple_debug (gsi_stmt (gsi)))
5757 gsi_next_nondebug (&gsi);
5758 return gsi_end_p (gsi);
5762 /* Split a basic block if it ends with a conditional branch and if the
5763 other part of the block is not empty. */
5765 static basic_block
5766 gimple_split_block_before_cond_jump (basic_block bb)
5768 gimple last, split_point;
5769 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5770 if (gsi_end_p (gsi))
5771 return NULL;
5772 last = gsi_stmt (gsi);
5773 if (gimple_code (last) != GIMPLE_COND
5774 && gimple_code (last) != GIMPLE_SWITCH)
5775 return NULL;
5776 gsi_prev_nondebug (&gsi);
5777 split_point = gsi_stmt (gsi);
5778 return split_block (bb, split_point)->dest;
5782 /* Return true if basic_block can be duplicated. */
5784 static bool
5785 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5787 return true;
5790 /* Create a duplicate of the basic block BB. NOTE: This does not
5791 preserve SSA form. */
5793 static basic_block
5794 gimple_duplicate_bb (basic_block bb)
5796 basic_block new_bb;
5797 gimple_stmt_iterator gsi_tgt;
5799 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5801 /* Copy the PHI nodes. We ignore PHI node arguments here because
5802 the incoming edges have not been setup yet. */
5803 for (gphi_iterator gpi = gsi_start_phis (bb);
5804 !gsi_end_p (gpi);
5805 gsi_next (&gpi))
5807 gphi *phi, *copy;
5808 phi = gpi.phi ();
5809 copy = create_phi_node (NULL_TREE, new_bb);
5810 create_new_def_for (gimple_phi_result (phi), copy,
5811 gimple_phi_result_ptr (copy));
5812 gimple_set_uid (copy, gimple_uid (phi));
/* Now copy the ordinary statements, skipping labels.  */
5815 gsi_tgt = gsi_start_bb (new_bb);
5816 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5817 !gsi_end_p (gsi);
5818 gsi_next (&gsi))
5820 def_operand_p def_p;
5821 ssa_op_iter op_iter;
5822 tree lhs;
5823 gimple stmt, copy;
5825 stmt = gsi_stmt (gsi);
5826 if (gimple_code (stmt) == GIMPLE_LABEL)
5827 continue;
5829 /* Don't duplicate label debug stmts. */
5830 if (gimple_debug_bind_p (stmt)
5831 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5832 == LABEL_DECL)
5833 continue;
5835 /* Create a new copy of STMT and duplicate STMT's virtual
5836 operands. */
5837 copy = gimple_copy (stmt);
5838 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5840 maybe_duplicate_eh_stmt (copy, stmt);
5841 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5843 /* When copying around a stmt writing into a local non-user
5844 aggregate, make sure it won't share stack slot with other
5845 vars. */
5846 lhs = gimple_get_lhs (stmt);
5847 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5849 tree base = get_base_address (lhs);
5850 if (base
5851 && (TREE_CODE (base) == VAR_DECL
5852 || TREE_CODE (base) == RESULT_DECL)
5853 && DECL_IGNORED_P (base)
5854 && !TREE_STATIC (base)
5855 && !DECL_EXTERNAL (base)
5856 && (TREE_CODE (base) != VAR_DECL
5857 || !DECL_HAS_VALUE_EXPR_P (base)))
5858 DECL_NONSHAREABLE (base) = 1;
5861 /* Create new names for all the definitions created by COPY and
5862 add replacement mappings for each new name. */
5863 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5864 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5867 return new_bb;
5870 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5872 static void
5873 add_phi_args_after_copy_edge (edge e_copy)
5875 basic_block bb, bb_copy = e_copy->src, dest;
5876 edge e;
5877 edge_iterator ei;
5878 gphi *phi, *phi_copy;
5879 tree def;
5880 gphi_iterator psi, psi_copy;
5882 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5883 return;
/* Map the copied source block back to its original, if any.  */
5885 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5887 if (e_copy->dest->flags & BB_DUPLICATED)
5888 dest = get_bb_original (e_copy->dest);
5889 else
5890 dest = e_copy->dest;
5892 e = find_edge (bb, dest);
5893 if (!e)
5895 /* During loop unrolling the target of the latch edge is copied.
5896 In this case we are not looking for edge to dest, but to
5897 duplicated block whose original was dest. */
5898 FOR_EACH_EDGE (e, ei, bb->succs)
5900 if ((e->dest->flags & BB_DUPLICATED)
5901 && get_bb_original (e->dest) == dest)
5902 break;
5905 gcc_assert (e != NULL);
/* Walk original and copied PHIs in lockstep, copying for each the
   argument that flows in over the original edge E.  */
5908 for (psi = gsi_start_phis (e->dest),
5909 psi_copy = gsi_start_phis (e_copy->dest);
5910 !gsi_end_p (psi);
5911 gsi_next (&psi), gsi_next (&psi_copy))
5913 phi = psi.phi ();
5914 phi_copy = psi_copy.phi ();
5915 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5916 add_phi_arg (phi_copy, def, e_copy,
5917 gimple_phi_arg_location_from_edge (phi, e));
5922 /* Basic block BB_COPY was created by code duplication. Add phi node
5923 arguments for edges going out of BB_COPY. The blocks that were
5924 duplicated have BB_DUPLICATED set. */
5926 void
5927 add_phi_args_after_copy_bb (basic_block bb_copy)
5929 edge e_copy;
5930 edge_iterator ei;
5932 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5934 add_phi_args_after_copy_edge (e_copy);
5938 /* Blocks in REGION_COPY array of length N_REGION were created by
5939 duplication of basic blocks. Add phi node arguments for edges
5940 going from these blocks. If E_COPY is not NULL, also add
5941 phi node arguments for its destination.*/
5943 void
5944 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5945 edge e_copy)
5947 unsigned i;
5949 for (i = 0; i < n_region; i++)
5950 region_copy[i]->flags |= BB_DUPLICATED;
5952 for (i = 0; i < n_region; i++)
5953 add_phi_args_after_copy_bb (region_copy[i]);
5954 if (e_copy)
5955 add_phi_args_after_copy_edge (e_copy);
5957 for (i = 0; i < n_region; i++)
5958 region_copy[i]->flags &= ~BB_DUPLICATED;
5961 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5962 important exit edge EXIT. By important we mean that no SSA name defined
5963 inside region is live over the other exit edges of the region. All entry
5964 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5965 to the duplicate of the region. Dominance and loop information is
5966 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5967 UPDATE_DOMINANCE is false then we assume that the caller will update the
5968 dominance information after calling this function. The new basic
5969 blocks are stored to REGION_COPY in the same order as they had in REGION,
5970 provided that REGION_COPY is not NULL.
5971 The function returns false if it is unable to copy the region,
5972 true otherwise. */
5974 bool
5975 gimple_duplicate_sese_region (edge entry, edge exit,
5976 basic_block *region, unsigned n_region,
5977 basic_block *region_copy,
5978 bool update_dominance)
5980 unsigned i;
5981 bool free_region_copy = false, copying_header = false;
5982 struct loop *loop = entry->dest->loop_father;
5983 edge exit_copy;
5984 vec<basic_block> doms;
5985 edge redirected;
5986 int total_freq = 0, entry_freq = 0;
5987 gcov_type total_count = 0, entry_count = 0;
5989 if (!can_copy_bbs_p (region, n_region))
5990 return false;
5992 /* Some sanity checking. Note that we do not check for all possible
5993 missuses of the functions. I.e. if you ask to copy something weird,
5994 it will work, but the state of structures probably will not be
5995 correct. */
5996 for (i = 0; i < n_region; i++)
5998 /* We do not handle subloops, i.e. all the blocks must belong to the
5999 same loop. */
6000 if (region[i]->loop_father != loop)
6001 return false;
6003 if (region[i] != entry->dest
6004 && region[i] == loop->header)
6005 return false;
6008 /* In case the function is used for loop header copying (which is the primary
6009 use), ensure that EXIT and its copy will be new latch and entry edges. */
6010 if (loop->header == entry->dest)
6012 copying_header = true;
6014 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6015 return false;
6017 for (i = 0; i < n_region; i++)
6018 if (region[i] != exit->src
6019 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6020 return false;
6023 initialize_original_copy_tables ();
6025 if (copying_header)
6026 set_loop_copy (loop, loop_outer (loop));
6027 else
6028 set_loop_copy (loop, loop);
6030 if (!region_copy)
6032 region_copy = XNEWVEC (basic_block, n_region);
6033 free_region_copy = true;
6036 /* Record blocks outside the region that are dominated by something
6037 inside. */
6038 if (update_dominance)
6040 doms.create (0);
6041 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6044 if (entry->dest->count)
6046 total_count = entry->dest->count;
6047 entry_count = entry->count;
6048 /* Fix up corner cases, to avoid division by zero or creation of negative
6049 frequencies. */
6050 if (entry_count > total_count)
6051 entry_count = total_count;
6053 else
6055 total_freq = entry->dest->frequency;
6056 entry_freq = EDGE_FREQUENCY (entry);
6057 /* Fix up corner cases, to avoid division by zero or creation of negative
6058 frequencies. */
6059 if (total_freq == 0)
6060 total_freq = 1;
6061 else if (entry_freq > total_freq)
6062 entry_freq = total_freq;
6065 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6066 split_edge_bb_loc (entry), update_dominance);
6067 if (total_count)
6069 scale_bbs_frequencies_gcov_type (region, n_region,
6070 total_count - entry_count,
6071 total_count);
6072 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6073 total_count);
6075 else
6077 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6078 total_freq);
6079 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6082 if (copying_header)
6084 loop->header = exit->dest;
6085 loop->latch = exit->src;
6088 /* Redirect the entry and add the phi node arguments. */
6089 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6090 gcc_assert (redirected != NULL);
6091 flush_pending_stmts (entry);
6093 /* Concerning updating of dominators: We must recount dominators
6094 for entry block and its copy. Anything that is outside of the
6095 region, but was dominated by something inside needs recounting as
6096 well. */
6097 if (update_dominance)
6099 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6100 doms.safe_push (get_bb_original (entry->dest));
6101 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6102 doms.release ();
6105 /* Add the other PHI node arguments. */
6106 add_phi_args_after_copy (region_copy, n_region, NULL);
6108 if (free_region_copy)
6109 free (region_copy);
6111 free_original_copy_tables ();
6112 return true;
6115 /* Checks if BB is part of the region defined by N_REGION BBS. */
6116 static bool
6117 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6119 unsigned int n;
6121 for (n = 0; n < n_region; n++)
6123 if (bb == bbs[n])
6124 return true;
6126 return false;
6129 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6130 are stored to REGION_COPY in the same order in that they appear
6131 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6132 the region, EXIT an exit from it. The condition guarding EXIT
6133 is moved to ENTRY. Returns true if duplication succeeds, false
6134 otherwise.
6136 For example,
6138 some_code;
6139 if (cond)
6141 else
6144 is transformed to
6146 if (cond)
6148 some_code;
6151 else
6153 some_code;
6158 bool
6159 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6160 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6161 basic_block *region_copy ATTRIBUTE_UNUSED)
6163 unsigned i;
6164 bool free_region_copy = false;
6165 struct loop *loop = exit->dest->loop_father;
6166 struct loop *orig_loop = entry->dest->loop_father;
6167 basic_block switch_bb, entry_bb, nentry_bb;
6168 vec<basic_block> doms;
6169 int total_freq = 0, exit_freq = 0;
6170 gcov_type total_count = 0, exit_count = 0;
6171 edge exits[2], nexits[2], e;
6172 gimple_stmt_iterator gsi;
6173 gimple cond_stmt;
6174 edge sorig, snew;
6175 basic_block exit_bb;
6176 gphi_iterator psi;
6177 gphi *phi;
6178 tree def;
6179 struct loop *target, *aloop, *cloop;
6181 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6182 exits[0] = exit;
6183 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6185 if (!can_copy_bbs_p (region, n_region))
6186 return false;
6188 initialize_original_copy_tables ();
6189 set_loop_copy (orig_loop, loop);
6191 target= loop;
6192 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6194 if (bb_part_of_region_p (aloop->header, region, n_region))
6196 cloop = duplicate_loop (aloop, target);
6197 duplicate_subloops (aloop, cloop);
6201 if (!region_copy)
6203 region_copy = XNEWVEC (basic_block, n_region);
6204 free_region_copy = true;
6207 gcc_assert (!need_ssa_update_p (cfun));
6209 /* Record blocks outside the region that are dominated by something
6210 inside. */
6211 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6213 if (exit->src->count)
6215 total_count = exit->src->count;
6216 exit_count = exit->count;
6217 /* Fix up corner cases, to avoid division by zero or creation of negative
6218 frequencies. */
6219 if (exit_count > total_count)
6220 exit_count = total_count;
6222 else
6224 total_freq = exit->src->frequency;
6225 exit_freq = EDGE_FREQUENCY (exit);
6226 /* Fix up corner cases, to avoid division by zero or creation of negative
6227 frequencies. */
6228 if (total_freq == 0)
6229 total_freq = 1;
6230 if (exit_freq > total_freq)
6231 exit_freq = total_freq;
6234 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6235 split_edge_bb_loc (exit), true);
6236 if (total_count)
6238 scale_bbs_frequencies_gcov_type (region, n_region,
6239 total_count - exit_count,
6240 total_count);
6241 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6242 total_count);
6244 else
6246 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6247 total_freq);
6248 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6251 /* Create the switch block, and put the exit condition to it. */
6252 entry_bb = entry->dest;
6253 nentry_bb = get_bb_copy (entry_bb);
6254 if (!last_stmt (entry->src)
6255 || !stmt_ends_bb_p (last_stmt (entry->src)))
6256 switch_bb = entry->src;
6257 else
6258 switch_bb = split_edge (entry);
6259 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6261 gsi = gsi_last_bb (switch_bb);
6262 cond_stmt = last_stmt (exit->src);
6263 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6264 cond_stmt = gimple_copy (cond_stmt);
6266 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6268 sorig = single_succ_edge (switch_bb);
6269 sorig->flags = exits[1]->flags;
6270 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6272 /* Register the new edge from SWITCH_BB in loop exit lists. */
6273 rescan_loop_exit (snew, true, false);
6275 /* Add the PHI node arguments. */
6276 add_phi_args_after_copy (region_copy, n_region, snew);
6278 /* Get rid of now superfluous conditions and associated edges (and phi node
6279 arguments). */
6280 exit_bb = exit->dest;
6282 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6283 PENDING_STMT (e) = NULL;
6285 /* The latch of ORIG_LOOP was copied, and so was the backedge
6286 to the original header. We redirect this backedge to EXIT_BB. */
6287 for (i = 0; i < n_region; i++)
6288 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6290 gcc_assert (single_succ_edge (region_copy[i]));
6291 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6292 PENDING_STMT (e) = NULL;
6293 for (psi = gsi_start_phis (exit_bb);
6294 !gsi_end_p (psi);
6295 gsi_next (&psi))
6297 phi = psi.phi ();
6298 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6299 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6302 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6303 PENDING_STMT (e) = NULL;
6305 /* Anything that is outside of the region, but was dominated by something
6306 inside needs to update dominance info. */
6307 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6308 doms.release ();
6309 /* Update the SSA web. */
6310 update_ssa (TODO_update_ssa);
6312 if (free_region_copy)
6313 free (region_copy);
6315 free_original_copy_tables ();
6316 return true;
6319 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6320 adding blocks when the dominator traversal reaches EXIT. This
6321 function silently assumes that ENTRY strictly dominates EXIT. */
6323 void
6324 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6325 vec<basic_block> *bbs_p)
6327 basic_block son;
6329 for (son = first_dom_son (CDI_DOMINATORS, entry);
6330 son;
6331 son = next_dom_son (CDI_DOMINATORS, son))
6333 bbs_p->safe_push (son);
6334 if (son != exit)
6335 gather_blocks_in_sese_region (son, exit, bbs_p);
6339 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6340 The duplicates are recorded in VARS_MAP. */
6342 static void
6343 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6344 tree to_context)
6346 tree t = *tp, new_t;
6347 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6349 if (DECL_CONTEXT (t) == to_context)
6350 return;
6352 bool existed;
6353 tree &loc = vars_map->get_or_insert (t, &existed);
6355 if (!existed)
6357 if (SSA_VAR_P (t))
6359 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6360 add_local_decl (f, new_t);
6362 else
6364 gcc_assert (TREE_CODE (t) == CONST_DECL);
6365 new_t = copy_node (t);
6367 DECL_CONTEXT (new_t) = to_context;
6369 loc = new_t;
6371 else
6372 new_t = loc;
6374 *tp = new_t;
6378 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6379 VARS_MAP maps old ssa names and var_decls to the new ones. */
6381 static tree
6382 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6383 tree to_context)
6385 tree new_name;
6387 gcc_assert (!virtual_operand_p (name));
6389 tree *loc = vars_map->get (name);
6391 if (!loc)
6393 tree decl = SSA_NAME_VAR (name);
6394 if (decl)
6396 replace_by_duplicate_decl (&decl, vars_map, to_context);
6397 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6398 decl, SSA_NAME_DEF_STMT (name));
6399 if (SSA_NAME_IS_DEFAULT_DEF (name))
6400 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6401 decl, new_name);
6403 else
6404 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6405 name, SSA_NAME_DEF_STMT (name));
6407 vars_map->put (name, new_name);
6409 else
6410 new_name = *loc;
6412 return new_name;
6415 struct move_stmt_d
6417 tree orig_block;
6418 tree new_block;
6419 tree from_context;
6420 tree to_context;
6421 hash_map<tree, tree> *vars_map;
6422 htab_t new_label_map;
6423 hash_map<void *, void *> *eh_map;
6424 bool remap_decls_p;
6427 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6428 contained in *TP if it has been ORIG_BLOCK previously and change the
6429 DECL_CONTEXT of every local variable referenced in *TP. */
6431 static tree
6432 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6434 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6435 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6436 tree t = *tp;
6438 if (EXPR_P (t))
6440 tree block = TREE_BLOCK (t);
6441 if (block == p->orig_block
6442 || (p->orig_block == NULL_TREE
6443 && block != NULL_TREE))
6444 TREE_SET_BLOCK (t, p->new_block);
6445 #ifdef ENABLE_CHECKING
6446 else if (block != NULL_TREE)
6448 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6449 block = BLOCK_SUPERCONTEXT (block);
6450 gcc_assert (block == p->orig_block);
6452 #endif
6454 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6456 if (TREE_CODE (t) == SSA_NAME)
6457 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6458 else if (TREE_CODE (t) == LABEL_DECL)
6460 if (p->new_label_map)
6462 struct tree_map in, *out;
6463 in.base.from = t;
6464 out = (struct tree_map *)
6465 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6466 if (out)
6467 *tp = t = out->to;
6470 DECL_CONTEXT (t) = p->to_context;
6472 else if (p->remap_decls_p)
6474 /* Replace T with its duplicate. T should no longer appear in the
6475 parent function, so this looks wasteful; however, it may appear
6476 in referenced_vars, and more importantly, as virtual operands of
6477 statements, and in alias lists of other variables. It would be
6478 quite difficult to expunge it from all those places. ??? It might
6479 suffice to do this for addressable variables. */
6480 if ((TREE_CODE (t) == VAR_DECL
6481 && !is_global_var (t))
6482 || TREE_CODE (t) == CONST_DECL)
6483 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6485 *walk_subtrees = 0;
6487 else if (TYPE_P (t))
6488 *walk_subtrees = 0;
6490 return NULL_TREE;
6493 /* Helper for move_stmt_r. Given an EH region number for the source
6494 function, map that to the duplicate EH regio number in the dest. */
6496 static int
6497 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6499 eh_region old_r, new_r;
6501 old_r = get_eh_region_from_number (old_nr);
6502 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6504 return new_r->index;
6507 /* Similar, but operate on INTEGER_CSTs. */
6509 static tree
6510 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6512 int old_nr, new_nr;
6514 old_nr = tree_to_shwi (old_t_nr);
6515 new_nr = move_stmt_eh_region_nr (old_nr, p);
6517 return build_int_cst (integer_type_node, new_nr);
6520 /* Like move_stmt_op, but for gimple statements.
6522 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6523 contained in the current statement in *GSI_P and change the
6524 DECL_CONTEXT of every local variable referenced in the current
6525 statement. */
6527 static tree
6528 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6529 struct walk_stmt_info *wi)
6531 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6532 gimple stmt = gsi_stmt (*gsi_p);
6533 tree block = gimple_block (stmt);
6535 if (block == p->orig_block
6536 || (p->orig_block == NULL_TREE
6537 && block != NULL_TREE))
6538 gimple_set_block (stmt, p->new_block);
6540 switch (gimple_code (stmt))
6542 case GIMPLE_CALL:
6543 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6545 tree r, fndecl = gimple_call_fndecl (stmt);
6546 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6547 switch (DECL_FUNCTION_CODE (fndecl))
6549 case BUILT_IN_EH_COPY_VALUES:
6550 r = gimple_call_arg (stmt, 1);
6551 r = move_stmt_eh_region_tree_nr (r, p);
6552 gimple_call_set_arg (stmt, 1, r);
6553 /* FALLTHRU */
6555 case BUILT_IN_EH_POINTER:
6556 case BUILT_IN_EH_FILTER:
6557 r = gimple_call_arg (stmt, 0);
6558 r = move_stmt_eh_region_tree_nr (r, p);
6559 gimple_call_set_arg (stmt, 0, r);
6560 break;
6562 default:
6563 break;
6566 break;
6568 case GIMPLE_RESX:
6570 gresx *resx_stmt = as_a <gresx *> (stmt);
6571 int r = gimple_resx_region (resx_stmt);
6572 r = move_stmt_eh_region_nr (r, p);
6573 gimple_resx_set_region (resx_stmt, r);
6575 break;
6577 case GIMPLE_EH_DISPATCH:
6579 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6580 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6581 r = move_stmt_eh_region_nr (r, p);
6582 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6584 break;
6586 case GIMPLE_OMP_RETURN:
6587 case GIMPLE_OMP_CONTINUE:
6588 break;
6589 default:
6590 if (is_gimple_omp (stmt))
6592 /* Do not remap variables inside OMP directives. Variables
6593 referenced in clauses and directive header belong to the
6594 parent function and should not be moved into the child
6595 function. */
6596 bool save_remap_decls_p = p->remap_decls_p;
6597 p->remap_decls_p = false;
6598 *handled_ops_p = true;
6600 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6601 move_stmt_op, wi);
6603 p->remap_decls_p = save_remap_decls_p;
6605 break;
6608 return NULL_TREE;
6611 /* Move basic block BB from function CFUN to function DEST_FN. The
6612 block is moved out of the original linked list and placed after
6613 block AFTER in the new list. Also, the block is removed from the
6614 original array of blocks and placed in DEST_FN's array of blocks.
6615 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6616 updated to reflect the moved edges.
6618 The local variables are remapped to new instances, VARS_MAP is used
6619 to record the mapping. */
6621 static void
6622 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6623 basic_block after, bool update_edge_count_p,
6624 struct move_stmt_d *d)
6626 struct control_flow_graph *cfg;
6627 edge_iterator ei;
6628 edge e;
6629 gimple_stmt_iterator si;
6630 unsigned old_len, new_len;
6632 /* Remove BB from dominance structures. */
6633 delete_from_dominance_info (CDI_DOMINATORS, bb);
6635 /* Move BB from its current loop to the copy in the new function. */
6636 if (current_loops)
6638 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6639 if (new_loop)
6640 bb->loop_father = new_loop;
6643 /* Link BB to the new linked list. */
6644 move_block_after (bb, after);
6646 /* Update the edge count in the corresponding flowgraphs. */
6647 if (update_edge_count_p)
6648 FOR_EACH_EDGE (e, ei, bb->succs)
6650 cfun->cfg->x_n_edges--;
6651 dest_cfun->cfg->x_n_edges++;
6654 /* Remove BB from the original basic block array. */
6655 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6656 cfun->cfg->x_n_basic_blocks--;
6658 /* Grow DEST_CFUN's basic block array if needed. */
6659 cfg = dest_cfun->cfg;
6660 cfg->x_n_basic_blocks++;
6661 if (bb->index >= cfg->x_last_basic_block)
6662 cfg->x_last_basic_block = bb->index + 1;
6664 old_len = vec_safe_length (cfg->x_basic_block_info);
6665 if ((unsigned) cfg->x_last_basic_block >= old_len)
6667 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6668 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6671 (*cfg->x_basic_block_info)[bb->index] = bb;
6673 /* Remap the variables in phi nodes. */
6674 for (gphi_iterator psi = gsi_start_phis (bb);
6675 !gsi_end_p (psi); )
6677 gphi *phi = psi.phi ();
6678 use_operand_p use;
6679 tree op = PHI_RESULT (phi);
6680 ssa_op_iter oi;
6681 unsigned i;
6683 if (virtual_operand_p (op))
6685 /* Remove the phi nodes for virtual operands (alias analysis will be
6686 run for the new function, anyway). */
6687 remove_phi_node (&psi, true);
6688 continue;
6691 SET_PHI_RESULT (phi,
6692 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6693 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6695 op = USE_FROM_PTR (use);
6696 if (TREE_CODE (op) == SSA_NAME)
6697 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6700 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6702 location_t locus = gimple_phi_arg_location (phi, i);
6703 tree block = LOCATION_BLOCK (locus);
6705 if (locus == UNKNOWN_LOCATION)
6706 continue;
6707 if (d->orig_block == NULL_TREE || block == d->orig_block)
6709 if (d->new_block == NULL_TREE)
6710 locus = LOCATION_LOCUS (locus);
6711 else
6712 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6713 gimple_phi_arg_set_location (phi, i, locus);
6717 gsi_next (&psi);
6720 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6722 gimple stmt = gsi_stmt (si);
6723 struct walk_stmt_info wi;
6725 memset (&wi, 0, sizeof (wi));
6726 wi.info = d;
6727 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6729 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6731 tree label = gimple_label_label (label_stmt);
6732 int uid = LABEL_DECL_UID (label);
6734 gcc_assert (uid > -1);
6736 old_len = vec_safe_length (cfg->x_label_to_block_map);
6737 if (old_len <= (unsigned) uid)
6739 new_len = 3 * uid / 2 + 1;
6740 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6743 (*cfg->x_label_to_block_map)[uid] = bb;
6744 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6746 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6748 if (uid >= dest_cfun->cfg->last_label_uid)
6749 dest_cfun->cfg->last_label_uid = uid + 1;
6752 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6753 remove_stmt_from_eh_lp_fn (cfun, stmt);
6755 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6756 gimple_remove_stmt_histograms (cfun, stmt);
6758 /* We cannot leave any operands allocated from the operand caches of
6759 the current function. */
6760 free_stmt_operands (cfun, stmt);
6761 push_cfun (dest_cfun);
6762 update_stmt (stmt);
6763 pop_cfun ();
6766 FOR_EACH_EDGE (e, ei, bb->succs)
6767 if (e->goto_locus != UNKNOWN_LOCATION)
6769 tree block = LOCATION_BLOCK (e->goto_locus);
6770 if (d->orig_block == NULL_TREE
6771 || block == d->orig_block)
6772 e->goto_locus = d->new_block ?
6773 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6774 LOCATION_LOCUS (e->goto_locus);
6778 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6779 the outermost EH region. Use REGION as the incoming base EH region. */
6781 static eh_region
6782 find_outermost_region_in_block (struct function *src_cfun,
6783 basic_block bb, eh_region region)
6785 gimple_stmt_iterator si;
6787 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6789 gimple stmt = gsi_stmt (si);
6790 eh_region stmt_region;
6791 int lp_nr;
6793 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6794 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6795 if (stmt_region)
6797 if (region == NULL)
6798 region = stmt_region;
6799 else if (stmt_region != region)
6801 region = eh_region_outermost (src_cfun, stmt_region, region);
6802 gcc_assert (region != NULL);
6807 return region;
6810 static tree
6811 new_label_mapper (tree decl, void *data)
6813 htab_t hash = (htab_t) data;
6814 struct tree_map *m;
6815 void **slot;
6817 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6819 m = XNEW (struct tree_map);
6820 m->hash = DECL_UID (decl);
6821 m->base.from = decl;
6822 m->to = create_artificial_label (UNKNOWN_LOCATION);
6823 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6824 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6825 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6827 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6828 gcc_assert (*slot == NULL);
6830 *slot = m;
6832 return m->to;
6835 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6836 subblocks. */
6838 static void
6839 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6840 tree to_context)
6842 tree *tp, t;
6844 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6846 t = *tp;
6847 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6848 continue;
6849 replace_by_duplicate_decl (&t, vars_map, to_context);
6850 if (t != *tp)
6852 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6854 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6855 DECL_HAS_VALUE_EXPR_P (t) = 1;
6857 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6858 *tp = t;
6862 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6863 replace_block_vars_by_duplicates (block, vars_map, to_context);
6866 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6867 from FN1 to FN2. */
6869 static void
6870 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6871 struct loop *loop)
6873 /* Discard it from the old loop array. */
6874 (*get_loops (fn1))[loop->num] = NULL;
6876 /* Place it in the new loop array, assigning it a new number. */
6877 loop->num = number_of_loops (fn2);
6878 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6880 /* Recurse to children. */
6881 for (loop = loop->inner; loop; loop = loop->next)
6882 fixup_loop_arrays_after_move (fn1, fn2, loop);
6885 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6886 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6888 DEBUG_FUNCTION void
6889 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6891 basic_block bb;
6892 edge_iterator ei;
6893 edge e;
6894 bitmap bbs = BITMAP_ALLOC (NULL);
6895 int i;
6897 gcc_assert (entry != NULL);
6898 gcc_assert (entry != exit);
6899 gcc_assert (bbs_p != NULL);
6901 gcc_assert (bbs_p->length () > 0);
6903 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6904 bitmap_set_bit (bbs, bb->index);
6906 gcc_assert (bitmap_bit_p (bbs, entry->index));
6907 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6909 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6911 if (bb == entry)
6913 gcc_assert (single_pred_p (entry));
6914 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6916 else
6917 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6919 e = ei_edge (ei);
6920 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6923 if (bb == exit)
6925 gcc_assert (single_succ_p (exit));
6926 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6928 else
6929 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6931 e = ei_edge (ei);
6932 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6936 BITMAP_FREE (bbs);
6940 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6941 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6942 single basic block in the original CFG and the new basic block is
6943 returned. DEST_CFUN must not have a CFG yet.
6945 Note that the region need not be a pure SESE region. Blocks inside
6946 the region may contain calls to abort/exit. The only restriction
6947 is that ENTRY_BB should be the only entry point and it must
6948 dominate EXIT_BB.
6950 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6951 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6952 to the new function.
6954 All local variables referenced in the region are assumed to be in
6955 the corresponding BLOCK_VARS and unexpanded variable lists
6956 associated with DEST_CFUN. */
6958 basic_block
6959 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6960 basic_block exit_bb, tree orig_block)
6962 vec<basic_block> bbs, dom_bbs;
6963 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6964 basic_block after, bb, *entry_pred, *exit_succ, abb;
6965 struct function *saved_cfun = cfun;
6966 int *entry_flag, *exit_flag;
6967 unsigned *entry_prob, *exit_prob;
6968 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6969 edge e;
6970 edge_iterator ei;
6971 htab_t new_label_map;
6972 hash_map<void *, void *> *eh_map;
6973 struct loop *loop = entry_bb->loop_father;
6974 struct loop *loop0 = get_loop (saved_cfun, 0);
6975 struct move_stmt_d d;
6977 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6978 region. */
6979 gcc_assert (entry_bb != exit_bb
6980 && (!exit_bb
6981 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6983 /* Collect all the blocks in the region. Manually add ENTRY_BB
6984 because it won't be added by dfs_enumerate_from. */
6985 bbs.create (0);
6986 bbs.safe_push (entry_bb);
6987 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6988 #ifdef ENABLE_CHECKING
6989 verify_sese (entry_bb, exit_bb, &bbs);
6990 #endif
6992 /* The blocks that used to be dominated by something in BBS will now be
6993 dominated by the new block. */
6994 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6995 bbs.address (),
6996 bbs.length ());
6998 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6999 the predecessor edges to ENTRY_BB and the successor edges to
7000 EXIT_BB so that we can re-attach them to the new basic block that
7001 will replace the region. */
7002 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7003 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7004 entry_flag = XNEWVEC (int, num_entry_edges);
7005 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7006 i = 0;
7007 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7009 entry_prob[i] = e->probability;
7010 entry_flag[i] = e->flags;
7011 entry_pred[i++] = e->src;
7012 remove_edge (e);
7015 if (exit_bb)
7017 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7018 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7019 exit_flag = XNEWVEC (int, num_exit_edges);
7020 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7021 i = 0;
7022 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7024 exit_prob[i] = e->probability;
7025 exit_flag[i] = e->flags;
7026 exit_succ[i++] = e->dest;
7027 remove_edge (e);
7030 else
7032 num_exit_edges = 0;
7033 exit_succ = NULL;
7034 exit_flag = NULL;
7035 exit_prob = NULL;
7038 /* Switch context to the child function to initialize DEST_FN's CFG. */
7039 gcc_assert (dest_cfun->cfg == NULL);
7040 push_cfun (dest_cfun);
7042 init_empty_tree_cfg ();
7044 /* Initialize EH information for the new function. */
7045 eh_map = NULL;
7046 new_label_map = NULL;
7047 if (saved_cfun->eh)
7049 eh_region region = NULL;
7051 FOR_EACH_VEC_ELT (bbs, i, bb)
7052 region = find_outermost_region_in_block (saved_cfun, bb, region);
7054 init_eh_for_function ();
7055 if (region != NULL)
7057 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7058 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7059 new_label_mapper, new_label_map);
7063 /* Initialize an empty loop tree. */
7064 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7065 init_loops_structure (dest_cfun, loops, 1);
7066 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7067 set_loops_for_fn (dest_cfun, loops);
7069 /* Move the outlined loop tree part. */
7070 num_nodes = bbs.length ();
7071 FOR_EACH_VEC_ELT (bbs, i, bb)
7073 if (bb->loop_father->header == bb)
7075 struct loop *this_loop = bb->loop_father;
7076 struct loop *outer = loop_outer (this_loop);
7077 if (outer == loop
7078 /* If the SESE region contains some bbs ending with
7079 a noreturn call, those are considered to belong
7080 to the outermost loop in saved_cfun, rather than
7081 the entry_bb's loop_father. */
7082 || outer == loop0)
7084 if (outer != loop)
7085 num_nodes -= this_loop->num_nodes;
7086 flow_loop_tree_node_remove (bb->loop_father);
7087 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7088 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7091 else if (bb->loop_father == loop0 && loop0 != loop)
7092 num_nodes--;
7094 /* Remove loop exits from the outlined region. */
7095 if (loops_for_fn (saved_cfun)->exits)
7096 FOR_EACH_EDGE (e, ei, bb->succs)
7098 struct loops *l = loops_for_fn (saved_cfun);
7099 loop_exit **slot
7100 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7101 NO_INSERT);
7102 if (slot)
7103 l->exits->clear_slot (slot);
7108 /* Adjust the number of blocks in the tree root of the outlined part. */
7109 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7111 /* Setup a mapping to be used by move_block_to_fn. */
7112 loop->aux = current_loops->tree_root;
7113 loop0->aux = current_loops->tree_root;
7115 pop_cfun ();
7117 /* Move blocks from BBS into DEST_CFUN. */
7118 gcc_assert (bbs.length () >= 2);
7119 after = dest_cfun->cfg->x_entry_block_ptr;
7120 hash_map<tree, tree> vars_map;
7122 memset (&d, 0, sizeof (d));
7123 d.orig_block = orig_block;
7124 d.new_block = DECL_INITIAL (dest_cfun->decl);
7125 d.from_context = cfun->decl;
7126 d.to_context = dest_cfun->decl;
7127 d.vars_map = &vars_map;
7128 d.new_label_map = new_label_map;
7129 d.eh_map = eh_map;
7130 d.remap_decls_p = true;
7132 FOR_EACH_VEC_ELT (bbs, i, bb)
7134 /* No need to update edge counts on the last block. It has
7135 already been updated earlier when we detached the region from
7136 the original CFG. */
7137 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7138 after = bb;
7141 loop->aux = NULL;
7142 loop0->aux = NULL;
7143 /* Loop sizes are no longer correct, fix them up. */
7144 loop->num_nodes -= num_nodes;
7145 for (struct loop *outer = loop_outer (loop);
7146 outer; outer = loop_outer (outer))
7147 outer->num_nodes -= num_nodes;
7148 loop0->num_nodes -= bbs.length () - num_nodes;
7150 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7152 struct loop *aloop;
7153 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7154 if (aloop != NULL)
7156 if (aloop->simduid)
7158 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7159 d.to_context);
7160 dest_cfun->has_simduid_loops = true;
7162 if (aloop->force_vectorize)
7163 dest_cfun->has_force_vectorize_loops = true;
7167 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7168 if (orig_block)
7170 tree block;
7171 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7172 == NULL_TREE);
7173 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7174 = BLOCK_SUBBLOCKS (orig_block);
7175 for (block = BLOCK_SUBBLOCKS (orig_block);
7176 block; block = BLOCK_CHAIN (block))
7177 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7178 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7181 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7182 &vars_map, dest_cfun->decl);
7184 if (new_label_map)
7185 htab_delete (new_label_map);
7186 if (eh_map)
7187 delete eh_map;
7189 /* Rewire the entry and exit blocks. The successor to the entry
7190 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7191 the child function. Similarly, the predecessor of DEST_FN's
7192 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7193 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7194 various CFG manipulation function get to the right CFG.
7196 FIXME, this is silly. The CFG ought to become a parameter to
7197 these helpers. */
7198 push_cfun (dest_cfun);
7199 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7200 if (exit_bb)
7201 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7202 pop_cfun ();
7204 /* Back in the original function, the SESE region has disappeared,
7205 create a new basic block in its place. */
7206 bb = create_empty_bb (entry_pred[0]);
7207 if (current_loops)
7208 add_bb_to_loop (bb, loop);
7209 for (i = 0; i < num_entry_edges; i++)
7211 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7212 e->probability = entry_prob[i];
7215 for (i = 0; i < num_exit_edges; i++)
7217 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7218 e->probability = exit_prob[i];
7221 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7222 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7223 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7224 dom_bbs.release ();
7226 if (exit_bb)
7228 free (exit_prob);
7229 free (exit_flag);
7230 free (exit_succ);
7232 free (entry_prob);
7233 free (entry_flag);
7234 free (entry_pred);
7235 bbs.release ();
7237 return bb;
7241 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7244 void
7245 dump_function_to_file (tree fndecl, FILE *file, int flags)
7247 tree arg, var, old_current_fndecl = current_function_decl;
7248 struct function *dsf;
7249 bool ignore_topmost_bind = false, any_var = false;
7250 basic_block bb;
7251 tree chain;
7252 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7253 && decl_is_tm_clone (fndecl));
7254 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7256 current_function_decl = fndecl;
/* Print the signature: function name (with a [tm-clone] marker for
   transactional-memory clones) followed by the argument list.  */
7257 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7259 arg = DECL_ARGUMENTS (fndecl);
7260 while (arg)
7262 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7263 fprintf (file, " ");
7264 print_generic_expr (file, arg, dump_flags);
7265 if (flags & TDF_VERBOSE)
7266 print_node (file, "", arg, 4);
7267 if (DECL_CHAIN (arg))
7268 fprintf (file, ", ");
7269 arg = DECL_CHAIN (arg);
7271 fprintf (file, ")\n");
7273 if (flags & TDF_VERBOSE)
7274 print_node (file, "", fndecl, 2);
7276 dsf = DECL_STRUCT_FUNCTION (fndecl);
7277 if (dsf && (flags & TDF_EH))
7278 dump_eh_tree (file, dsf);
/* Raw dump of a function without a gimple body: dump the tree node
   itself and return early, after restoring current_function_decl.  */
7280 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7282 dump_node (fndecl, TDF_SLIM | flags, file);
7283 current_function_decl = old_current_fndecl;
7284 return;
7287 /* When GIMPLE is lowered, the variables are no longer available in
7288 BIND_EXPRs, so display them separately. */
7289 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7291 unsigned ix;
7292 ignore_topmost_bind = true;
7294 fprintf (file, "{\n");
7295 if (!vec_safe_is_empty (fun->local_decls))
7296 FOR_EACH_LOCAL_DECL (fun, ix, var)
7298 print_generic_decl (file, var, flags);
7299 if (flags & TDF_VERBOSE)
7300 print_node (file, "", var, 4);
7301 fprintf (file, "\n");
7303 any_var = true;
/* Anonymous SSA names (those with no underlying variable) are also
   listed separately.  */
7305 if (gimple_in_ssa_p (cfun))
7306 for (ix = 1; ix < num_ssa_names; ++ix)
7308 tree name = ssa_name (ix);
7309 if (name && !SSA_NAME_VAR (name))
7311 fprintf (file, " ");
7312 print_generic_expr (file, TREE_TYPE (name), flags);
7313 fprintf (file, " ");
7314 print_generic_expr (file, name, flags);
7315 fprintf (file, ";\n");
7317 any_var = true;
/* Pick one of three dump strategies, depending on how far the
   function has progressed: CFG-based dump, lowered GIMPLE sequence,
   or GENERIC tree dump.  */
7322 if (fun && fun->decl == fndecl
7323 && fun->cfg
7324 && basic_block_info_for_fn (fun))
7326 /* If the CFG has been built, emit a CFG-based dump. */
7327 if (!ignore_topmost_bind)
7328 fprintf (file, "{\n");
7330 if (any_var && n_basic_blocks_for_fn (fun))
7331 fprintf (file, "\n");
7333 FOR_EACH_BB_FN (bb, fun)
7334 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7336 fprintf (file, "}\n");
7338 else if (DECL_SAVED_TREE (fndecl) == NULL)
7340 /* The function is now in GIMPLE form but the CFG has not been
7341 built yet. Emit the single sequence of GIMPLE statements
7342 that make up its body. */
7343 gimple_seq body = gimple_body (fndecl);
7345 if (gimple_seq_first_stmt (body)
7346 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7347 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7348 print_gimple_seq (file, body, 0, flags);
7349 else
7351 if (!ignore_topmost_bind)
7352 fprintf (file, "{\n");
7354 if (any_var)
7355 fprintf (file, "\n");
7357 print_gimple_seq (file, body, 2, flags);
7358 fprintf (file, "}\n");
7361 else
7363 int indent;
7365 /* Make a tree based dump. */
7366 chain = DECL_SAVED_TREE (fndecl);
7367 if (chain && TREE_CODE (chain) == BIND_EXPR)
7369 if (ignore_topmost_bind)
7371 chain = BIND_EXPR_BODY (chain);
7372 indent = 2;
7374 else
7375 indent = 0;
7377 else
7379 if (!ignore_topmost_bind)
7380 fprintf (file, "{\n");
7381 indent = 2;
7384 if (any_var)
7385 fprintf (file, "\n");
7387 print_generic_stmt_indented (file, chain, flags, indent);
7388 if (ignore_topmost_bind)
7389 fprintf (file, "}\n");
7392 if (flags & TDF_ENUMERATE_LOCALS)
7393 dump_enumerated_decls (file, flags);
7394 fprintf (file, "\n\n");
/* Restore the caller's current_function_decl.  */
7396 current_function_decl = old_current_fndecl;
7399 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7401 DEBUG_FUNCTION void
7402 debug_function (tree fn, int flags)
7404 dump_function_to_file (fn, stderr, flags);
7408 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7410 static void
7411 print_pred_bbs (FILE *file, basic_block bb)
7413 edge e;
7414 edge_iterator ei;
7416 FOR_EACH_EDGE (e, ei, bb->preds)
7417 fprintf (file, "bb_%d ", e->src->index);
7421 /* Print on FILE the indexes for the successors of basic_block BB. */
7423 static void
7424 print_succ_bbs (FILE *file, basic_block bb)
7426 edge e;
7427 edge_iterator ei;
7429 FOR_EACH_EDGE (e, ei, bb->succs)
7430 fprintf (file, "bb_%d ", e->dest->index);
7433 /* Print to FILE the basic block BB following the VERBOSITY level. */
7435 void
7436 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7438 char *s_indent = (char *) alloca ((size_t) indent + 1);
7439 memset ((void *) s_indent, ' ', (size_t) indent);
7440 s_indent[indent] = '\0';
7442 /* Print basic_block's header. */
7443 if (verbosity >= 2)
7445 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7446 print_pred_bbs (file, bb);
7447 fprintf (file, "}, succs = {");
7448 print_succ_bbs (file, bb);
7449 fprintf (file, "})\n");
7452 /* Print basic_block's body. */
7453 if (verbosity >= 3)
7455 fprintf (file, "%s {\n", s_indent);
7456 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7457 fprintf (file, "%s }\n", s_indent);
7461 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7463 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7464 VERBOSITY level this outputs the contents of the loop, or just its
7465 structure. */
7467 static void
7468 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7470 char *s_indent;
7471 basic_block bb;
7473 if (loop == NULL)
7474 return;
7476 s_indent = (char *) alloca ((size_t) indent + 1);
7477 memset ((void *) s_indent, ' ', (size_t) indent);
7478 s_indent[indent] = '\0';
7480 /* Print loop's header. */
7481 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7482 if (loop->header)
7483 fprintf (file, "header = %d", loop->header->index);
7484 else
7486 fprintf (file, "deleted)\n");
7487 return;
7489 if (loop->latch)
7490 fprintf (file, ", latch = %d", loop->latch->index);
7491 else
7492 fprintf (file, ", multiple latches");
7493 fprintf (file, ", niter = ");
7494 print_generic_expr (file, loop->nb_iterations, 0);
7496 if (loop->any_upper_bound)
7498 fprintf (file, ", upper_bound = ");
7499 print_decu (loop->nb_iterations_upper_bound, file);
7502 if (loop->any_estimate)
7504 fprintf (file, ", estimate = ");
7505 print_decu (loop->nb_iterations_estimate, file);
7507 fprintf (file, ")\n");
7509 /* Print loop's body. */
7510 if (verbosity >= 1)
7512 fprintf (file, "%s{\n", s_indent);
7513 FOR_EACH_BB_FN (bb, cfun)
7514 if (bb->loop_father == loop)
7515 print_loops_bb (file, bb, indent, verbosity);
7517 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7518 fprintf (file, "%s}\n", s_indent);
7522 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7523 spaces. Following VERBOSITY level this outputs the contents of the
7524 loop, or just its structure. */
7526 static void
7527 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7528 int verbosity)
7530 if (loop == NULL)
7531 return;
7533 print_loop (file, loop, indent, verbosity);
7534 print_loop_and_siblings (file, loop->next, indent, verbosity);
7537 /* Follow a CFG edge from the entry point of the program, and on entry
7538 of a loop, pretty print the loop structure on FILE. */
7540 void
7541 print_loops (FILE *file, int verbosity)
7543 basic_block bb;
7545 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7546 if (bb && bb->loop_father)
7547 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7550 /* Dump a loop. */
7552 DEBUG_FUNCTION void
7553 debug (struct loop &ref)
7555 print_loop (stderr, &ref, 0, /*verbosity*/0);
7558 DEBUG_FUNCTION void
7559 debug (struct loop *ptr)
7561 if (ptr)
7562 debug (*ptr);
7563 else
7564 fprintf (stderr, "<nil>\n");
7567 /* Dump a loop verbosely. */
7569 DEBUG_FUNCTION void
7570 debug_verbose (struct loop &ref)
7572 print_loop (stderr, &ref, 0, /*verbosity*/3);
7575 DEBUG_FUNCTION void
7576 debug_verbose (struct loop *ptr)
7578 if (ptr)
7579 debug (*ptr);
7580 else
7581 fprintf (stderr, "<nil>\n");
7585 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7587 DEBUG_FUNCTION void
7588 debug_loops (int verbosity)
7590 print_loops (stderr, verbosity);
7593 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7595 DEBUG_FUNCTION void
7596 debug_loop (struct loop *loop, int verbosity)
7598 print_loop (stderr, loop, 0, verbosity);
7601 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7602 level. */
7604 DEBUG_FUNCTION void
7605 debug_loop_num (unsigned num, int verbosity)
7607 debug_loop (get_loop (cfun, num), verbosity);
7610 /* Return true if BB ends with a call, possibly followed by some
7611 instructions that must stay with the call. Return false,
7612 otherwise. */
7614 static bool
7615 gimple_block_ends_with_call_p (basic_block bb)
7617 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7618 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7622 /* Return true if BB ends with a conditional branch. Return false,
7623 otherwise. */
7625 static bool
7626 gimple_block_ends_with_condjump_p (const_basic_block bb)
7628 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7629 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7633 /* Return true if we need to add fake edge to exit at statement T.
7634 Helper function for gimple_flow_call_edges_add. */
7636 static bool
7637 need_fake_edge_p (gimple t)
7639 tree fndecl = NULL_TREE;
7640 int call_flags = 0;
7642 /* NORETURN and LONGJMP calls already have an edge to exit.
7643 CONST and PURE calls do not need one.
7644 We don't currently check for CONST and PURE here, although
7645 it would be a good idea, because those attributes are
7646 figured out from the RTL in mark_constant_function, and
7647 the counter incrementation code from -fprofile-arcs
7648 leads to different results from -fbranch-probabilities. */
7649 if (is_gimple_call (t))
7651 fndecl = gimple_call_fndecl (t);
7652 call_flags = gimple_call_flags (t);
/* A nothrow builtin that cannot return twice needs no fake edge;
   __builtin_fork is excluded because its -fprofile-arcs wrapper
   (__gcov_fork) effectively returns twice, see below.  */
7655 if (is_gimple_call (t)
7656 && fndecl
7657 && DECL_BUILT_IN (fndecl)
7658 && (call_flags & ECF_NOTHROW)
7659 && !(call_flags & ECF_RETURNS_TWICE)
7660 /* fork() doesn't really return twice, but the effect of
7661 wrapping it in __gcov_fork() which calls __gcov_flush()
7662 and clears the counters before forking has the same
7663 effect as returning twice. Force a fake edge. */
7664 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7665 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7666 return false;
/* Any other non-NORETURN call needs a fake edge; a NORETURN call
   needs one only if it has no non-fake outgoing edge already.  */
7668 if (is_gimple_call (t))
7670 edge_iterator ei;
7671 edge e;
7672 basic_block bb;
7674 if (!(call_flags & ECF_NORETURN))
7675 return true;
7677 bb = gimple_bb (t);
7678 FOR_EACH_EDGE (e, ei, bb->succs)
7679 if ((e->flags & EDGE_FAKE) == 0)
7680 return true;
/* Volatile asms and asms without outputs also get a fake edge.  */
7683 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7684 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7685 return true;
7687 return false;
7691 /* Add fake edges to the function exit for any non constant and non
7692 noreturn calls (or noreturn calls with EH/abnormal edges),
7693 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7694 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7695 that were split.
7697 The goal is to expose cases in which entering a basic block does
7698 not imply that all subsequent instructions must be executed. */
7700 static int
7701 gimple_flow_call_edges_add (sbitmap blocks)
7703 int i;
7704 int blocks_split = 0;
7705 int last_bb = last_basic_block_for_fn (cfun);
7706 bool check_last_block = false;
/* Nothing to do for an empty function (only the fixed blocks).  */
7708 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7709 return 0;
/* With a NULL BLOCKS, all blocks are processed, so the last block
   always needs the special handling below.  */
7711 if (! blocks)
7712 check_last_block = true;
7713 else
7714 check_last_block = bitmap_bit_p (blocks,
7715 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7717 /* In the last basic block, before epilogue generation, there will be
7718 a fallthru edge to EXIT. Special care is required if the last insn
7719 of the last basic block is a call because make_edge folds duplicate
7720 edges, which would result in the fallthru edge also being marked
7721 fake, which would result in the fallthru edge being removed by
7722 remove_fake_edges, which would result in an invalid CFG.
7724 Moreover, we can't elide the outgoing fake edge, since the block
7725 profiler needs to take this into account in order to solve the minimal
7726 spanning tree in the case that the call doesn't return.
7728 Handle this by adding a dummy instruction in a new last basic block. */
7729 if (check_last_block)
7731 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7732 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7733 gimple t = NULL;
7735 if (!gsi_end_p (gsi))
7736 t = gsi_stmt (gsi);
7738 if (t && need_fake_edge_p (t))
7740 edge e;
7742 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7743 if (e)
7745 gsi_insert_on_edge (e, gimple_build_nop ());
7746 gsi_commit_edge_inserts ();
7751 /* Now add fake edges to the function exit for any non constant
7752 calls since there is no way that we can determine if they will
7753 return or not... */
7754 for (i = 0; i < last_bb; i++)
7756 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7757 gimple_stmt_iterator gsi;
7758 gimple stmt, last_stmt;
7760 if (!bb)
7761 continue;
7763 if (blocks && !bitmap_bit_p (blocks, i))
7764 continue;
/* Walk the statements of BB backwards from the last non-debug one,
   splitting the block after each statement that needs a fake edge.  */
7766 gsi = gsi_last_nondebug_bb (bb);
7767 if (!gsi_end_p (gsi))
7769 last_stmt = gsi_stmt (gsi);
7772 stmt = gsi_stmt (gsi);
7773 if (need_fake_edge_p (stmt))
7775 edge e;
7777 /* The handling above of the final block before the
7778 epilogue should be enough to verify that there is
7779 no edge to the exit block in CFG already.
7780 Calling make_edge in such case would cause us to
7781 mark that edge as fake and remove it later. */
7782 #ifdef ENABLE_CHECKING
7783 if (stmt == last_stmt)
7785 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7786 gcc_assert (e == NULL);
7788 #endif
7790 /* Note that the following may create a new basic block
7791 and renumber the existing basic blocks. */
7792 if (stmt != last_stmt)
7794 e = split_block (bb, stmt);
7795 if (e)
7796 blocks_split++;
7798 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7800 gsi_prev (&gsi);
7802 while (!gsi_end_p (gsi));
7806 if (blocks_split)
7807 verify_flow_info ();
7809 return blocks_split;
7812 /* Removes edge E and all the blocks dominated by it, and updates dominance
7813 information. The IL in E->src needs to be updated separately.
7814 If dominance info is not available, only the edge E is removed.*/
7816 void
7817 remove_edge_and_dominated_blocks (edge e)
7819 vec<basic_block> bbs_to_remove = vNULL;
7820 vec<basic_block> bbs_to_fix_dom = vNULL;
7821 bitmap df, df_idom;
7822 edge f;
7823 edge_iterator ei;
7824 bool none_removed = false;
7825 unsigned i;
7826 basic_block bb, dbb;
7827 bitmap_iterator bi;
7829 /* If we are removing a path inside a non-root loop that may change
7830 loop ownership of blocks or remove loops. Mark loops for fixup. */
7831 if (current_loops
7832 && loop_outer (e->src->loop_father) != NULL
7833 && e->src->loop_father == e->dest->loop_father)
7834 loops_state_set (LOOPS_NEED_FIXUP);
/* Without dominator info, only the edge itself can be removed.  */
7836 if (!dom_info_available_p (CDI_DOMINATORS))
7838 remove_edge (e);
7839 return;
7842 /* No updating is needed for edges to exit. */
7843 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7845 if (cfgcleanup_altered_bbs)
7846 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7847 remove_edge (e);
7848 return;
7851 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7852 that is not dominated by E->dest, then this set is empty. Otherwise,
7853 all the basic blocks dominated by E->dest are removed.
7855 Also, to DF_IDOM we store the immediate dominators of the blocks in
7856 the dominance frontier of E (i.e., of the successors of the
7857 removed blocks, if there are any, and of E->dest otherwise). */
7858 FOR_EACH_EDGE (f, ei, e->dest->preds)
7860 if (f == e)
7861 continue;
7863 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7865 none_removed = true;
7866 break;
/* DF collects the dominance frontier of the removed region;
   DF_IDOM collects the immediate dominators of those blocks.  */
7870 df = BITMAP_ALLOC (NULL);
7871 df_idom = BITMAP_ALLOC (NULL);
7873 if (none_removed)
7874 bitmap_set_bit (df_idom,
7875 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7876 else
7878 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7879 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7881 FOR_EACH_EDGE (f, ei, bb->succs)
7883 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7884 bitmap_set_bit (df, f->dest->index);
7887 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7888 bitmap_clear_bit (df, bb->index);
7890 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7892 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7893 bitmap_set_bit (df_idom,
7894 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7898 if (cfgcleanup_altered_bbs)
7900 /* Record the set of the altered basic blocks. */
7901 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7902 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7905 /* Remove E and the cancelled blocks. */
7906 if (none_removed)
7907 remove_edge (e);
7908 else
7910 /* Walk backwards so as to get a chance to substitute all
7911 released DEFs into debug stmts. See
7912 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7913 details. */
7914 for (i = bbs_to_remove.length (); i-- > 0; )
7915 delete_basic_block (bbs_to_remove[i]);
7918 /* Update the dominance information. The immediate dominator may change only
7919 for blocks whose immediate dominator belongs to DF_IDOM:
7921 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7922 removal. Let Z the arbitrary block such that idom(Z) = Y and
7923 Z dominates X after the removal. Before removal, there exists a path P
7924 from Y to X that avoids Z. Let F be the last edge on P that is
7925 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7926 dominates W, and because of P, Z does not dominate W), and W belongs to
7927 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7928 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7930 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7931 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7932 dbb;
7933 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7934 bbs_to_fix_dom.safe_push (dbb);
7937 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
/* Release the temporary bitmaps and vectors.  */
7939 BITMAP_FREE (df);
7940 BITMAP_FREE (df_idom);
7941 bbs_to_remove.release ();
7942 bbs_to_fix_dom.release ();
7945 /* Purge dead EH edges from basic block BB. */
7947 bool
7948 gimple_purge_dead_eh_edges (basic_block bb)
7950 bool changed = false;
7951 edge e;
7952 edge_iterator ei;
7953 gimple stmt = last_stmt (bb);
7955 if (stmt && stmt_can_throw_internal (stmt))
7956 return false;
7958 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7960 if (e->flags & EDGE_EH)
7962 remove_edge_and_dominated_blocks (e);
7963 changed = true;
7965 else
7966 ei_next (&ei);
7969 return changed;
7972 /* Purge dead EH edges from basic block listed in BLOCKS. */
7974 bool
7975 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7977 bool changed = false;
7978 unsigned i;
7979 bitmap_iterator bi;
7981 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7983 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7985 /* Earlier gimple_purge_dead_eh_edges could have removed
7986 this basic block already. */
7987 gcc_assert (bb || changed);
7988 if (bb != NULL)
7989 changed |= gimple_purge_dead_eh_edges (bb);
7992 return changed;
7995 /* Purge dead abnormal call edges from basic block BB. */
7997 bool
7998 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8000 bool changed = false;
8001 edge e;
8002 edge_iterator ei;
8003 gimple stmt = last_stmt (bb);
8005 if (!cfun->has_nonlocal_label
8006 && !cfun->calls_setjmp)
8007 return false;
8009 if (stmt && stmt_can_make_abnormal_goto (stmt))
8010 return false;
8012 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8014 if (e->flags & EDGE_ABNORMAL)
8016 if (e->flags & EDGE_FALLTHRU)
8017 e->flags &= ~EDGE_ABNORMAL;
8018 else
8019 remove_edge_and_dominated_blocks (e);
8020 changed = true;
8022 else
8023 ei_next (&ei);
8026 return changed;
8029 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8031 bool
8032 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8034 bool changed = false;
8035 unsigned i;
8036 bitmap_iterator bi;
8038 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8040 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8042 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8043 this basic block already. */
8044 gcc_assert (bb || changed);
8045 if (bb != NULL)
8046 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8049 return changed;
8052 /* This function is called whenever a new edge is created or
8053 redirected. */
8055 static void
8056 gimple_execute_on_growing_pred (edge e)
8058 basic_block bb = e->dest;
8060 if (!gimple_seq_empty_p (phi_nodes (bb)))
8061 reserve_phi_args_for_new_edge (bb);
8064 /* This function is called immediately before edge E is removed from
8065 the edge vector E->dest->preds. */
8067 static void
8068 gimple_execute_on_shrinking_pred (edge e)
8070 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8071 remove_phi_args (e);
8074 /*---------------------------------------------------------------------------
8075 Helper functions for Loop versioning
8076 ---------------------------------------------------------------------------*/
8078 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8079 of 'first'. Both of them are dominated by 'new_head' basic block. When
8080 'new_head' was created by 'second's incoming edge it received phi arguments
8081 on the edge by split_edge(). Later, additional edge 'e' was created to
8082 connect 'new_head' and 'first'. Now this routine adds phi args on this
8083 additional edge 'e' that new_head to second edge received as part of edge
8084 splitting. */
8086 static void
8087 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8088 basic_block new_head, edge e)
8090 gphi *phi1, *phi2;
8091 gphi_iterator psi1, psi2;
8092 tree def;
8093 edge e2 = find_edge (new_head, second);
8095 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8096 edge, we should always have an edge from NEW_HEAD to SECOND. */
8097 gcc_assert (e2 != NULL);
8099 /* Browse all 'second' basic block phi nodes and add phi args to
8100 edge 'e' for 'first' head. PHI args are always in correct order. */
8102 for (psi2 = gsi_start_phis (second),
8103 psi1 = gsi_start_phis (first);
8104 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8105 gsi_next (&psi2), gsi_next (&psi1))
8107 phi1 = psi1.phi ();
8108 phi2 = psi2.phi ();
8109 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8110 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8115 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8116 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8117 the destination of the ELSE part. */
8119 static void
8120 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8121 basic_block second_head ATTRIBUTE_UNUSED,
8122 basic_block cond_bb, void *cond_e)
8124 gimple_stmt_iterator gsi;
8125 gimple new_cond_expr;
8126 tree cond_expr = (tree) cond_e;
8127 edge e0;
8129 /* Build new conditional expr */
8130 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8131 NULL_TREE, NULL_TREE);
8133 /* Add new cond in cond_bb. */
8134 gsi = gsi_last_bb (cond_bb);
8135 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8137 /* Adjust edges appropriately to connect new head with first head
8138 as well as second head. */
8139 e0 = single_succ_edge (cond_bb);
8140 e0->flags &= ~EDGE_FALLTHRU;
8141 e0->flags |= EDGE_FALSE_VALUE;
8145 /* Do book-keeping of basic block BB for the profile consistency checker.
8146 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8147 then do post-pass accounting. Store the counting in RECORD. */
8148 static void
8149 gimple_account_profile_record (basic_block bb, int after_pass,
8150 struct profile_record *record)
8152 gimple_stmt_iterator i;
8153 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8155 record->size[after_pass]
8156 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8157 if (profile_status_for_fn (cfun) == PROFILE_READ)
8158 record->time[after_pass]
8159 += estimate_num_insns (gsi_stmt (i),
8160 &eni_time_weights) * bb->count;
8161 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8162 record->time[after_pass]
8163 += estimate_num_insns (gsi_stmt (i),
8164 &eni_time_weights) * bb->frequency;
/* CFG hook implementations used when the IL is GIMPLE.  */
8168 struct cfg_hooks gimple_cfg_hooks = {
8169 "gimple",
8170 gimple_verify_flow_info, /* verify_flow_info */
8171 gimple_dump_bb, /* dump_bb */
8172 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8173 create_bb, /* create_basic_block */
8174 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8175 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8176 gimple_can_remove_branch_p, /* can_remove_branch_p */
8177 remove_bb, /* delete_basic_block */
8178 gimple_split_block, /* split_block */
8179 gimple_move_block_after, /* move_block_after */
8180 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8181 gimple_merge_blocks, /* merge_blocks */
8182 gimple_predict_edge, /* predict_edge */
8183 gimple_predicted_by_p, /* predicted_by_p */
8184 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8185 gimple_duplicate_bb, /* duplicate_block */
8186 gimple_split_edge, /* split_edge */
8187 gimple_make_forwarder_block, /* make_forward_block */
8188 NULL, /* tidy_fallthru_edge */
8189 NULL, /* force_nonfallthru */
8190 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8191 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8192 gimple_flow_call_edges_add, /* flow_call_edges_add */
8193 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8194 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8195 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8196 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8197 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8198 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8199 flush_pending_stmts, /* flush_pending_stmts */
8200 gimple_empty_block_p, /* block_empty_p */
8201 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8202 gimple_account_profile_record, /* account_profile_record */
8206 /* Split all critical edges. */
/* Returns 0 (no extra TODO flags); used as the execute callback of
   the "crited" pass below.  */
8208 unsigned int
8209 split_critical_edges (void)
8211 basic_block bb;
8212 edge e;
8213 edge_iterator ei;
8215 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8216 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8217 mappings around the calls to split_edge. */
8218 start_recording_case_labels ();
8219 FOR_ALL_BB_FN (bb, cfun)
8221 FOR_EACH_EDGE (e, ei, bb->succs)
/* Abnormal edges cannot be split.  */
8223 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8224 split_edge (e);
8225 /* PRE inserts statements to edges and expects that
8226 since split_critical_edges was done beforehand, committing edge
8227 insertions will not split more edges. In addition to critical
8228 edges we must split edges that have multiple successors and
8229 end by control flow statements, such as RESX.
8230 Go ahead and split them too. This matches the logic in
8231 gimple_find_edge_insert_loc. */
8232 else if ((!single_pred_p (e->dest)
8233 || !gimple_seq_empty_p (phi_nodes (e->dest))
8234 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8235 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8236 && !(e->flags & EDGE_ABNORMAL))
8238 gimple_stmt_iterator gsi;
8240 gsi = gsi_last_bb (e->src);
8241 if (!gsi_end_p (gsi)
8242 && stmt_ends_bb_p (gsi_stmt (gsi))
8243 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8244 && !gimple_call_builtin_p (gsi_stmt (gsi),
8245 BUILT_IN_RETURN)))
8246 split_edge (e);
8250 end_recording_case_labels ();
8251 return 0;
8254 namespace {
/* Pass metadata for the "crited" critical-edge-splitting pass; it
   requires a CFG and provides PROP_no_crit_edges.  */
8256 const pass_data pass_data_split_crit_edges =
8258 GIMPLE_PASS, /* type */
8259 "crited", /* name */
8260 OPTGROUP_NONE, /* optinfo_flags */
8261 TV_TREE_SPLIT_EDGES, /* tv_id */
8262 PROP_cfg, /* properties_required */
8263 PROP_no_crit_edges, /* properties_provided */
8264 0, /* properties_destroyed */
8265 0, /* todo_flags_start */
8266 0, /* todo_flags_finish */
/* Gimple pass wrapper around split_critical_edges.  */
8269 class pass_split_crit_edges : public gimple_opt_pass
8271 public:
8272 pass_split_crit_edges (gcc::context *ctxt)
8273 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8276 /* opt_pass methods: */
8277 virtual unsigned int execute (function *) { return split_critical_edges (); }
8279 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8280 }; // class pass_split_crit_edges
8282 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
8284 gimple_opt_pass *
8285 make_pass_split_crit_edges (gcc::context *ctxt)
8287 return new pass_split_crit_edges (ctxt);
8291 /* Insert COND expression which is GIMPLE_COND after STMT
8292 in basic block BB with appropriate basic block split
8293 and creation of a new conditionally executed basic block.
8294 Return created basic block. */
8295 basic_block
8296 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8298 edge fall = split_block (bb, stmt);
8299 gimple_stmt_iterator iter = gsi_last_bb (bb);
8300 basic_block new_bb;
8302 /* Insert cond statement. */
8303 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8304 if (gsi_end_p (iter))
8305 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8306 else
8307 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8309 /* Create conditionally executed block. */
8310 new_bb = create_empty_bb (bb);
8311 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8312 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8314 /* Fix edge for split bb. */
8315 fall->flags = EDGE_FALSE_VALUE;
8317 /* Update dominance info. */
8318 if (dom_info_available_p (CDI_DOMINATORS))
8320 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8321 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8324 /* Update loop info. */
8325 if (current_loops)
8326 add_bb_to_loop (new_bb, bb->loop_father);
8328 return new_bb;
8331 /* Build a ternary operation and gimplify it. Emit code before GSI.
8332 Return the gimple_val holding the result. */
8334 tree
8335 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8336 tree type, tree a, tree b, tree c)
8338 tree ret;
8339 location_t loc = gimple_location (gsi_stmt (*gsi));
8341 ret = fold_build3_loc (loc, code, type, a, b, c);
8342 STRIP_NOPS (ret);
8344 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8345 GSI_SAME_STMT);
8348 /* Build a binary operation and gimplify it. Emit code before GSI.
8349 Return the gimple_val holding the result. */
8351 tree
8352 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8353 tree type, tree a, tree b)
8355 tree ret;
8357 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8358 STRIP_NOPS (ret);
8360 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8361 GSI_SAME_STMT);
8364 /* Build a unary operation and gimplify it. Emit code before GSI.
8365 Return the gimple_val holding the result. */
8367 tree
8368 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8369 tree a)
8371 tree ret;
8373 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8374 STRIP_NOPS (ret);
8376 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8377 GSI_SAME_STMT);
8382 /* Given a basic block B which ends with a conditional and has
8383 precisely two successors, determine which of the edges is taken if
8384 the conditional is true and which is taken if the conditional is
8385 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8387 void
8388 extract_true_false_edges_from_block (basic_block b,
8389 edge *true_edge,
8390 edge *false_edge)
8392 edge e = EDGE_SUCC (b, 0);
8394 if (e->flags & EDGE_TRUE_VALUE)
8396 *true_edge = e;
8397 *false_edge = EDGE_SUCC (b, 1);
8399 else
8401 *false_edge = e;
8402 *true_edge = EDGE_SUCC (b, 1);
8406 /* Emit return warnings. */
8408 namespace {
8410 const pass_data pass_data_warn_function_return =
8412 GIMPLE_PASS, /* type */
8413 "*warn_function_return", /* name */
8414 OPTGROUP_NONE, /* optinfo_flags */
8415 TV_NONE, /* tv_id */
8416 PROP_cfg, /* properties_required */
8417 0, /* properties_provided */
8418 0, /* properties_destroyed */
8419 0, /* todo_flags_start */
8420 0, /* todo_flags_finish */
8423 class pass_warn_function_return : public gimple_opt_pass
8425 public:
8426 pass_warn_function_return (gcc::context *ctxt)
8427 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8430 /* opt_pass methods: */
8431 virtual unsigned int execute (function *);
8433 }; // class pass_warn_function_return
8435 unsigned int
8436 pass_warn_function_return::execute (function *fun)
8438 source_location location;
8439 gimple last;
8440 edge e;
8441 edge_iterator ei;
8443 if (!targetm.warn_func_return (fun->decl))
8444 return 0;
8446 /* If we have a path to EXIT, then we do return. */
8447 if (TREE_THIS_VOLATILE (fun->decl)
8448 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8450 location = UNKNOWN_LOCATION;
8451 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8453 last = last_stmt (e->src);
8454 if ((gimple_code (last) == GIMPLE_RETURN
8455 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8456 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8457 break;
8459 if (location == UNKNOWN_LOCATION)
8460 location = cfun->function_end_locus;
8461 warning_at (location, 0, "%<noreturn%> function does return");
8464 /* If we see "return;" in some basic block, then we do reach the end
8465 without returning a value. */
8466 else if (warn_return_type
8467 && !TREE_NO_WARNING (fun->decl)
8468 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8469 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8471 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8473 gimple last = last_stmt (e->src);
8474 greturn *return_stmt = dyn_cast <greturn *> (last);
8475 if (return_stmt
8476 && gimple_return_retval (return_stmt) == NULL
8477 && !gimple_no_warning_p (last))
8479 location = gimple_location (last);
8480 if (location == UNKNOWN_LOCATION)
8481 location = fun->function_end_locus;
8482 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8483 TREE_NO_WARNING (fun->decl) = 1;
8484 break;
8488 return 0;
8491 } // anon namespace
8493 gimple_opt_pass *
8494 make_pass_warn_function_return (gcc::context *ctxt)
8496 return new pass_warn_function_return (ctxt);
8499 /* Walk a gimplified function and warn for functions whose return value is
8500 ignored and attribute((warn_unused_result)) is set. This is done before
8501 inlining, so we don't have to worry about that. */
8503 static void
8504 do_warn_unused_result (gimple_seq seq)
8506 tree fdecl, ftype;
8507 gimple_stmt_iterator i;
8509 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8511 gimple g = gsi_stmt (i);
8513 switch (gimple_code (g))
8515 case GIMPLE_BIND:
8516 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8517 break;
8518 case GIMPLE_TRY:
8519 do_warn_unused_result (gimple_try_eval (g));
8520 do_warn_unused_result (gimple_try_cleanup (g));
8521 break;
8522 case GIMPLE_CATCH:
8523 do_warn_unused_result (gimple_catch_handler (
8524 as_a <gcatch *> (g)));
8525 break;
8526 case GIMPLE_EH_FILTER:
8527 do_warn_unused_result (gimple_eh_filter_failure (g));
8528 break;
8530 case GIMPLE_CALL:
8531 if (gimple_call_lhs (g))
8532 break;
8533 if (gimple_call_internal_p (g))
8534 break;
8536 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8537 LHS. All calls whose value is ignored should be
8538 represented like this. Look for the attribute. */
8539 fdecl = gimple_call_fndecl (g);
8540 ftype = gimple_call_fntype (g);
8542 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8544 location_t loc = gimple_location (g);
8546 if (fdecl)
8547 warning_at (loc, OPT_Wunused_result,
8548 "ignoring return value of %qD, "
8549 "declared with attribute warn_unused_result",
8550 fdecl);
8551 else
8552 warning_at (loc, OPT_Wunused_result,
8553 "ignoring return value of function "
8554 "declared with attribute warn_unused_result");
8556 break;
8558 default:
8559 /* Not a container, not a call, or a call whose value is used. */
8560 break;
8565 namespace {
8567 const pass_data pass_data_warn_unused_result =
8569 GIMPLE_PASS, /* type */
8570 "*warn_unused_result", /* name */
8571 OPTGROUP_NONE, /* optinfo_flags */
8572 TV_NONE, /* tv_id */
8573 PROP_gimple_any, /* properties_required */
8574 0, /* properties_provided */
8575 0, /* properties_destroyed */
8576 0, /* todo_flags_start */
8577 0, /* todo_flags_finish */
8580 class pass_warn_unused_result : public gimple_opt_pass
8582 public:
8583 pass_warn_unused_result (gcc::context *ctxt)
8584 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8587 /* opt_pass methods: */
8588 virtual bool gate (function *) { return flag_warn_unused_result; }
8589 virtual unsigned int execute (function *)
8591 do_warn_unused_result (gimple_body (current_function_decl));
8592 return 0;
8595 }; // class pass_warn_unused_result
8597 } // anon namespace
8599 gimple_opt_pass *
8600 make_pass_warn_unused_result (gcc::context *ctxt)
8602 return new pass_warn_unused_result (ctxt);
8605 /* IPA passes, compilation of earlier functions or inlining
8606 might have changed some properties, such as marked functions nothrow,
8607 pure, const or noreturn.
8608 Remove redundant edges and basic blocks, and create new ones if necessary.
8610 This pass can't be executed as stand alone pass from pass manager, because
8611 in between inlining and this fixup the verify_flow_info would fail. */
8613 unsigned int
8614 execute_fixup_cfg (void)
8616 basic_block bb;
8617 gimple_stmt_iterator gsi;
8618 int todo = 0;
8619 gcov_type count_scale;
8620 edge e;
8621 edge_iterator ei;
8623 count_scale
8624 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8625 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8627 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8628 cgraph_node::get (current_function_decl)->count;
8629 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8630 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8631 count_scale);
8633 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8634 e->count = apply_scale (e->count, count_scale);
8636 FOR_EACH_BB_FN (bb, cfun)
8638 bb->count = apply_scale (bb->count, count_scale);
8639 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8641 gimple stmt = gsi_stmt (gsi);
8642 tree decl = is_gimple_call (stmt)
8643 ? gimple_call_fndecl (stmt)
8644 : NULL;
8645 if (decl)
8647 int flags = gimple_call_flags (stmt);
8648 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8650 if (gimple_purge_dead_abnormal_call_edges (bb))
8651 todo |= TODO_cleanup_cfg;
8653 if (gimple_in_ssa_p (cfun))
8655 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8656 update_stmt (stmt);
8660 if (flags & ECF_NORETURN
8661 && fixup_noreturn_call (stmt))
8662 todo |= TODO_cleanup_cfg;
8665 /* Remove stores to variables we marked write-only.
8666 Keep access when store has side effect, i.e. in case when source
8667 is volatile. */
8668 if (gimple_store_p (stmt)
8669 && !gimple_has_side_effects (stmt))
8671 tree lhs = get_base_address (gimple_get_lhs (stmt));
8673 if (TREE_CODE (lhs) == VAR_DECL
8674 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8675 && varpool_node::get (lhs)->writeonly)
8677 unlink_stmt_vdef (stmt);
8678 gsi_remove (&gsi, true);
8679 release_defs (stmt);
8680 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8681 continue;
8684 /* For calls we can simply remove LHS when it is known
8685 to be write-only. */
8686 if (is_gimple_call (stmt)
8687 && gimple_get_lhs (stmt))
8689 tree lhs = get_base_address (gimple_get_lhs (stmt));
8691 if (TREE_CODE (lhs) == VAR_DECL
8692 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8693 && varpool_node::get (lhs)->writeonly)
8695 gimple_call_set_lhs (stmt, NULL);
8696 update_stmt (stmt);
8697 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8701 if (maybe_clean_eh_stmt (stmt)
8702 && gimple_purge_dead_eh_edges (bb))
8703 todo |= TODO_cleanup_cfg;
8704 gsi_next (&gsi);
8707 FOR_EACH_EDGE (e, ei, bb->succs)
8708 e->count = apply_scale (e->count, count_scale);
8710 /* If we have a basic block with no successors that does not
8711 end with a control statement or a noreturn call end it with
8712 a call to __builtin_unreachable. This situation can occur
8713 when inlining a noreturn call that does in fact return. */
8714 if (EDGE_COUNT (bb->succs) == 0)
8716 gimple stmt = last_stmt (bb);
8717 if (!stmt
8718 || (!is_ctrl_stmt (stmt)
8719 && (!is_gimple_call (stmt)
8720 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8722 if (stmt && is_gimple_call (stmt))
8723 gimple_call_set_ctrl_altering (stmt, false);
8724 stmt = gimple_build_call
8725 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8726 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8727 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8731 if (count_scale != REG_BR_PROB_BASE)
8732 compute_function_frequency ();
8734 if (current_loops
8735 && (todo & TODO_cleanup_cfg))
8736 loops_state_set (LOOPS_NEED_FIXUP);
8738 return todo;
8741 namespace {
8743 const pass_data pass_data_fixup_cfg =
8745 GIMPLE_PASS, /* type */
8746 "fixup_cfg", /* name */
8747 OPTGROUP_NONE, /* optinfo_flags */
8748 TV_NONE, /* tv_id */
8749 PROP_cfg, /* properties_required */
8750 0, /* properties_provided */
8751 0, /* properties_destroyed */
8752 0, /* todo_flags_start */
8753 0, /* todo_flags_finish */
8756 class pass_fixup_cfg : public gimple_opt_pass
8758 public:
8759 pass_fixup_cfg (gcc::context *ctxt)
8760 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8763 /* opt_pass methods: */
8764 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8765 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8767 }; // class pass_fixup_cfg
8769 } // anon namespace
8771 gimple_opt_pass *
8772 make_pass_fixup_cfg (gcc::context *ctxt)
8774 return new pass_fixup_cfg (ctxt);
8777 /* Garbage collection support for edge_def. */
8779 extern void gt_ggc_mx (tree&);
8780 extern void gt_ggc_mx (gimple&);
8781 extern void gt_ggc_mx (rtx&);
8782 extern void gt_ggc_mx (basic_block&);
8784 static void
8785 gt_ggc_mx (rtx_insn *& x)
8787 if (x)
8788 gt_ggc_mx_rtx_def ((void *) x);
8791 void
8792 gt_ggc_mx (edge_def *e)
8794 tree block = LOCATION_BLOCK (e->goto_locus);
8795 gt_ggc_mx (e->src);
8796 gt_ggc_mx (e->dest);
8797 if (current_ir_type () == IR_GIMPLE)
8798 gt_ggc_mx (e->insns.g);
8799 else
8800 gt_ggc_mx (e->insns.r);
8801 gt_ggc_mx (block);
8804 /* PCH support for edge_def. */
8806 extern void gt_pch_nx (tree&);
8807 extern void gt_pch_nx (gimple&);
8808 extern void gt_pch_nx (rtx&);
8809 extern void gt_pch_nx (basic_block&);
8811 static void
8812 gt_pch_nx (rtx_insn *& x)
8814 if (x)
8815 gt_pch_nx_rtx_def ((void *) x);
8818 void
8819 gt_pch_nx (edge_def *e)
8821 tree block = LOCATION_BLOCK (e->goto_locus);
8822 gt_pch_nx (e->src);
8823 gt_pch_nx (e->dest);
8824 if (current_ir_type () == IR_GIMPLE)
8825 gt_pch_nx (e->insns.g);
8826 else
8827 gt_pch_nx (e->insns.r);
8828 gt_pch_nx (block);
8831 void
8832 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8834 tree block = LOCATION_BLOCK (e->goto_locus);
8835 op (&(e->src), cookie);
8836 op (&(e->dest), cookie);
8837 if (current_ir_type () == IR_GIMPLE)
8838 op (&(e->insns.g), cookie);
8839 else
8840 op (&(e->insns.r), cookie);
8841 op (&(block), cookie);