/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
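/* For instance, a loop written as

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   is gimplified (schematically) so that the loop condition is wrapped
   in an internal call along the lines of

     _tmp = .ANNOTATE (_cond, annot_expr_ivdep_kind);
     if (_tmp != 0) goto body; else goto exit;

   The code above records the hint on the struct loop (here,
   safelen = INT_MAX) and rewrites the call into the plain copy
   _tmp = _cond.  The temporary names shown are illustrative only,
   not what the gimplifier actually prints.  */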
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
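/* As an example, the GNU C extension

     void *tgt = cond ? &&lab1 : &&lab2;
     goto *tgt;

   reaches this file as a GIMPLE_GOTO whose destination is a pointer
   variable rather than a LABEL_DECL, so computed_goto_p is true for
   it; a plain "goto lab;" keeps its LABEL_DECL destination and is
   handled as a simple goto by make_goto_expr_edges below.  */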
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
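/* For example, a call to abort () carries ECF_NORETURN and so is
   marked control-altering, as is a call to __builtin_return, while an
   ordinary call to a math helper such as sqrt () has none of these
   properties, gets the flag cleared, and make_blocks below can keep
   extending the same basic block past it.  */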
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
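/* E.g. a degenerate condition such as "if (0 != 0)" that earlier
   lowering left behind folds to boolean_false_node here, so the
   statement is canonicalized via gimple_cond_make_false and later
   CFG cleanup can delete the unreachable arm.  */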
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
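/* Schematically, factoring rewrites

     <bb 2>: goto *p1_;          <bb 3>: goto *p2_;

   into

     <bb 2>: gotovar = p1_;      <bb 3>: gotovar = p2_;
	     goto <bb D>;                goto <bb D>;

     <bb D>: __factored_label:
	     goto *gotovar;

   so that only the dispatcher block D needs an edge to every
   FORCED_LABEL target; the block numbers and names shown here are
   illustrative only.  */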
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label
		  = gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
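/* As an example of why this matters, in

     if (x) foo (); else bar ();

   the condition, the call to foo and the call to bar all share one
   source line.  Giving their blocks distinct (line, discriminator)
   pairs lets a sample-based profiler attribute samples to each of the
   three blocks instead of lumping them into a single line count.  */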
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
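/* To illustrate the three steps: a block that starts out as

     <D.2345>:
     out:
     <D.2346>:
     x = 1;

   gets "out" chosen as its leading label (user-defined labels win over
   artificial ones), every goto or case referring to D.2345 or D.2346
   is redirected to "out", and the two artificial labels are then
   deleted.  The D.nnnn names are just how artificial labels appear in
   gimple dumps.  */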
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gcall *call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}

/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
2034 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2036 static void
2037 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2039 /* Since this block is no longer reachable, we can just delete all
2040 of its PHI nodes. */
2041 remove_phi_nodes (bb);
2043 /* Remove edges to BB's successors. */
2044 while (EDGE_COUNT (bb->succs) > 0)
2045 remove_edge (EDGE_SUCC (bb, 0));
2049 /* Remove statements of basic block BB. */
2051 static void
2052 remove_bb (basic_block bb)
2054 gimple_stmt_iterator i;
2056 if (dump_file)
2058 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2059 if (dump_flags & TDF_DETAILS)
2061 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2062 fprintf (dump_file, "\n");
2066 if (current_loops)
2068 struct loop *loop = bb->loop_father;
2070 /* If a loop gets removed, clean up the information associated
2071 with it. */
2072 if (loop->latch == bb
2073 || loop->header == bb)
2074 free_numbers_of_iterations_estimates_loop (loop);
2077 /* Remove all the instructions in the block. */
2078 if (bb_seq (bb) != NULL)
2080 /* Walk backwards so as to get a chance to substitute all
2081 released DEFs into debug stmts. See
2082 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2083 details. */
2084 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2086 gimple stmt = gsi_stmt (i);
2087 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2088 if (label_stmt
2089 && (FORCED_LABEL (gimple_label_label (label_stmt))
2090 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2092 basic_block new_bb;
2093 gimple_stmt_iterator new_gsi;
2095 /* A non-reachable non-local label may still be referenced.
2096 But it no longer needs to carry the extra semantics of
2097 non-locality. */
2098 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2100 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2101 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2104 new_bb = bb->prev_bb;
2105 new_gsi = gsi_start_bb (new_bb);
2106 gsi_remove (&i, false);
2107 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2109 else
2111 /* Release SSA definitions if we are in SSA. Note that we
2112 may be called when not in SSA. For example,
2113 final_cleanup calls this function via
2114 cleanup_tree_cfg. */
2115 if (gimple_in_ssa_p (cfun))
2116 release_defs (stmt);
2118 gsi_remove (&i, true);
2121 if (gsi_end_p (i))
2122 i = gsi_last_bb (bb);
2123 else
2124 gsi_prev (&i);
2128 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2129 bb->il.gimple.seq = NULL;
2130 bb->il.gimple.phi_nodes = NULL;
2134 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2135 predicate VAL, return the edge that will be taken out of the block.
2136 If VAL does not match a unique edge, NULL is returned. */
2138 edge
2139 find_taken_edge (basic_block bb, tree val)
2141 gimple stmt;
2143 stmt = last_stmt (bb);
2145 gcc_assert (stmt);
2146 gcc_assert (is_ctrl_stmt (stmt));
2148 if (val == NULL)
2149 return NULL;
2151 if (!is_gimple_min_invariant (val))
2152 return NULL;
2154 if (gimple_code (stmt) == GIMPLE_COND)
2155 return find_taken_edge_cond_expr (bb, val);
2157 if (gimple_code (stmt) == GIMPLE_SWITCH)
2158 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2160 if (computed_goto_p (stmt))
2162 /* Only optimize if the argument is a label; if the argument is
2163 not a label then we cannot construct a proper CFG.
2165 It may be the case that we only need to allow the LABEL_REF to
2166 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2167 appear inside a LABEL_EXPR just to be safe. */
2168 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2169 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2170 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2171 return NULL;
2174 gcc_unreachable ();
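/* Usage sketch: once propagation has proven the controlling value of
   the last statement in BB constant, the surviving edge can be looked
   up directly (assuming BB really ends in a control statement):

     edge taken = find_taken_edge (bb, integer_zero_node);

   A non-NULL result means every other outgoing edge of BB is dead.
   For a GIMPLE_COND a zero value yields the false edge; a NULL or
   non-invariant VAL simply returns NULL.  */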
2177 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2178 statement, determine which of the outgoing edges will be taken out of the
2179 block. Return NULL if any edge may be taken. */
2181 static edge
2182 find_taken_edge_computed_goto (basic_block bb, tree val)
2184 basic_block dest;
2185 edge e = NULL;
2187 dest = label_to_block (val);
2188 if (dest)
2190 e = find_edge (bb, dest);
2191 gcc_assert (e != NULL);
2194 return e;
2197 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2198 statement, determine which of the two edges will be taken out of the
2199 block. Return NULL if either edge may be taken. */
2201 static edge
2202 find_taken_edge_cond_expr (basic_block bb, tree val)
2204 edge true_edge, false_edge;
2206 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2208 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2209 return (integer_zerop (val) ? false_edge : true_edge);
2212 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2213 statement, determine which edge will be taken out of the block. Return
2214 NULL if any edge may be taken. */
2216 static edge
2217 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2218 tree val)
2220 basic_block dest_bb;
2221 edge e;
2222 tree taken_case;
2224 taken_case = find_case_label_for_value (switch_stmt, val);
2225 dest_bb = label_to_block (CASE_LABEL (taken_case));
2227 e = find_edge (bb, dest_bb);
2228 gcc_assert (e);
2229 return e;
2233 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2234 We can make optimal use here of the fact that the case labels are
2235 sorted: We can do a binary search for a case matching VAL. */
2237 static tree
2238 find_case_label_for_value (gswitch *switch_stmt, tree val)
2240 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2241 tree default_case = gimple_switch_default_label (switch_stmt);
2243 for (low = 0, high = n; high - low > 1; )
2245 size_t i = (high + low) / 2;
2246 tree t = gimple_switch_label (switch_stmt, i);
2247 int cmp;
2249 /* Cache the result of comparing CASE_LOW and val. */
2250 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2252 if (cmp > 0)
2253 high = i;
2254 else
2255 low = i;
2257 if (CASE_HIGH (t) == NULL)
2259 /* A single-valued case label. */
2260 if (cmp == 0)
2261 return t;
2263 else
2265 /* A case range. We can only handle integer ranges. */
2266 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2267 return t;
2271 return default_case;
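/* Worked example of the binary search above.  For a hypothetical
   switch whose sorted label vector is

     default:  case 1:  case 5 ... 10:  case 42:

   looking up VAL == 7 narrows [low, high) onto the "5 ... 10" entry:
   CASE_LOW (5) compares <= 7 and CASE_HIGH (10) compares >= 7, so the
   range label is returned.  VAL == 3 falls between entries, no label
   matches, and the default label is returned instead.  */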
2275 /* Dump a basic block on stderr. */
2277 void
2278 gimple_debug_bb (basic_block bb)
2280 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2284 /* Dump basic block with index N on stderr. */
2286 basic_block
2287 gimple_debug_bb_n (int n)
2289 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2290 return BASIC_BLOCK_FOR_FN (cfun, n);
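/* These helpers are meant to be called by hand from a debugger while
   cc1 is stopped inside a pass, e.g.:

     (gdb) call gimple_debug_bb_n (3)

   gimple_debug_bb_n also returns the block so that it can be inspected
   further from the debugger.  */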
2294 /* Dump the CFG on stderr.
2296 FLAGS are the same as those used by the tree dumping functions
2297 (see TDF_* in dumpfile.h). */
2299 void
2300 gimple_debug_cfg (int flags)
2302 gimple_dump_cfg (stderr, flags);
2306 /* Dump the program showing basic block boundaries on the given FILE.
2308 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2309 dumpfile.h). */
2311 void
2312 gimple_dump_cfg (FILE *file, int flags)
2314 if (flags & TDF_DETAILS)
2316 dump_function_header (file, current_function_decl, flags);
2317 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2318 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2319 last_basic_block_for_fn (cfun));
2321 brief_dump_cfg (file, flags | TDF_COMMENT);
2322 fprintf (file, "\n");
2325 if (flags & TDF_STATS)
2326 dump_cfg_stats (file);
2328 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2332 /* Dump CFG statistics on FILE. */
2334 void
2335 dump_cfg_stats (FILE *file)
2337 static long max_num_merged_labels = 0;
2338 unsigned long size, total = 0;
2339 long num_edges;
2340 basic_block bb;
2341 const char * const fmt_str = "%-30s%-13s%12s\n";
2342 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2343 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2344 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2345 const char *funcname = current_function_name ();
2347 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2349 fprintf (file, "---------------------------------------------------------\n");
2350 fprintf (file, fmt_str, "", " Number of ", "Memory");
2351 fprintf (file, fmt_str, "", " instances ", "used ");
2352 fprintf (file, "---------------------------------------------------------\n");
2354 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2355 total += size;
2356 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2357 SCALE (size), LABEL (size));
2359 num_edges = 0;
2360 FOR_EACH_BB_FN (bb, cfun)
2361 num_edges += EDGE_COUNT (bb->succs);
2362 size = num_edges * sizeof (struct edge_def);
2363 total += size;
2364 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2366 fprintf (file, "---------------------------------------------------------\n");
2367 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2368 LABEL (total));
2369 fprintf (file, "---------------------------------------------------------\n");
2370 fprintf (file, "\n");
2372 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2373 max_num_merged_labels = cfg_stats.num_merged_labels;
2375 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2376 cfg_stats.num_merged_labels, max_num_merged_labels);
2378 fprintf (file, "\n");
2382 /* Dump CFG statistics on stderr. Keep extern so that it's always
2383 linked in the final executable. */
2385 DEBUG_FUNCTION void
2386 debug_cfg_stats (void)
2388 dump_cfg_stats (stderr);
2391 /*---------------------------------------------------------------------------
2392 Miscellaneous helpers
2393 ---------------------------------------------------------------------------*/
2395 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2396 flow. Transfers of control flow associated with EH are excluded. */
2398 static bool
2399 call_can_make_abnormal_goto (gimple t)
2401 /* If the function has no non-local labels, then a call cannot make an
2402 abnormal transfer of control. */
2403 if (!cfun->has_nonlocal_label
2404 && !cfun->calls_setjmp)
2405 return false;
2407 /* Likewise if the call has no side effects. */
2408 if (!gimple_has_side_effects (t))
2409 return false;
2411 /* Likewise if the called function is leaf. */
2412 if (gimple_call_flags (t) & ECF_LEAF)
2413 return false;
2415 return true;
2419 /* Return true if T can make an abnormal transfer of control flow.
2420 Transfers of control flow associated with EH are excluded. */
2422 bool
2423 stmt_can_make_abnormal_goto (gimple t)
2425 if (computed_goto_p (t))
2426 return true;
2427 if (is_gimple_call (t))
2428 return call_can_make_abnormal_goto (t);
2429 return false;
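/* For illustration, the kind of source that makes this predicate true
   for an ordinary-looking call (a sketch; "work" is hypothetical):

     jmp_buf env;
     void f (void)
     {
       if (setjmp (env) == 0)
         work ();
     }

   The call to work may transfer control abnormally back to the setjmp
   receiver via longjmp, so it needs abnormal edges; a leaf or
   side-effect-free callee would not.  */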
2433 /* Return true if T represents a stmt that always transfers control. */
2435 bool
2436 is_ctrl_stmt (gimple t)
2438 switch (gimple_code (t))
2440 case GIMPLE_COND:
2441 case GIMPLE_SWITCH:
2442 case GIMPLE_GOTO:
2443 case GIMPLE_RETURN:
2444 case GIMPLE_RESX:
2445 return true;
2446 default:
2447 return false;
2452 /* Return true if T is a statement that may alter the flow of control
2453 (e.g., a call to a non-returning function). */
2455 bool
2456 is_ctrl_altering_stmt (gimple t)
2458 gcc_assert (t);
2460 switch (gimple_code (t))
2462 case GIMPLE_CALL:
2463 /* The per-statement call flag indicates whether the call could alter
2464 control flow. */
2465 if (gimple_call_ctrl_altering_p (t))
2466 return true;
2467 break;
2469 case GIMPLE_EH_DISPATCH:
2470 /* EH_DISPATCH branches to the individual catch handlers at
2471 this level of a try or allowed-exceptions region. It can
2472 fallthru to the next statement as well. */
2473 return true;
2475 case GIMPLE_ASM:
2476 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2477 return true;
2478 break;
2480 CASE_GIMPLE_OMP:
2481 /* OpenMP directives alter control flow. */
2482 return true;
2484 case GIMPLE_TRANSACTION:
2485 /* A transaction start alters control flow. */
2486 return true;
2488 default:
2489 break;
2492 /* If a statement can throw, it alters control flow. */
2493 return stmt_can_throw_internal (t);
2497 /* Return true if T is a simple local goto. */
2499 bool
2500 simple_goto_p (gimple t)
2502 return (gimple_code (t) == GIMPLE_GOTO
2503 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2507 /* Return true if STMT should start a new basic block. PREV_STMT is
2508 the statement preceding STMT. It is used when STMT is a label or a
2509 case label. Labels should only start a new basic block if their
2510 previous statement wasn't a label. Otherwise, a sequence of labels
2511 would generate unnecessary basic blocks that only contain a single
2512 label. */
2514 static inline bool
2515 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2517 if (stmt == NULL)
2518 return false;
2520 /* Labels start a new basic block only if the preceding statement
2521 wasn't a label of the same type. This prevents the creation of
2522 consecutive blocks that have nothing but a single label. */
2523 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2525 /* Nonlocal and computed GOTO targets always start a new block. */
2526 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2527 || FORCED_LABEL (gimple_label_label (label_stmt)))
2528 return true;
2530 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2532 if (DECL_NONLOCAL (gimple_label_label (
2533 as_a <glabel *> (prev_stmt))))
2534 return true;
2536 cfg_stats.num_merged_labels++;
2537 return false;
2539 else
2540 return true;
2542 else if (gimple_code (stmt) == GIMPLE_CALL
2543 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2544 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2545 start a new block. */
2546 return true;
2548 return false;
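/* For illustration, consecutive user labels that this predicate folds
   into a single block (counted in cfg_stats.num_merged_labels):

     out:
     done:
       return x;

   Here "done" does not start a new block.  A nonlocal label or a
   computed-goto target in the same position would force a fresh block
   of its own.  */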
2552 /* Return true if T should end a basic block. */
2554 bool
2555 stmt_ends_bb_p (gimple t)
2557 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2560 /* Remove block annotations and other data structures. */
2562 void
2563 delete_tree_cfg_annotations (void)
2565 vec_free (label_to_block_map_for_fn (cfun));
2569 /* Return the first statement in basic block BB. */
2571 gimple
2572 first_stmt (basic_block bb)
2574 gimple_stmt_iterator i = gsi_start_bb (bb);
2575 gimple stmt = NULL;
2577 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2579 gsi_next (&i);
2580 stmt = NULL;
2582 return stmt;
2585 /* Return the first non-label statement in basic block BB. */
2587 static gimple
2588 first_non_label_stmt (basic_block bb)
2590 gimple_stmt_iterator i = gsi_start_bb (bb);
2591 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2592 gsi_next (&i);
2593 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2596 /* Return the last statement in basic block BB. */
2598 gimple
2599 last_stmt (basic_block bb)
2601 gimple_stmt_iterator i = gsi_last_bb (bb);
2602 gimple stmt = NULL;
2604 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2606 gsi_prev (&i);
2607 stmt = NULL;
2609 return stmt;
2612 /* Return the last statement of an otherwise empty block. Return NULL
2613 if the block is totally empty, or if it contains more than one
2614 statement. */
2616 gimple
2617 last_and_only_stmt (basic_block bb)
2619 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2620 gimple last, prev;
2622 if (gsi_end_p (i))
2623 return NULL;
2625 last = gsi_stmt (i);
2626 gsi_prev_nondebug (&i);
2627 if (gsi_end_p (i))
2628 return last;
2630 /* Empty statements should no longer appear in the instruction stream.
2631 Everything that might have appeared before should be deleted by
2632 remove_useless_stmts, and the optimizers should just gsi_remove
2633 instead of smashing with build_empty_stmt.
2635 Thus the only thing that should appear here in a block containing
2636 one executable statement is a label. */
2637 prev = gsi_stmt (i);
2638 if (gimple_code (prev) == GIMPLE_LABEL)
2639 return last;
2640 else
2641 return NULL;
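/* Usage sketch, e.g. for spotting nearly empty return blocks
   (illustrative only):

     gimple stmt = last_and_only_stmt (bb);
     if (stmt && gimple_code (stmt) == GIMPLE_RETURN)
       ;

   A non-NULL result means the block holds nothing but that statement,
   apart from a leading label and any debug statements.  */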
2644 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2646 static void
2647 reinstall_phi_args (edge new_edge, edge old_edge)
2649 edge_var_map *vm;
2650 int i;
2651 gphi_iterator phis;
2653 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2654 if (!v)
2655 return;
2657 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2658 v->iterate (i, &vm) && !gsi_end_p (phis);
2659 i++, gsi_next (&phis))
2661 gphi *phi = phis.phi ();
2662 tree result = redirect_edge_var_map_result (vm);
2663 tree arg = redirect_edge_var_map_def (vm);
2665 gcc_assert (result == gimple_phi_result (phi));
2667 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2670 redirect_edge_var_map_clear (old_edge);
2673 /* Returns the basic block after which the new basic block created
2674 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2675 near its "logical" location. This is of most help to humans looking
2676 at debugging dumps. */
2678 basic_block
2679 split_edge_bb_loc (edge edge_in)
2681 basic_block dest = edge_in->dest;
2682 basic_block dest_prev = dest->prev_bb;
2684 if (dest_prev)
2686 edge e = find_edge (dest_prev, dest);
2687 if (e && !(e->flags & EDGE_COMPLEX))
2688 return edge_in->src;
2690 return dest_prev;
2693 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2694 Abort on abnormal edges. */
2696 static basic_block
2697 gimple_split_edge (edge edge_in)
2699 basic_block new_bb, after_bb, dest;
2700 edge new_edge, e;
2702 /* Abnormal edges cannot be split. */
2703 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2705 dest = edge_in->dest;
2707 after_bb = split_edge_bb_loc (edge_in);
2709 new_bb = create_empty_bb (after_bb);
2710 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2711 new_bb->count = edge_in->count;
2712 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2713 new_edge->probability = REG_BR_PROB_BASE;
2714 new_edge->count = edge_in->count;
2716 e = redirect_edge_and_branch (edge_in, new_bb);
2717 gcc_assert (e == edge_in);
2718 reinstall_phi_args (new_edge, e);
2720 return new_bb;
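/* Callers normally reach this through the CFG hook wrapper, e.g. when
   breaking a critical edge before inserting code on it (a sketch using
   the generic split_edge / EDGE_CRITICAL_P interfaces):

     if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
       {
         basic_block bb = split_edge (e);
         ...
       }

   The new block inherits the edge's count and frequency, and the PHI
   arguments queued on the redirected edge are reinstalled on it.  */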
2724 /* Verify properties of the address expression T with base object BASE. */
2726 static tree
2727 verify_address (tree t, tree base)
2729 bool old_constant;
2730 bool old_side_effects;
2731 bool new_constant;
2732 bool new_side_effects;
2734 old_constant = TREE_CONSTANT (t);
2735 old_side_effects = TREE_SIDE_EFFECTS (t);
2737 recompute_tree_invariant_for_addr_expr (t);
2738 new_side_effects = TREE_SIDE_EFFECTS (t);
2739 new_constant = TREE_CONSTANT (t);
2741 if (old_constant != new_constant)
2743 error ("constant not recomputed when ADDR_EXPR changed");
2744 return t;
2746 if (old_side_effects != new_side_effects)
2748 error ("side effects not recomputed when ADDR_EXPR changed");
2749 return t;
2752 if (!(TREE_CODE (base) == VAR_DECL
2753 || TREE_CODE (base) == PARM_DECL
2754 || TREE_CODE (base) == RESULT_DECL))
2755 return NULL_TREE;
2757 if (DECL_GIMPLE_REG_P (base))
2759 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2760 return base;
2763 return NULL_TREE;
2766 /* Callback for walk_tree, check that all elements with address taken are
2767 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2768 inside a PHI node. */
2770 static tree
2771 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2773 tree t = *tp, x;
2775 if (TYPE_P (t))
2776 *walk_subtrees = 0;
2778 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2779 #define CHECK_OP(N, MSG) \
2780 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2781 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2783 switch (TREE_CODE (t))
2785 case SSA_NAME:
2786 if (SSA_NAME_IN_FREE_LIST (t))
2788 error ("SSA name in freelist but still referenced");
2789 return *tp;
2791 break;
2793 case INDIRECT_REF:
2794 error ("INDIRECT_REF in gimple IL");
2795 return t;
2797 case MEM_REF:
2798 x = TREE_OPERAND (t, 0);
2799 if (!POINTER_TYPE_P (TREE_TYPE (x))
2800 || !is_gimple_mem_ref_addr (x))
2802 error ("invalid first operand of MEM_REF");
2803 return x;
2805 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2806 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2808 error ("invalid offset operand of MEM_REF");
2809 return TREE_OPERAND (t, 1);
2811 if (TREE_CODE (x) == ADDR_EXPR
2812 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2813 return x;
2814 *walk_subtrees = 0;
2815 break;
2817 case ASSERT_EXPR:
2818 x = fold (ASSERT_EXPR_COND (t));
2819 if (x == boolean_false_node)
2821 error ("ASSERT_EXPR with an always-false condition");
2822 return *tp;
2824 break;
2826 case MODIFY_EXPR:
2827 error ("MODIFY_EXPR not expected while having tuples");
2828 return *tp;
2830 case ADDR_EXPR:
2832 tree tem;
2834 gcc_assert (is_gimple_address (t));
2836 /* Skip any references (they will be checked when we recurse down the
2837 tree) and ensure that any variable used as a prefix is marked
2838 addressable. */
2839 for (x = TREE_OPERAND (t, 0);
2840 handled_component_p (x);
2841 x = TREE_OPERAND (x, 0))
2844 if ((tem = verify_address (t, x)))
2845 return tem;
2847 if (!(TREE_CODE (x) == VAR_DECL
2848 || TREE_CODE (x) == PARM_DECL
2849 || TREE_CODE (x) == RESULT_DECL))
2850 return NULL;
2852 if (!TREE_ADDRESSABLE (x))
2854 error ("address taken, but ADDRESSABLE bit not set");
2855 return x;
2858 break;
2861 case COND_EXPR:
2862 x = COND_EXPR_COND (t);
2863 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2865 error ("non-integral used in condition");
2866 return x;
2868 if (!is_gimple_condexpr (x))
2870 error ("invalid conditional operand");
2871 return x;
2873 break;
2875 case NON_LVALUE_EXPR:
2876 case TRUTH_NOT_EXPR:
2877 gcc_unreachable ();
2879 CASE_CONVERT:
2880 case FIX_TRUNC_EXPR:
2881 case FLOAT_EXPR:
2882 case NEGATE_EXPR:
2883 case ABS_EXPR:
2884 case BIT_NOT_EXPR:
2885 CHECK_OP (0, "invalid operand to unary operator");
2886 break;
2888 case REALPART_EXPR:
2889 case IMAGPART_EXPR:
2890 case BIT_FIELD_REF:
2891 if (!is_gimple_reg_type (TREE_TYPE (t)))
2893 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2894 return t;
2897 if (TREE_CODE (t) == BIT_FIELD_REF)
2899 tree t0 = TREE_OPERAND (t, 0);
2900 tree t1 = TREE_OPERAND (t, 1);
2901 tree t2 = TREE_OPERAND (t, 2);
2902 if (!tree_fits_uhwi_p (t1)
2903 || !tree_fits_uhwi_p (t2))
2905 error ("invalid position or size operand to BIT_FIELD_REF");
2906 return t;
2908 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2909 && (TYPE_PRECISION (TREE_TYPE (t))
2910 != tree_to_uhwi (t1)))
2912 error ("integral result type precision does not match "
2913 "field size of BIT_FIELD_REF");
2914 return t;
2916 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2917 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2918 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2919 != tree_to_uhwi (t1)))
2921 error ("mode precision of non-integral result does not "
2922 "match field size of BIT_FIELD_REF");
2923 return t;
2925 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2926 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2927 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2929 error ("position plus size exceeds size of referenced object in "
2930 "BIT_FIELD_REF");
2931 return t;
2934 t = TREE_OPERAND (t, 0);
2936 /* Fall-through. */
2937 case COMPONENT_REF:
2938 case ARRAY_REF:
2939 case ARRAY_RANGE_REF:
2940 case VIEW_CONVERT_EXPR:
2941 /* We have a nest of references. Verify that each of the operands
2942 that determine where to reference is either a constant or a variable,
2943 verify that the base is valid, and then show we've already checked
2944 the subtrees. */
2945 while (handled_component_p (t))
2947 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2948 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2949 else if (TREE_CODE (t) == ARRAY_REF
2950 || TREE_CODE (t) == ARRAY_RANGE_REF)
2952 CHECK_OP (1, "invalid array index");
2953 if (TREE_OPERAND (t, 2))
2954 CHECK_OP (2, "invalid array lower bound");
2955 if (TREE_OPERAND (t, 3))
2956 CHECK_OP (3, "invalid array stride");
2958 else if (TREE_CODE (t) == BIT_FIELD_REF
2959 || TREE_CODE (t) == REALPART_EXPR
2960 || TREE_CODE (t) == IMAGPART_EXPR)
2962 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2963 "REALPART_EXPR");
2964 return t;
2967 t = TREE_OPERAND (t, 0);
2970 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2972 error ("invalid reference prefix");
2973 return t;
2975 *walk_subtrees = 0;
2976 break;
2977 case PLUS_EXPR:
2978 case MINUS_EXPR:
2979 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2980 should be done using POINTER_PLUS_EXPR. */
2981 if (POINTER_TYPE_P (TREE_TYPE (t)))
2983 error ("invalid operand to plus/minus, type is a pointer");
2984 return t;
2986 CHECK_OP (0, "invalid operand to binary operator");
2987 CHECK_OP (1, "invalid operand to binary operator");
2988 break;
2990 case POINTER_PLUS_EXPR:
2991 /* Check to make sure the first operand is a pointer or reference type. */
2992 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2994 error ("invalid operand to pointer plus, first operand is not a pointer");
2995 return t;
2997 /* Check to make sure the second operand is a ptrofftype. */
2998 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3000 error ("invalid operand to pointer plus, second operand is not an "
3001 "integer type of appropriate width");
3002 return t;
3004 /* FALLTHROUGH */
3005 case LT_EXPR:
3006 case LE_EXPR:
3007 case GT_EXPR:
3008 case GE_EXPR:
3009 case EQ_EXPR:
3010 case NE_EXPR:
3011 case UNORDERED_EXPR:
3012 case ORDERED_EXPR:
3013 case UNLT_EXPR:
3014 case UNLE_EXPR:
3015 case UNGT_EXPR:
3016 case UNGE_EXPR:
3017 case UNEQ_EXPR:
3018 case LTGT_EXPR:
3019 case MULT_EXPR:
3020 case TRUNC_DIV_EXPR:
3021 case CEIL_DIV_EXPR:
3022 case FLOOR_DIV_EXPR:
3023 case ROUND_DIV_EXPR:
3024 case TRUNC_MOD_EXPR:
3025 case CEIL_MOD_EXPR:
3026 case FLOOR_MOD_EXPR:
3027 case ROUND_MOD_EXPR:
3028 case RDIV_EXPR:
3029 case EXACT_DIV_EXPR:
3030 case MIN_EXPR:
3031 case MAX_EXPR:
3032 case LSHIFT_EXPR:
3033 case RSHIFT_EXPR:
3034 case LROTATE_EXPR:
3035 case RROTATE_EXPR:
3036 case BIT_IOR_EXPR:
3037 case BIT_XOR_EXPR:
3038 case BIT_AND_EXPR:
3039 CHECK_OP (0, "invalid operand to binary operator");
3040 CHECK_OP (1, "invalid operand to binary operator");
3041 break;
3043 case CONSTRUCTOR:
3044 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3045 *walk_subtrees = 0;
3046 break;
3048 case CASE_LABEL_EXPR:
3049 if (CASE_CHAIN (t))
3051 error ("invalid CASE_CHAIN");
3052 return t;
3054 break;
3056 default:
3057 break;
3059 return NULL;
3061 #undef CHECK_OP
3065 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3066 Returns true if there is an error, otherwise false. */
3068 static bool
3069 verify_types_in_gimple_min_lval (tree expr)
3071 tree op;
3073 if (is_gimple_id (expr))
3074 return false;
3076 if (TREE_CODE (expr) != TARGET_MEM_REF
3077 && TREE_CODE (expr) != MEM_REF)
3079 error ("invalid expression for min lvalue");
3080 return true;
3083 /* TARGET_MEM_REFs are strange beasts. */
3084 if (TREE_CODE (expr) == TARGET_MEM_REF)
3085 return false;
3087 op = TREE_OPERAND (expr, 0);
3088 if (!is_gimple_val (op))
3090 error ("invalid operand in indirect reference");
3091 debug_generic_stmt (op);
3092 return true;
3094 /* Memory references now generally can involve a value conversion. */
3096 return false;
3099 /* Verify if EXPR is a valid GIMPLE reference expression. If
3100 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3101 if there is an error, otherwise false. */
3103 static bool
3104 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3106 while (handled_component_p (expr))
3108 tree op = TREE_OPERAND (expr, 0);
3110 if (TREE_CODE (expr) == ARRAY_REF
3111 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3113 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3114 || (TREE_OPERAND (expr, 2)
3115 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3116 || (TREE_OPERAND (expr, 3)
3117 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3119 error ("invalid operands to array reference");
3120 debug_generic_stmt (expr);
3121 return true;
3125 /* Verify if the reference array element types are compatible. */
3126 if (TREE_CODE (expr) == ARRAY_REF
3127 && !useless_type_conversion_p (TREE_TYPE (expr),
3128 TREE_TYPE (TREE_TYPE (op))))
3130 error ("type mismatch in array reference");
3131 debug_generic_stmt (TREE_TYPE (expr));
3132 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3133 return true;
3135 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3136 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3137 TREE_TYPE (TREE_TYPE (op))))
3139 error ("type mismatch in array range reference");
3140 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3141 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3142 return true;
3145 if ((TREE_CODE (expr) == REALPART_EXPR
3146 || TREE_CODE (expr) == IMAGPART_EXPR)
3147 && !useless_type_conversion_p (TREE_TYPE (expr),
3148 TREE_TYPE (TREE_TYPE (op))))
3150 error ("type mismatch in real/imagpart reference");
3151 debug_generic_stmt (TREE_TYPE (expr));
3152 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3153 return true;
3156 if (TREE_CODE (expr) == COMPONENT_REF
3157 && !useless_type_conversion_p (TREE_TYPE (expr),
3158 TREE_TYPE (TREE_OPERAND (expr, 1))))
3160 error ("type mismatch in component reference");
3161 debug_generic_stmt (TREE_TYPE (expr));
3162 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3163 return true;
3166 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3168 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3169 that their operand is not an SSA name or an invariant when
3170 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3171 bug). Otherwise there is nothing to verify, gross mismatches at
3172 most invoke undefined behavior. */
3173 if (require_lvalue
3174 && (TREE_CODE (op) == SSA_NAME
3175 || is_gimple_min_invariant (op)))
3177 error ("conversion of an SSA_NAME on the left hand side");
3178 debug_generic_stmt (expr);
3179 return true;
3181 else if (TREE_CODE (op) == SSA_NAME
3182 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3184 error ("conversion of register to a different size");
3185 debug_generic_stmt (expr);
3186 return true;
3188 else if (!handled_component_p (op))
3189 return false;
3192 expr = op;
3195 if (TREE_CODE (expr) == MEM_REF)
3197 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3199 error ("invalid address operand in MEM_REF");
3200 debug_generic_stmt (expr);
3201 return true;
3203 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3204 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3206 error ("invalid offset operand in MEM_REF");
3207 debug_generic_stmt (expr);
3208 return true;
3211 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3213 if (!TMR_BASE (expr)
3214 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3216 error ("invalid address operand in TARGET_MEM_REF");
3217 return true;
3219 if (!TMR_OFFSET (expr)
3220 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3221 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3223 error ("invalid offset operand in TARGET_MEM_REF");
3224 debug_generic_stmt (expr);
3225 return true;
3229 return ((require_lvalue || !is_gimple_min_invariant (expr))
3230 && verify_types_in_gimple_min_lval (expr));
3233 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3234 list of pointer-to types that is trivially convertible to DEST. */
3236 static bool
3237 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3239 tree src;
3241 if (!TYPE_POINTER_TO (src_obj))
3242 return true;
3244 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3245 if (useless_type_conversion_p (dest, src))
3246 return true;
3248 return false;
3251 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3252 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3254 static bool
3255 valid_fixed_convert_types_p (tree type1, tree type2)
3257 return (FIXED_POINT_TYPE_P (type1)
3258 && (INTEGRAL_TYPE_P (type2)
3259 || SCALAR_FLOAT_TYPE_P (type2)
3260 || FIXED_POINT_TYPE_P (type2)));
3263 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3264 is a problem, otherwise false. */
3266 static bool
3267 verify_gimple_call (gcall *stmt)
3269 tree fn = gimple_call_fn (stmt);
3270 tree fntype, fndecl;
3271 unsigned i;
3273 if (gimple_call_internal_p (stmt))
3275 if (fn)
3277 error ("gimple call has two targets");
3278 debug_generic_stmt (fn);
3279 return true;
3282 else
3284 if (!fn)
3286 error ("gimple call has no target");
3287 return true;
3291 if (fn && !is_gimple_call_addr (fn))
3293 error ("invalid function in gimple call");
3294 debug_generic_stmt (fn);
3295 return true;
3298 if (fn
3299 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3300 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3301 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3303 error ("non-function in gimple call");
3304 return true;
3307 fndecl = gimple_call_fndecl (stmt);
3308 if (fndecl
3309 && TREE_CODE (fndecl) == FUNCTION_DECL
3310 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3311 && !DECL_PURE_P (fndecl)
3312 && !TREE_READONLY (fndecl))
3314 error ("invalid pure const state for function");
3315 return true;
3318 if (gimple_call_lhs (stmt)
3319 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3320 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3322 error ("invalid LHS in gimple call");
3323 return true;
3326 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3328 error ("LHS in noreturn call");
3329 return true;
3332 fntype = gimple_call_fntype (stmt);
3333 if (fntype
3334 && gimple_call_lhs (stmt)
3335 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3336 TREE_TYPE (fntype))
3337 /* ??? At least C++ misses conversions at assignments from
3338 void * call results.
3339 ??? Java is completely off. Especially with functions
3340 returning java.lang.Object.
3341 For now simply allow arbitrary pointer type conversions. */
3342 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3343 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3345 error ("invalid conversion in gimple call");
3346 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3347 debug_generic_stmt (TREE_TYPE (fntype));
3348 return true;
3351 if (gimple_call_chain (stmt)
3352 && !is_gimple_val (gimple_call_chain (stmt)))
3354 error ("invalid static chain in gimple call");
3355 debug_generic_stmt (gimple_call_chain (stmt));
3356 return true;
3359 /* If there is a static chain argument, the call should either be
3360 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3361 if (gimple_call_chain (stmt)
3362 && fndecl
3363 && !DECL_STATIC_CHAIN (fndecl))
3365 error ("static chain with function that doesn%'t use one");
3366 return true;
3369 /* ??? The C frontend passes unpromoted arguments in case it
3370 didn't see a function declaration before the call. So for now
3371 leave the call arguments mostly unverified. Once we gimplify
3372 unit-at-a-time we have a chance to fix this. */
3374 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3376 tree arg = gimple_call_arg (stmt, i);
3377 if ((is_gimple_reg_type (TREE_TYPE (arg))
3378 && !is_gimple_val (arg))
3379 || (!is_gimple_reg_type (TREE_TYPE (arg))
3380 && !is_gimple_lvalue (arg)))
3382 error ("invalid argument to gimple call");
3383 debug_generic_expr (arg);
3384 return true;
3388 return false;
3391 /* Verifies the gimple comparison with the result type TYPE and
3392 the operands OP0 and OP1. */
3394 static bool
3395 verify_gimple_comparison (tree type, tree op0, tree op1)
3397 tree op0_type = TREE_TYPE (op0);
3398 tree op1_type = TREE_TYPE (op1);
3400 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3402 error ("invalid operands in gimple comparison");
3403 return true;
3406 /* For comparisons we do not record the operation's type, i.e. the
3407 effective type the comparison is carried out in. Instead
3408 we require that either the first operand is trivially
3409 convertible into the second, or the other way around.
3410 Because we special-case pointers to void we also allow
3411 comparisons of pointers with the same mode. */
3412 if (!useless_type_conversion_p (op0_type, op1_type)
3413 && !useless_type_conversion_p (op1_type, op0_type)
3414 && (!POINTER_TYPE_P (op0_type)
3415 || !POINTER_TYPE_P (op1_type)
3416 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3418 error ("mismatching comparison operand types");
3419 debug_generic_expr (op0_type);
3420 debug_generic_expr (op1_type);
3421 return true;
3424 /* The resulting type of a comparison may be an effective boolean type. */
3425 if (INTEGRAL_TYPE_P (type)
3426 && (TREE_CODE (type) == BOOLEAN_TYPE
3427 || TYPE_PRECISION (type) == 1))
3429 if (TREE_CODE (op0_type) == VECTOR_TYPE
3430 || TREE_CODE (op1_type) == VECTOR_TYPE)
3432 error ("vector comparison returning a boolean");
3433 debug_generic_expr (op0_type);
3434 debug_generic_expr (op1_type);
3435 return true;
3438 /* Or an integer vector type with the same size and element count
3439 as the comparison operand types. */
3440 else if (TREE_CODE (type) == VECTOR_TYPE
3441 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3443 if (TREE_CODE (op0_type) != VECTOR_TYPE
3444 || TREE_CODE (op1_type) != VECTOR_TYPE)
3446 error ("non-vector operands in vector comparison");
3447 debug_generic_expr (op0_type);
3448 debug_generic_expr (op1_type);
3449 return true;
3452 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3453 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3454 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3455 /* The result of a vector comparison is of signed
3456 integral type. */
3457 || TYPE_UNSIGNED (TREE_TYPE (type)))
3459 error ("invalid vector comparison resulting type");
3460 debug_generic_expr (type);
3461 return true;
3464 else
3466 error ("bogus comparison result type");
3467 debug_generic_expr (type);
3468 return true;
3471 return false;
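/* For reference, comparison shapes the checks above accept (GIMPLE
   pseudo-dumps, illustrative):

     _1 = a_2 < b_3;      scalar operands, boolean-like scalar result
     v_1 = u_2 < w_3;     vector operands, signed integer vector result
                          with matching subparts and element size

   A boolean result from vector operands, or an unsigned vector result
   element type, is rejected.  */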
3474 /* Verify a gimple assignment statement STMT with an unary rhs.
3475 Returns true if anything is wrong. */
3477 static bool
3478 verify_gimple_assign_unary (gassign *stmt)
3480 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3481 tree lhs = gimple_assign_lhs (stmt);
3482 tree lhs_type = TREE_TYPE (lhs);
3483 tree rhs1 = gimple_assign_rhs1 (stmt);
3484 tree rhs1_type = TREE_TYPE (rhs1);
3486 if (!is_gimple_reg (lhs))
3488 error ("non-register as LHS of unary operation");
3489 return true;
3492 if (!is_gimple_val (rhs1))
3494 error ("invalid operand in unary operation");
3495 return true;
3498 /* First handle conversions. */
3499 switch (rhs_code)
3501 CASE_CONVERT:
3503 /* Allow conversions from pointer type to integral type only if
3504 there is no sign or zero extension involved.
3505 For targets where the precision of ptrofftype doesn't match that
3506 of pointers we need to allow arbitrary conversions to ptrofftype. */
3507 if ((POINTER_TYPE_P (lhs_type)
3508 && INTEGRAL_TYPE_P (rhs1_type))
3509 || (POINTER_TYPE_P (rhs1_type)
3510 && INTEGRAL_TYPE_P (lhs_type)
3511 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3512 || ptrofftype_p (sizetype))))
3513 return false;
3515 /* Allow conversion from integral to offset type and vice versa. */
3516 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3517 && INTEGRAL_TYPE_P (rhs1_type))
3518 || (INTEGRAL_TYPE_P (lhs_type)
3519 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3520 return false;
3522 /* Otherwise assert we are converting between types of the
3523 same kind. */
3524 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3526 error ("invalid types in nop conversion");
3527 debug_generic_expr (lhs_type);
3528 debug_generic_expr (rhs1_type);
3529 return true;
3532 return false;
3535 case ADDR_SPACE_CONVERT_EXPR:
3537 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3538 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3539 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3541 error ("invalid types in address space conversion");
3542 debug_generic_expr (lhs_type);
3543 debug_generic_expr (rhs1_type);
3544 return true;
3547 return false;
3550 case FIXED_CONVERT_EXPR:
3552 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3553 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3555 error ("invalid types in fixed-point conversion");
3556 debug_generic_expr (lhs_type);
3557 debug_generic_expr (rhs1_type);
3558 return true;
3561 return false;
3564 case FLOAT_EXPR:
3566 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3567 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3568 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3570 error ("invalid types in conversion to floating point");
3571 debug_generic_expr (lhs_type);
3572 debug_generic_expr (rhs1_type);
3573 return true;
3576 return false;
3579 case FIX_TRUNC_EXPR:
3581 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3582 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3583 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3585 error ("invalid types in conversion to integer");
3586 debug_generic_expr (lhs_type);
3587 debug_generic_expr (rhs1_type);
3588 return true;
3591 return false;
3593 case REDUC_MAX_EXPR:
3594 case REDUC_MIN_EXPR:
3595 case REDUC_PLUS_EXPR:
3596 if (!VECTOR_TYPE_P (rhs1_type)
3597 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3599 error ("reduction should convert from vector to element type");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 return true;
3604 return false;
3606 case VEC_UNPACK_HI_EXPR:
3607 case VEC_UNPACK_LO_EXPR:
3608 case VEC_UNPACK_FLOAT_HI_EXPR:
3609 case VEC_UNPACK_FLOAT_LO_EXPR:
3610 /* FIXME. */
3611 return false;
3613 case NEGATE_EXPR:
3614 case ABS_EXPR:
3615 case BIT_NOT_EXPR:
3616 case PAREN_EXPR:
3617 case CONJ_EXPR:
3618 break;
3620 default:
3621 gcc_unreachable ();
3624 /* For the remaining codes assert there is no conversion involved. */
3625 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3627 error ("non-trivial conversion in unary operation");
3628 debug_generic_expr (lhs_type);
3629 debug_generic_expr (rhs1_type);
3630 return true;
3633 return false;
3636 /* Verify a gimple assignment statement STMT with a binary rhs.
3637 Returns true if anything is wrong. */
3639 static bool
3640 verify_gimple_assign_binary (gassign *stmt)
3642 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3643 tree lhs = gimple_assign_lhs (stmt);
3644 tree lhs_type = TREE_TYPE (lhs);
3645 tree rhs1 = gimple_assign_rhs1 (stmt);
3646 tree rhs1_type = TREE_TYPE (rhs1);
3647 tree rhs2 = gimple_assign_rhs2 (stmt);
3648 tree rhs2_type = TREE_TYPE (rhs2);
3650 if (!is_gimple_reg (lhs))
3652 error ("non-register as LHS of binary operation");
3653 return true;
3656 if (!is_gimple_val (rhs1)
3657 || !is_gimple_val (rhs2))
3659 error ("invalid operands in binary operation");
3660 return true;
3663 /* First handle operations that involve different types. */
3664 switch (rhs_code)
3666 case COMPLEX_EXPR:
3668 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3669 || !(INTEGRAL_TYPE_P (rhs1_type)
3670 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3671 || !(INTEGRAL_TYPE_P (rhs2_type)
3672 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3674 error ("type mismatch in complex expression");
3675 debug_generic_expr (lhs_type);
3676 debug_generic_expr (rhs1_type);
3677 debug_generic_expr (rhs2_type);
3678 return true;
3681 return false;
3684 case LSHIFT_EXPR:
3685 case RSHIFT_EXPR:
3686 case LROTATE_EXPR:
3687 case RROTATE_EXPR:
3689 /* Shifts and rotates are ok on integral types, fixed point
3690 types and integer vector types. */
3691 if ((!INTEGRAL_TYPE_P (rhs1_type)
3692 && !FIXED_POINT_TYPE_P (rhs1_type)
3693 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3694 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3695 || (!INTEGRAL_TYPE_P (rhs2_type)
3696 /* Vector shifts of vectors are also ok. */
3697 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3698 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3699 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3700 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3701 || !useless_type_conversion_p (lhs_type, rhs1_type))
3703 error ("type mismatch in shift expression");
3704 debug_generic_expr (lhs_type);
3705 debug_generic_expr (rhs1_type);
3706 debug_generic_expr (rhs2_type);
3707 return true;
3710 return false;
3713 case WIDEN_LSHIFT_EXPR:
3715 if (!INTEGRAL_TYPE_P (lhs_type)
3716 || !INTEGRAL_TYPE_P (rhs1_type)
3717 || TREE_CODE (rhs2) != INTEGER_CST
3718 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3720 error ("type mismatch in widening vector shift expression");
3721 debug_generic_expr (lhs_type);
3722 debug_generic_expr (rhs1_type);
3723 debug_generic_expr (rhs2_type);
3724 return true;
3727 return false;
3730 case VEC_WIDEN_LSHIFT_HI_EXPR:
3731 case VEC_WIDEN_LSHIFT_LO_EXPR:
3733 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3734 || TREE_CODE (lhs_type) != VECTOR_TYPE
3735 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3736 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3737 || TREE_CODE (rhs2) != INTEGER_CST
3738 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3739 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3741 error ("type mismatch in widening vector shift expression");
3742 debug_generic_expr (lhs_type);
3743 debug_generic_expr (rhs1_type);
3744 debug_generic_expr (rhs2_type);
3745 return true;
3748 return false;
3751 case PLUS_EXPR:
3752 case MINUS_EXPR:
3754 tree lhs_etype = lhs_type;
3755 tree rhs1_etype = rhs1_type;
3756 tree rhs2_etype = rhs2_type;
3757 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3759 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3760 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3762 error ("invalid non-vector operands to vector valued plus");
3763 return true;
3765 lhs_etype = TREE_TYPE (lhs_type);
3766 rhs1_etype = TREE_TYPE (rhs1_type);
3767 rhs2_etype = TREE_TYPE (rhs2_type);
3769 if (POINTER_TYPE_P (lhs_etype)
3770 || POINTER_TYPE_P (rhs1_etype)
3771 || POINTER_TYPE_P (rhs2_etype))
3773 error ("invalid (pointer) operands to plus/minus");
3774 return true;
3777 /* Continue with generic binary expression handling. */
3778 break;
3781 case POINTER_PLUS_EXPR:
3783 if (!POINTER_TYPE_P (rhs1_type)
3784 || !useless_type_conversion_p (lhs_type, rhs1_type)
3785 || !ptrofftype_p (rhs2_type))
3787 error ("type mismatch in pointer plus expression");
3788 debug_generic_stmt (lhs_type);
3789 debug_generic_stmt (rhs1_type);
3790 debug_generic_stmt (rhs2_type);
3791 return true;
3794 return false;
3797 case TRUTH_ANDIF_EXPR:
3798 case TRUTH_ORIF_EXPR:
3799 case TRUTH_AND_EXPR:
3800 case TRUTH_OR_EXPR:
3801 case TRUTH_XOR_EXPR:
3803 gcc_unreachable ();
3805 case LT_EXPR:
3806 case LE_EXPR:
3807 case GT_EXPR:
3808 case GE_EXPR:
3809 case EQ_EXPR:
3810 case NE_EXPR:
3811 case UNORDERED_EXPR:
3812 case ORDERED_EXPR:
3813 case UNLT_EXPR:
3814 case UNLE_EXPR:
3815 case UNGT_EXPR:
3816 case UNGE_EXPR:
3817 case UNEQ_EXPR:
3818 case LTGT_EXPR:
3819 /* Comparisons are also binary, but the result type is not
3820 connected to the operand types. */
3821 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3823 case WIDEN_MULT_EXPR:
3824 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3825 return true;
3826 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3827 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3829 case WIDEN_SUM_EXPR:
3830 case VEC_WIDEN_MULT_HI_EXPR:
3831 case VEC_WIDEN_MULT_LO_EXPR:
3832 case VEC_WIDEN_MULT_EVEN_EXPR:
3833 case VEC_WIDEN_MULT_ODD_EXPR:
3834 case VEC_PACK_TRUNC_EXPR:
3835 case VEC_PACK_SAT_EXPR:
3836 case VEC_PACK_FIX_TRUNC_EXPR:
3837 /* FIXME. */
3838 return false;
3840 case MULT_EXPR:
3841 case MULT_HIGHPART_EXPR:
3842 case TRUNC_DIV_EXPR:
3843 case CEIL_DIV_EXPR:
3844 case FLOOR_DIV_EXPR:
3845 case ROUND_DIV_EXPR:
3846 case TRUNC_MOD_EXPR:
3847 case CEIL_MOD_EXPR:
3848 case FLOOR_MOD_EXPR:
3849 case ROUND_MOD_EXPR:
3850 case RDIV_EXPR:
3851 case EXACT_DIV_EXPR:
3852 case MIN_EXPR:
3853 case MAX_EXPR:
3854 case BIT_IOR_EXPR:
3855 case BIT_XOR_EXPR:
3856 case BIT_AND_EXPR:
3857 /* Continue with generic binary expression handling. */
3858 break;
3860 default:
3861 gcc_unreachable ();
3864 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3865 || !useless_type_conversion_p (lhs_type, rhs2_type))
3867 error ("type mismatch in binary expression");
3868 debug_generic_stmt (lhs_type);
3869 debug_generic_stmt (rhs1_type);
3870 debug_generic_stmt (rhs2_type);
3871 return true;
3874 return false;
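/* For reference, how pointer arithmetic reaches the checks above
   (GIMPLE pseudo-dump, illustrative):

     p_2 = p_1 + 4;       valid: POINTER_PLUS_EXPR, rhs2 of ptrofftype

   A PLUS_EXPR or MINUS_EXPR whose type is a pointer type is diagnosed
   as "invalid (pointer) operands to plus/minus"; pointer differences
   are instead expressed by converting the operands to an integer type
   first.  */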
3877 /* Verify a gimple assignment statement STMT with a ternary rhs.
3878 Returns true if anything is wrong. */
3880 static bool
3881 verify_gimple_assign_ternary (gassign *stmt)
3883 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3884 tree lhs = gimple_assign_lhs (stmt);
3885 tree lhs_type = TREE_TYPE (lhs);
3886 tree rhs1 = gimple_assign_rhs1 (stmt);
3887 tree rhs1_type = TREE_TYPE (rhs1);
3888 tree rhs2 = gimple_assign_rhs2 (stmt);
3889 tree rhs2_type = TREE_TYPE (rhs2);
3890 tree rhs3 = gimple_assign_rhs3 (stmt);
3891 tree rhs3_type = TREE_TYPE (rhs3);
3893 if (!is_gimple_reg (lhs))
3895 error ("non-register as LHS of ternary operation");
3896 return true;
3899 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3900 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3901 || !is_gimple_val (rhs2)
3902 || !is_gimple_val (rhs3))
3904 error ("invalid operands in ternary operation");
3905 return true;
3908 /* First handle operations that involve different types. */
3909 switch (rhs_code)
3911 case WIDEN_MULT_PLUS_EXPR:
3912 case WIDEN_MULT_MINUS_EXPR:
3913 if ((!INTEGRAL_TYPE_P (rhs1_type)
3914 && !FIXED_POINT_TYPE_P (rhs1_type))
3915 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3916 || !useless_type_conversion_p (lhs_type, rhs3_type)
3917 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3918 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3920 error ("type mismatch in widening multiply-accumulate expression");
3921 debug_generic_expr (lhs_type);
3922 debug_generic_expr (rhs1_type);
3923 debug_generic_expr (rhs2_type);
3924 debug_generic_expr (rhs3_type);
3925 return true;
3927 break;
3929 case FMA_EXPR:
3930 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3931 || !useless_type_conversion_p (lhs_type, rhs2_type)
3932 || !useless_type_conversion_p (lhs_type, rhs3_type))
3934 error ("type mismatch in fused multiply-add expression");
3935 debug_generic_expr (lhs_type);
3936 debug_generic_expr (rhs1_type);
3937 debug_generic_expr (rhs2_type);
3938 debug_generic_expr (rhs3_type);
3939 return true;
3941 break;
3943 case COND_EXPR:
3944 case VEC_COND_EXPR:
3945 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3946 || !useless_type_conversion_p (lhs_type, rhs3_type))
3948 error ("type mismatch in conditional expression");
3949 debug_generic_expr (lhs_type);
3950 debug_generic_expr (rhs2_type);
3951 debug_generic_expr (rhs3_type);
3952 return true;
3954 break;
3956 case VEC_PERM_EXPR:
3957 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3958 || !useless_type_conversion_p (lhs_type, rhs2_type))
3960 error ("type mismatch in vector permute expression");
3961 debug_generic_expr (lhs_type);
3962 debug_generic_expr (rhs1_type);
3963 debug_generic_expr (rhs2_type);
3964 debug_generic_expr (rhs3_type);
3965 return true;
3968 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3969 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3970 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3972 error ("vector types expected in vector permute expression");
3973 debug_generic_expr (lhs_type);
3974 debug_generic_expr (rhs1_type);
3975 debug_generic_expr (rhs2_type);
3976 debug_generic_expr (rhs3_type);
3977 return true;
3980 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3981 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3982 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3983 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3984 != TYPE_VECTOR_SUBPARTS (lhs_type))
3986 error ("vectors with different element number found "
3987 "in vector permute expression");
3988 debug_generic_expr (lhs_type);
3989 debug_generic_expr (rhs1_type);
3990 debug_generic_expr (rhs2_type);
3991 debug_generic_expr (rhs3_type);
3992 return true;
3995 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3996 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3997 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3999 error ("invalid mask type in vector permute expression");
4000 debug_generic_expr (lhs_type);
4001 debug_generic_expr (rhs1_type);
4002 debug_generic_expr (rhs2_type);
4003 debug_generic_expr (rhs3_type);
4004 return true;
4007 return false;
4009 case SAD_EXPR:
4010 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4011 || !useless_type_conversion_p (lhs_type, rhs3_type)
4012 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4013 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4014 > GET_MODE_BITSIZE (GET_MODE_INNER
4015 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4017 error ("type mismatch in sad expression");
4018 debug_generic_expr (lhs_type);
4019 debug_generic_expr (rhs1_type);
4020 debug_generic_expr (rhs2_type);
4021 debug_generic_expr (rhs3_type);
4022 return true;
4025 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4026 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4027 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4029 error ("vector types expected in sad expression");
4030 debug_generic_expr (lhs_type);
4031 debug_generic_expr (rhs1_type);
4032 debug_generic_expr (rhs2_type);
4033 debug_generic_expr (rhs3_type);
4034 return true;
4037 return false;
4039 case DOT_PROD_EXPR:
4040 case REALIGN_LOAD_EXPR:
4041 /* FIXME. */
4042 return false;
4044 default:
4045 gcc_unreachable ();
4047 return false;
4050 /* Verify a gimple assignment statement STMT with a single rhs.
4051 Returns true if anything is wrong. */
4053 static bool
4054 verify_gimple_assign_single (gassign *stmt)
4056 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4057 tree lhs = gimple_assign_lhs (stmt);
4058 tree lhs_type = TREE_TYPE (lhs);
4059 tree rhs1 = gimple_assign_rhs1 (stmt);
4060 tree rhs1_type = TREE_TYPE (rhs1);
4061 bool res = false;
4063 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4065 error ("non-trivial conversion at assignment");
4066 debug_generic_expr (lhs_type);
4067 debug_generic_expr (rhs1_type);
4068 return true;
4071 if (gimple_clobber_p (stmt)
4072 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4074 error ("non-decl/MEM_REF LHS in clobber statement");
4075 debug_generic_expr (lhs);
4076 return true;
4079 if (handled_component_p (lhs)
4080 || TREE_CODE (lhs) == MEM_REF
4081 || TREE_CODE (lhs) == TARGET_MEM_REF)
4082 res |= verify_types_in_gimple_reference (lhs, true);
4084 /* Special codes we cannot handle via their class. */
4085 switch (rhs_code)
4087 case ADDR_EXPR:
4089 tree op = TREE_OPERAND (rhs1, 0);
4090 if (!is_gimple_addressable (op))
4092 error ("invalid operand in unary expression");
4093 return true;
4096 /* Technically there is no longer a need for matching types, but
4097 gimple hygiene asks for this check. In LTO we can end up
4098 combining incompatible units and thus end up with addresses
4099 of globals that change their type to a common one. */
4100 if (!in_lto_p
4101 && !types_compatible_p (TREE_TYPE (op),
4102 TREE_TYPE (TREE_TYPE (rhs1)))
4103 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4104 TREE_TYPE (op)))
4106 error ("type mismatch in address expression");
4107 debug_generic_stmt (TREE_TYPE (rhs1));
4108 debug_generic_stmt (TREE_TYPE (op));
4109 return true;
4112 return verify_types_in_gimple_reference (op, true);
4115 /* tcc_reference */
4116 case INDIRECT_REF:
4117 error ("INDIRECT_REF in gimple IL");
4118 return true;
4120 case COMPONENT_REF:
4121 case BIT_FIELD_REF:
4122 case ARRAY_REF:
4123 case ARRAY_RANGE_REF:
4124 case VIEW_CONVERT_EXPR:
4125 case REALPART_EXPR:
4126 case IMAGPART_EXPR:
4127 case TARGET_MEM_REF:
4128 case MEM_REF:
4129 if (!is_gimple_reg (lhs)
4130 && is_gimple_reg_type (TREE_TYPE (lhs)))
4132 error ("invalid rhs for gimple memory store");
4133 debug_generic_stmt (lhs);
4134 debug_generic_stmt (rhs1);
4135 return true;
4137 return res || verify_types_in_gimple_reference (rhs1, false);
4139 /* tcc_constant */
4140 case SSA_NAME:
4141 case INTEGER_CST:
4142 case REAL_CST:
4143 case FIXED_CST:
4144 case COMPLEX_CST:
4145 case VECTOR_CST:
4146 case STRING_CST:
4147 return res;
4149 /* tcc_declaration */
4150 case CONST_DECL:
4151 return res;
4152 case VAR_DECL:
4153 case PARM_DECL:
4154 if (!is_gimple_reg (lhs)
4155 && !is_gimple_reg (rhs1)
4156 && is_gimple_reg_type (TREE_TYPE (lhs)))
4158 error ("invalid rhs for gimple memory store");
4159 debug_generic_stmt (lhs);
4160 debug_generic_stmt (rhs1);
4161 return true;
4163 return res;
4165 case CONSTRUCTOR:
4166 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4168 unsigned int i;
4169 tree elt_i, elt_v, elt_t = NULL_TREE;
4171 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4172 return res;
4173 /* For vector CONSTRUCTORs we require that either it is an empty
4174 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4175 (then the element count must be correct to cover the whole
4176 outer vector and the index must be NULL on all elements), or it is
4177 a CONSTRUCTOR of scalar elements, where as an exception we allow
4178 a smaller number of elements (assuming zero filling) and
4179 consecutive indexes as well as NULL indexes (such
4180 CONSTRUCTORs can appear in the IL from FEs). */
4181 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4183 if (elt_t == NULL_TREE)
4185 elt_t = TREE_TYPE (elt_v);
4186 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4188 tree elt_t = TREE_TYPE (elt_v);
4189 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4190 TREE_TYPE (elt_t)))
4192 error ("incorrect type of vector CONSTRUCTOR"
4193 " elements");
4194 debug_generic_stmt (rhs1);
4195 return true;
4197 else if (CONSTRUCTOR_NELTS (rhs1)
4198 * TYPE_VECTOR_SUBPARTS (elt_t)
4199 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4201 error ("incorrect number of vector CONSTRUCTOR"
4202 " elements");
4203 debug_generic_stmt (rhs1);
4204 return true;
4207 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4208 elt_t))
4210 error ("incorrect type of vector CONSTRUCTOR elements");
4211 debug_generic_stmt (rhs1);
4212 return true;
4214 else if (CONSTRUCTOR_NELTS (rhs1)
4215 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4217 error ("incorrect number of vector CONSTRUCTOR elements");
4218 debug_generic_stmt (rhs1);
4219 return true;
4222 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4224 error ("incorrect type of vector CONSTRUCTOR elements");
4225 debug_generic_stmt (rhs1);
4226 return true;
4228 if (elt_i != NULL_TREE
4229 && (TREE_CODE (elt_t) == VECTOR_TYPE
4230 || TREE_CODE (elt_i) != INTEGER_CST
4231 || compare_tree_int (elt_i, i) != 0))
4233 error ("vector CONSTRUCTOR with non-NULL element index");
4234 debug_generic_stmt (rhs1);
4235 return true;
4237 if (!is_gimple_val (elt_v))
4239 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4240 debug_generic_stmt (rhs1);
4241 return true;
4245 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4247 error ("non-vector CONSTRUCTOR with elements");
4248 debug_generic_stmt (rhs1);
4249 return true;
4251 return res;
4252 case OBJ_TYPE_REF:
4253 case ASSERT_EXPR:
4254 case WITH_SIZE_EXPR:
4255 /* FIXME. */
4256 return res;
4258 default:;
4261 return res;
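/* For instance (an illustration, not quoted from any testcase), given
   "int *p, *q" the single statement "*p = *q" is rejected above with
   "invalid rhs for gimple memory store": the LHS is a memory reference
   of register type while the RHS is itself a load.  The gimplified
   form "tmp_1 = *q; *p = tmp_1;" passes, since the value travels
   through a register.  */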
4264 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4265 is a problem, otherwise false. */
4267 static bool
4268 verify_gimple_assign (gassign *stmt)
4270 switch (gimple_assign_rhs_class (stmt))
4272 case GIMPLE_SINGLE_RHS:
4273 return verify_gimple_assign_single (stmt);
4275 case GIMPLE_UNARY_RHS:
4276 return verify_gimple_assign_unary (stmt);
4278 case GIMPLE_BINARY_RHS:
4279 return verify_gimple_assign_binary (stmt);
4281 case GIMPLE_TERNARY_RHS:
4282 return verify_gimple_assign_ternary (stmt);
4284 default:
4285 gcc_unreachable ();
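/* Roughly, "x_1 = y_2" is a GIMPLE_SINGLE_RHS, "x_1 = -y_2" a
   GIMPLE_UNARY_RHS, "x_1 = y_2 + z_3" a GIMPLE_BINARY_RHS, and a
   COND_EXPR rhs "x_1 = y_2 ? z_3 : w_4" a GIMPLE_TERNARY_RHS.  */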
4289 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4290 is a problem, otherwise false. */
4292 static bool
4293 verify_gimple_return (greturn *stmt)
4295 tree op = gimple_return_retval (stmt);
4296 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4298 /* We cannot test for present return values as we do not fix up missing
4299 return values from the original source. */
4300 if (op == NULL)
4301 return false;
4303 if (!is_gimple_val (op)
4304 && TREE_CODE (op) != RESULT_DECL)
4306 error ("invalid operand in return statement");
4307 debug_generic_stmt (op);
4308 return true;
4311 if ((TREE_CODE (op) == RESULT_DECL
4312 && DECL_BY_REFERENCE (op))
4313 || (TREE_CODE (op) == SSA_NAME
4314 && SSA_NAME_VAR (op)
4315 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4316 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4317 op = TREE_TYPE (op);
4319 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4321 error ("invalid conversion in return statement");
4322 debug_generic_stmt (restype);
4323 debug_generic_stmt (TREE_TYPE (op));
4324 return true;
4327 return false;
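/* The unwrapping above handles functions that return an aggregate by
   invisible reference: there the RESULT_DECL has DECL_BY_REFERENCE set
   and pointer type, so it is the pointed-to type that must match the
   declared return type.  */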
4331 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4332 is a problem, otherwise false. */
4334 static bool
4335 verify_gimple_goto (ggoto *stmt)
4337 tree dest = gimple_goto_dest (stmt);
4339 /* ??? We have two canonical forms of direct goto destinations, a
4340 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4341 if (TREE_CODE (dest) != LABEL_DECL
4342 && (!is_gimple_val (dest)
4343 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4345 error ("goto destination is neither a label nor a pointer");
4346 return true;
4349 return false;
4352 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4353 is a problem, otherwise false. */
4355 static bool
4356 verify_gimple_switch (gswitch *stmt)
4358 unsigned int i, n;
4359 tree elt, prev_upper_bound = NULL_TREE;
4360 tree index_type, elt_type = NULL_TREE;
4362 if (!is_gimple_val (gimple_switch_index (stmt)))
4364 error ("invalid operand to switch statement");
4365 debug_generic_stmt (gimple_switch_index (stmt));
4366 return true;
4369 index_type = TREE_TYPE (gimple_switch_index (stmt));
4370 if (! INTEGRAL_TYPE_P (index_type))
4372 error ("non-integral type switch statement");
4373 debug_generic_expr (index_type);
4374 return true;
4377 elt = gimple_switch_label (stmt, 0);
4378 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4380 error ("invalid default case label in switch statement");
4381 debug_generic_expr (elt);
4382 return true;
4385 n = gimple_switch_num_labels (stmt);
4386 for (i = 1; i < n; i++)
4388 elt = gimple_switch_label (stmt, i);
4390 if (! CASE_LOW (elt))
4392 error ("invalid case label in switch statement");
4393 debug_generic_expr (elt);
4394 return true;
4396 if (CASE_HIGH (elt)
4397 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4399 error ("invalid case range in switch statement");
4400 debug_generic_expr (elt);
4401 return true;
4404 if (elt_type)
4406 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4407 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4409 error ("type mismatch for case label in switch statement");
4410 debug_generic_expr (elt);
4411 return true;
4414 else
4416 elt_type = TREE_TYPE (CASE_LOW (elt));
4417 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4419 error ("type precision mismatch in switch statement");
4420 return true;
4424 if (prev_upper_bound)
4426 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4428 error ("case labels not sorted in switch statement");
4429 return true;
4433 prev_upper_bound = CASE_HIGH (elt);
4434 if (! prev_upper_bound)
4435 prev_upper_bound = CASE_LOW (elt);
4438 return false;
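/* E.g. for "switch (i_1)" with labels "default, case 1, case 3 ... 5"
   this accepts the label vector only if the default label comes first
   with NULL CASE_LOW, every other label has a CASE_LOW, ranges satisfy
   CASE_LOW < CASE_HIGH, all labels share one type no wider than the
   index type, and each label starts above the previous upper bound.  */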
4441 /* Verify a gimple debug statement STMT.
4442 Returns true if anything is wrong. */
4444 static bool
4445 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4447 /* There isn't much that could be wrong in a gimple debug stmt. A
4448 gimple debug bind stmt, for example, maps a tree (usually
4449 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4450 component or member of an aggregate type) to another tree, which
4451 can be an arbitrary expression. These stmts expand into debug
4452 insns, and are converted to debug notes by var-tracking.c. */
4453 return false;
4456 /* Verify a gimple label statement STMT.
4457 Returns true if anything is wrong. */
4459 static bool
4460 verify_gimple_label (glabel *stmt)
4462 tree decl = gimple_label_label (stmt);
4463 int uid;
4464 bool err = false;
4466 if (TREE_CODE (decl) != LABEL_DECL)
4467 return true;
4468 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4469 && DECL_CONTEXT (decl) != current_function_decl)
4471 error ("label's context is not the current function decl");
4472 err |= true;
4475 uid = LABEL_DECL_UID (decl);
4476 if (cfun->cfg
4477 && (uid == -1
4478 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4480 error ("incorrect entry in label_to_block_map");
4481 err |= true;
4484 uid = EH_LANDING_PAD_NR (decl);
4485 if (uid)
4487 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4488 if (decl != lp->post_landing_pad)
4490 error ("incorrect setting of landing pad number");
4491 err |= true;
4495 return err;
4498 /* Verify a gimple cond statement STMT.
4499 Returns true if anything is wrong. */
4501 static bool
4502 verify_gimple_cond (gcond *stmt)
4504 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4506 error ("invalid comparison code in gimple cond");
4507 return true;
4509 if (!(!gimple_cond_true_label (stmt)
4510 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4511 || !(!gimple_cond_false_label (stmt)
4512 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4514 error ("invalid labels in gimple cond");
4515 return true;
4518 return verify_gimple_comparison (boolean_type_node,
4519 gimple_cond_lhs (stmt),
4520 gimple_cond_rhs (stmt));
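/* A well-formed GIMPLE_COND is thus "if (a_1 <cmp> b_2)" for a
   tcc_comparison code <cmp> that is also valid as a boolean-valued
   comparison; e.g. "if (a_1 != 0)" is fine, while "if (a_1 + b_2)"
   should never appear in the IL.  */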
4523 /* Verify the GIMPLE statement STMT. Returns true if there is an
4524 error, otherwise false. */
4526 static bool
4527 verify_gimple_stmt (gimple stmt)
4529 switch (gimple_code (stmt))
4531 case GIMPLE_ASSIGN:
4532 return verify_gimple_assign (as_a <gassign *> (stmt));
4534 case GIMPLE_LABEL:
4535 return verify_gimple_label (as_a <glabel *> (stmt));
4537 case GIMPLE_CALL:
4538 return verify_gimple_call (as_a <gcall *> (stmt));
4540 case GIMPLE_COND:
4541 return verify_gimple_cond (as_a <gcond *> (stmt));
4543 case GIMPLE_GOTO:
4544 return verify_gimple_goto (as_a <ggoto *> (stmt));
4546 case GIMPLE_SWITCH:
4547 return verify_gimple_switch (as_a <gswitch *> (stmt));
4549 case GIMPLE_RETURN:
4550 return verify_gimple_return (as_a <greturn *> (stmt));
4552 case GIMPLE_ASM:
4553 return false;
4555 case GIMPLE_TRANSACTION:
4556 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4558 /* Tuples that do not have tree operands. */
4559 case GIMPLE_NOP:
4560 case GIMPLE_PREDICT:
4561 case GIMPLE_RESX:
4562 case GIMPLE_EH_DISPATCH:
4563 case GIMPLE_EH_MUST_NOT_THROW:
4564 return false;
4566 CASE_GIMPLE_OMP:
4567 /* OpenMP directives are validated by the FE and never operated
4568 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4569 non-gimple expressions when the main index variable has had
4570 its address taken. This does not affect the loop itself
4571 because the header of a GIMPLE_OMP_FOR is merely used to determine
4572 how to set up the parallel iteration. */
4573 return false;
4575 case GIMPLE_DEBUG:
4576 return verify_gimple_debug (stmt);
4578 default:
4579 gcc_unreachable ();
4583 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4584 and false otherwise. */
4586 static bool
4587 verify_gimple_phi (gimple phi)
4589 bool err = false;
4590 unsigned i;
4591 tree phi_result = gimple_phi_result (phi);
4592 bool virtual_p;
4594 if (!phi_result)
4596 error ("invalid PHI result");
4597 return true;
4600 virtual_p = virtual_operand_p (phi_result);
4601 if (TREE_CODE (phi_result) != SSA_NAME
4602 || (virtual_p
4603 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4605 error ("invalid PHI result");
4606 err = true;
4609 for (i = 0; i < gimple_phi_num_args (phi); i++)
4611 tree t = gimple_phi_arg_def (phi, i);
4613 if (!t)
4615 error ("missing PHI def");
4616 err |= true;
4617 continue;
4619 /* Addressable variables do have SSA_NAMEs but they
4620 are not considered gimple values. */
4621 else if ((TREE_CODE (t) == SSA_NAME
4622 && virtual_p != virtual_operand_p (t))
4623 || (virtual_p
4624 && (TREE_CODE (t) != SSA_NAME
4625 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4626 || (!virtual_p
4627 && !is_gimple_val (t)))
4629 error ("invalid PHI argument");
4630 debug_generic_expr (t);
4631 err |= true;
4633 #ifdef ENABLE_TYPES_CHECKING
4634 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4636 error ("incompatible types in PHI argument %u", i);
4637 debug_generic_stmt (TREE_TYPE (phi_result));
4638 debug_generic_stmt (TREE_TYPE (t));
4639 err |= true;
4641 #endif
4644 return err;
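/* For example, "x_3 = PHI <x_1(2), x_2(4)>" passes if x_1 and x_2 are
   GIMPLE values of types compatible with x_3, whereas a virtual PHI
   such as ".MEM_5 = PHI <.MEM_2(2), .MEM_4(4)>" must use the single
   virtual operand of the function for the result and every argument.  */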
4647 /* Verify the GIMPLE statements inside the sequence STMTS. */
4649 static bool
4650 verify_gimple_in_seq_2 (gimple_seq stmts)
4652 gimple_stmt_iterator ittr;
4653 bool err = false;
4655 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4657 gimple stmt = gsi_stmt (ittr);
4659 switch (gimple_code (stmt))
4661 case GIMPLE_BIND:
4662 err |= verify_gimple_in_seq_2 (
4663 gimple_bind_body (as_a <gbind *> (stmt)));
4664 break;
4666 case GIMPLE_TRY:
4667 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4668 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4669 break;
4671 case GIMPLE_EH_FILTER:
4672 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4673 break;
4675 case GIMPLE_EH_ELSE:
4677 geh_else *eh_else = as_a <geh_else *> (stmt);
4678 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4679 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4681 break;
4683 case GIMPLE_CATCH:
4684 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4685 as_a <gcatch *> (stmt)));
4686 break;
4688 case GIMPLE_TRANSACTION:
4689 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4690 break;
4692 default:
4694 bool err2 = verify_gimple_stmt (stmt);
4695 if (err2)
4696 debug_gimple_stmt (stmt);
4697 err |= err2;
4702 return err;
4705 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4706 is a problem, otherwise false. */
4708 static bool
4709 verify_gimple_transaction (gtransaction *stmt)
4711 tree lab = gimple_transaction_label (stmt);
4712 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4713 return true;
4714 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4718 /* Verify the GIMPLE statements inside the statement list STMTS. */
4720 DEBUG_FUNCTION void
4721 verify_gimple_in_seq (gimple_seq stmts)
4723 timevar_push (TV_TREE_STMT_VERIFY);
4724 if (verify_gimple_in_seq_2 (stmts))
4725 internal_error ("verify_gimple failed");
4726 timevar_pop (TV_TREE_STMT_VERIFY);
4729 /* Return true when T can be shared. */
4731 static bool
4732 tree_node_can_be_shared (tree t)
4734 if (IS_TYPE_OR_DECL_P (t)
4735 || is_gimple_min_invariant (t)
4736 || TREE_CODE (t) == SSA_NAME
4737 || t == error_mark_node
4738 || TREE_CODE (t) == IDENTIFIER_NODE)
4739 return true;
4741 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4742 return true;
4744 if (DECL_P (t))
4745 return true;
4747 return false;
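/* So a type node, a decl, an SSA name or a constant like 42 may be
   referenced from many statements, while reference trees such as
   MEM_REF or ARRAY_REF must be unshared before being reused.  */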
4750 /* Called via walk_tree. Verify tree sharing. */
4752 static tree
4753 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4755 hash_set<void *> *visited = (hash_set<void *> *) data;
4757 if (tree_node_can_be_shared (*tp))
4759 *walk_subtrees = false;
4760 return NULL;
4763 if (visited->add (*tp))
4764 return *tp;
4766 return NULL;
4769 /* Called via walk_gimple_stmt. Verify tree sharing. */
4771 static tree
4772 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4774 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4775 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4778 static bool eh_error_found;
4779 bool
4780 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4781 hash_set<gimple> *visited)
4783 if (!visited->contains (stmt))
4785 error ("dead STMT in EH table");
4786 debug_gimple_stmt (stmt);
4787 eh_error_found = true;
4789 return true;
4792 /* Verify that the block of location LOC, if any, is in BLOCKS. */
4794 static bool
4795 verify_location (hash_set<tree> *blocks, location_t loc)
4797 tree block = LOCATION_BLOCK (loc);
4798 if (block != NULL_TREE
4799 && !blocks->contains (block))
4801 error ("location references block not in block tree");
4802 return true;
4804 if (block != NULL_TREE)
4805 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4806 return false;
4809 /* Called via walk_tree. Verify that expressions have no blocks. */
4811 static tree
4812 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4814 if (!EXPR_P (*tp))
4816 *walk_subtrees = false;
4817 return NULL;
4820 location_t loc = EXPR_LOCATION (*tp);
4821 if (LOCATION_BLOCK (loc) != NULL)
4822 return *tp;
4824 return NULL;
4827 /* Called via walk_tree. Verify locations of expressions. */
4829 static tree
4830 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4832 hash_set<tree> *blocks = (hash_set<tree> *) data;
4834 if (TREE_CODE (*tp) == VAR_DECL
4835 && DECL_HAS_DEBUG_EXPR_P (*tp))
4837 tree t = DECL_DEBUG_EXPR (*tp);
4838 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4839 if (addr)
4840 return addr;
4842 if ((TREE_CODE (*tp) == VAR_DECL
4843 || TREE_CODE (*tp) == PARM_DECL
4844 || TREE_CODE (*tp) == RESULT_DECL)
4845 && DECL_HAS_VALUE_EXPR_P (*tp))
4847 tree t = DECL_VALUE_EXPR (*tp);
4848 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4849 if (addr)
4850 return addr;
4853 if (!EXPR_P (*tp))
4855 *walk_subtrees = false;
4856 return NULL;
4859 location_t loc = EXPR_LOCATION (*tp);
4860 if (verify_location (blocks, loc))
4861 return *tp;
4863 return NULL;
4866 /* Called via walk_gimple_op. Verify locations of expressions. */
4868 static tree
4869 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4871 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4872 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4875 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4877 static void
4878 collect_subblocks (hash_set<tree> *blocks, tree block)
4880 tree t;
4881 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4883 blocks->add (t);
4884 collect_subblocks (blocks, t);
4888 /* Verify the GIMPLE statements in the CFG of FN. */
4890 DEBUG_FUNCTION void
4891 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4893 basic_block bb;
4894 bool err = false;
4896 timevar_push (TV_TREE_STMT_VERIFY);
4897 hash_set<void *> visited;
4898 hash_set<gimple> visited_stmts;
4900 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4901 hash_set<tree> blocks;
4902 if (DECL_INITIAL (fn->decl))
4904 blocks.add (DECL_INITIAL (fn->decl));
4905 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4908 FOR_EACH_BB_FN (bb, fn)
4910 gimple_stmt_iterator gsi;
4912 for (gphi_iterator gpi = gsi_start_phis (bb);
4913 !gsi_end_p (gpi);
4914 gsi_next (&gpi))
4916 gphi *phi = gpi.phi ();
4917 bool err2 = false;
4918 unsigned i;
4920 visited_stmts.add (phi);
4922 if (gimple_bb (phi) != bb)
4924 error ("gimple_bb (phi) is set to a wrong basic block");
4925 err2 = true;
4928 err2 |= verify_gimple_phi (phi);
4930 /* Only PHI arguments have locations. */
4931 if (gimple_location (phi) != UNKNOWN_LOCATION)
4933 error ("PHI node with location");
4934 err2 = true;
4937 for (i = 0; i < gimple_phi_num_args (phi); i++)
4939 tree arg = gimple_phi_arg_def (phi, i);
4940 tree addr = walk_tree (&arg, verify_node_sharing_1,
4941 &visited, NULL);
4942 if (addr)
4944 error ("incorrect sharing of tree nodes");
4945 debug_generic_expr (addr);
4946 err2 |= true;
4948 location_t loc = gimple_phi_arg_location (phi, i);
4949 if (virtual_operand_p (gimple_phi_result (phi))
4950 && loc != UNKNOWN_LOCATION)
4952 error ("virtual PHI with argument locations");
4953 err2 = true;
4955 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4956 if (addr)
4958 debug_generic_expr (addr);
4959 err2 = true;
4961 err2 |= verify_location (&blocks, loc);
4964 if (err2)
4965 debug_gimple_stmt (phi);
4966 err |= err2;
4969 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4971 gimple stmt = gsi_stmt (gsi);
4972 bool err2 = false;
4973 struct walk_stmt_info wi;
4974 tree addr;
4975 int lp_nr;
4977 visited_stmts.add (stmt);
4979 if (gimple_bb (stmt) != bb)
4981 error ("gimple_bb (stmt) is set to a wrong basic block");
4982 err2 = true;
4985 err2 |= verify_gimple_stmt (stmt);
4986 err2 |= verify_location (&blocks, gimple_location (stmt));
4988 memset (&wi, 0, sizeof (wi));
4989 wi.info = (void *) &visited;
4990 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4991 if (addr)
4993 error ("incorrect sharing of tree nodes");
4994 debug_generic_expr (addr);
4995 err2 |= true;
4998 memset (&wi, 0, sizeof (wi));
4999 wi.info = (void *) &blocks;
5000 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5001 if (addr)
5003 debug_generic_expr (addr);
5004 err2 |= true;
5007 /* ??? Instead of not checking these stmts at all the walker
5008 should know its context via wi. */
5009 if (!is_gimple_debug (stmt)
5010 && !is_gimple_omp (stmt))
5012 memset (&wi, 0, sizeof (wi));
5013 addr = walk_gimple_op (stmt, verify_expr, &wi);
5014 if (addr)
5016 debug_generic_expr (addr);
5017 inform (gimple_location (stmt), "in statement");
5018 err2 |= true;
5022 /* If the statement is marked as part of an EH region, then it is
5023 expected that the statement could throw. Verify that when we
5024 have optimizations that simplify statements such that we prove
5025 that they cannot throw, that we update other data structures
5026 to match. */
5027 lp_nr = lookup_stmt_eh_lp (stmt);
5028 if (lp_nr > 0)
5030 if (!stmt_could_throw_p (stmt))
5032 if (verify_nothrow)
5034 error ("statement marked for throw, but doesn%'t");
5035 err2 |= true;
5038 else if (!gsi_one_before_end_p (gsi))
5040 error ("statement marked for throw in middle of block");
5041 err2 |= true;
5045 if (err2)
5046 debug_gimple_stmt (stmt);
5047 err |= err2;
5051 eh_error_found = false;
5052 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5053 if (eh_table)
5054 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5055 (&visited_stmts);
5057 if (err || eh_error_found)
5058 internal_error ("verify_gimple failed");
5060 verify_histograms ();
5061 timevar_pop (TV_TREE_STMT_VERIFY);
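/* This is the heavyweight verifier; in checking builds the pass
   manager typically runs it after passes that rewrite the IL, while
   verify_gimple_in_seq above suffices for pre-CFG sequences.  */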
5065 /* Verifies that the flow information is OK. */
5067 static int
5068 gimple_verify_flow_info (void)
5070 int err = 0;
5071 basic_block bb;
5072 gimple_stmt_iterator gsi;
5073 gimple stmt;
5074 edge e;
5075 edge_iterator ei;
5077 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5078 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5080 error ("ENTRY_BLOCK has IL associated with it");
5081 err = 1;
5084 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5085 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5087 error ("EXIT_BLOCK has IL associated with it");
5088 err = 1;
5091 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5092 if (e->flags & EDGE_FALLTHRU)
5094 error ("fallthru to exit from bb %d", e->src->index);
5095 err = 1;
5098 FOR_EACH_BB_FN (bb, cfun)
5100 bool found_ctrl_stmt = false;
5102 stmt = NULL;
5104 /* Skip labels on the start of basic block. */
5105 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5107 tree label;
5108 gimple prev_stmt = stmt;
5110 stmt = gsi_stmt (gsi);
5112 if (gimple_code (stmt) != GIMPLE_LABEL)
5113 break;
5115 label = gimple_label_label (as_a <glabel *> (stmt));
5116 if (prev_stmt && DECL_NONLOCAL (label))
5118 error ("nonlocal label ");
5119 print_generic_expr (stderr, label, 0);
5120 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5121 bb->index);
5122 err = 1;
5125 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5127 error ("EH landing pad label ");
5128 print_generic_expr (stderr, label, 0);
5129 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5130 bb->index);
5131 err = 1;
5134 if (label_to_block (label) != bb)
5136 error ("label ");
5137 print_generic_expr (stderr, label, 0);
5138 fprintf (stderr, " to block does not match in bb %d",
5139 bb->index);
5140 err = 1;
5143 if (decl_function_context (label) != current_function_decl)
5145 error ("label ");
5146 print_generic_expr (stderr, label, 0);
5147 fprintf (stderr, " has incorrect context in bb %d",
5148 bb->index);
5149 err = 1;
5153 /* Verify that body of basic block BB is free of control flow. */
5154 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5156 gimple stmt = gsi_stmt (gsi);
5158 if (found_ctrl_stmt)
5160 error ("control flow in the middle of basic block %d",
5161 bb->index);
5162 err = 1;
5165 if (stmt_ends_bb_p (stmt))
5166 found_ctrl_stmt = true;
5168 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5170 error ("label ");
5171 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5172 fprintf (stderr, " in the middle of basic block %d", bb->index);
5173 err = 1;
5177 gsi = gsi_last_bb (bb);
5178 if (gsi_end_p (gsi))
5179 continue;
5181 stmt = gsi_stmt (gsi);
5183 if (gimple_code (stmt) == GIMPLE_LABEL)
5184 continue;
5186 err |= verify_eh_edges (stmt);
5188 if (is_ctrl_stmt (stmt))
5190 FOR_EACH_EDGE (e, ei, bb->succs)
5191 if (e->flags & EDGE_FALLTHRU)
5193 error ("fallthru edge after a control statement in bb %d",
5194 bb->index);
5195 err = 1;
5199 if (gimple_code (stmt) != GIMPLE_COND)
5201 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5202 after anything other than an if statement. */
5203 FOR_EACH_EDGE (e, ei, bb->succs)
5204 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5206 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5207 bb->index);
5208 err = 1;
5212 switch (gimple_code (stmt))
5214 case GIMPLE_COND:
5216 edge true_edge;
5217 edge false_edge;
5219 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5221 if (!true_edge
5222 || !false_edge
5223 || !(true_edge->flags & EDGE_TRUE_VALUE)
5224 || !(false_edge->flags & EDGE_FALSE_VALUE)
5225 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5226 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5227 || EDGE_COUNT (bb->succs) >= 3)
5229 error ("wrong outgoing edge flags at end of bb %d",
5230 bb->index);
5231 err = 1;
5234 break;
5236 case GIMPLE_GOTO:
5237 if (simple_goto_p (stmt))
5239 error ("explicit goto at end of bb %d", bb->index);
5240 err = 1;
5242 else
5244 /* FIXME. We should double check that the labels in the
5245 destination blocks have their address taken. */
5246 FOR_EACH_EDGE (e, ei, bb->succs)
5247 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5248 | EDGE_FALSE_VALUE))
5249 || !(e->flags & EDGE_ABNORMAL))
5251 error ("wrong outgoing edge flags at end of bb %d",
5252 bb->index);
5253 err = 1;
5256 break;
5258 case GIMPLE_CALL:
5259 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5260 break;
5261 /* ... fallthru ... */
5262 case GIMPLE_RETURN:
5263 if (!single_succ_p (bb)
5264 || (single_succ_edge (bb)->flags
5265 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5266 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5268 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5269 err = 1;
5271 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5273 error ("return edge does not point to exit in bb %d",
5274 bb->index);
5275 err = 1;
5277 break;
5279 case GIMPLE_SWITCH:
5281 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5282 tree prev;
5283 edge e;
5284 size_t i, n;
5286 n = gimple_switch_num_labels (switch_stmt);
5288 /* Mark all the destination basic blocks. */
5289 for (i = 0; i < n; ++i)
5291 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5292 basic_block label_bb = label_to_block (lab);
5293 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5294 label_bb->aux = (void *)1;
5297 /* Verify that the case labels are sorted. */
5298 prev = gimple_switch_label (switch_stmt, 0);
5299 for (i = 1; i < n; ++i)
5301 tree c = gimple_switch_label (switch_stmt, i);
5302 if (!CASE_LOW (c))
5304 error ("found default case not at the start of "
5305 "case vector");
5306 err = 1;
5307 continue;
5309 if (CASE_LOW (prev)
5310 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5312 error ("case labels not sorted: ");
5313 print_generic_expr (stderr, prev, 0);
5314 fprintf (stderr, " is greater than ");
5315 print_generic_expr (stderr, c, 0);
5316 fprintf (stderr, " but comes before it.\n");
5317 err = 1;
5319 prev = c;
5321 /* VRP will remove the default case if it can prove it will
5322 never be executed. So do not verify there always exists
5323 a default case here. */
5325 FOR_EACH_EDGE (e, ei, bb->succs)
5327 if (!e->dest->aux)
5329 error ("extra outgoing edge %d->%d",
5330 bb->index, e->dest->index);
5331 err = 1;
5334 e->dest->aux = (void *)2;
5335 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5336 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5338 error ("wrong outgoing edge flags at end of bb %d",
5339 bb->index);
5340 err = 1;
5344 /* Check that we have all of them. */
5345 for (i = 0; i < n; ++i)
5347 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5348 basic_block label_bb = label_to_block (lab);
5350 if (label_bb->aux != (void *)2)
5352 error ("missing edge %i->%i", bb->index, label_bb->index);
5353 err = 1;
5357 FOR_EACH_EDGE (e, ei, bb->succs)
5358 e->dest->aux = (void *)0;
5360 break;
5362 case GIMPLE_EH_DISPATCH:
5363 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5364 break;
5366 default:
5367 break;
5371 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5372 verify_dominators (CDI_DOMINATORS);
5374 return err;
5378 /* Updates phi nodes after creating a forwarder block joined
5379 by edge FALLTHRU. */
5381 static void
5382 gimple_make_forwarder_block (edge fallthru)
5384 edge e;
5385 edge_iterator ei;
5386 basic_block dummy, bb;
5387 tree var;
5388 gphi_iterator gsi;
5390 dummy = fallthru->src;
5391 bb = fallthru->dest;
5393 if (single_pred_p (bb))
5394 return;
5396 /* If we redirected a branch we must create new PHI nodes at the
5397 start of BB. */
5398 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5400 gphi *phi, *new_phi;
5402 phi = gsi.phi ();
5403 var = gimple_phi_result (phi);
5404 new_phi = create_phi_node (var, bb);
5405 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5406 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5407 UNKNOWN_LOCATION);
5410 /* Add the arguments we have stored on edges. */
5411 FOR_EACH_EDGE (e, ei, bb->preds)
5413 if (e == fallthru)
5414 continue;
5416 flush_pending_stmts (e);
5421 /* Return a non-special label in the head of basic block BB.
5422 Create one if it doesn't exist. */
5424 tree
5425 gimple_block_label (basic_block bb)
5427 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5428 bool first = true;
5429 tree label;
5430 glabel *stmt;
5432 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5434 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5435 if (!stmt)
5436 break;
5437 label = gimple_label_label (stmt);
5438 if (!DECL_NONLOCAL (label))
5440 if (!first)
5441 gsi_move_before (&i, &s);
5442 return label;
5446 label = create_artificial_label (UNKNOWN_LOCATION);
5447 stmt = gimple_build_label (label);
5448 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5449 return label;
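/* Typical use is during edge redirection: a sketch like

     tree lab = gimple_block_label (dest);
     CASE_LABEL (elt) = lab;

   retargets a switch case to DEST, as done for real in
   gimple_redirect_edge_and_branch below.  */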
5453 /* Attempt to perform edge redirection by replacing a possibly complex
5454 jump instruction by a goto or by removing the jump completely.
5455 This can apply only if all edges now point to the same block. The
5456 parameters and return values are equivalent to
5457 redirect_edge_and_branch. */
5459 static edge
5460 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5462 basic_block src = e->src;
5463 gimple_stmt_iterator i;
5464 gimple stmt;
5466 /* We can replace or remove a complex jump only when we have exactly
5467 two edges. */
5468 if (EDGE_COUNT (src->succs) != 2
5469 /* Verify that all targets will be TARGET. Specifically, the
5470 edge that is not E must also go to TARGET. */
5471 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5472 return NULL;
5474 i = gsi_last_bb (src);
5475 if (gsi_end_p (i))
5476 return NULL;
5478 stmt = gsi_stmt (i);
5480 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5482 gsi_remove (&i, true);
5483 e = ssa_redirect_edge (e, target);
5484 e->flags = EDGE_FALLTHRU;
5485 return e;
5488 return NULL;
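/* I.e. a block ending in "if (cond_1) goto <A>; else goto <A>;" has
   its GIMPLE_COND removed and is left with a single EDGE_FALLTHRU
   edge to A.  */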
5492 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5493 edge representing the redirected branch. */
5495 static edge
5496 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5498 basic_block bb = e->src;
5499 gimple_stmt_iterator gsi;
5500 edge ret;
5501 gimple stmt;
5503 if (e->flags & EDGE_ABNORMAL)
5504 return NULL;
5506 if (e->dest == dest)
5507 return NULL;
5509 if (e->flags & EDGE_EH)
5510 return redirect_eh_edge (e, dest);
5512 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5514 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5515 if (ret)
5516 return ret;
5519 gsi = gsi_last_bb (bb);
5520 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5522 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5524 case GIMPLE_COND:
5525 /* For COND_EXPR, we only need to redirect the edge. */
5526 break;
5528 case GIMPLE_GOTO:
5529 /* No non-abnormal edges should lead from a non-simple goto, and
5530 simple ones should be represented implicitly. */
5531 gcc_unreachable ();
5533 case GIMPLE_SWITCH:
5535 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5536 tree label = gimple_block_label (dest);
5537 tree cases = get_cases_for_edge (e, switch_stmt);
5539 /* If we have a list of cases associated with E, then use it
5540 as it's a lot faster than walking the entire case vector. */
5541 if (cases)
5543 edge e2 = find_edge (e->src, dest);
5544 tree last, first;
5546 first = cases;
5547 while (cases)
5549 last = cases;
5550 CASE_LABEL (cases) = label;
5551 cases = CASE_CHAIN (cases);
5554 /* If there was already an edge in the CFG, then we need
5555 to move all the cases associated with E to E2. */
5556 if (e2)
5558 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5560 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5561 CASE_CHAIN (cases2) = first;
5563 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5565 else
5567 size_t i, n = gimple_switch_num_labels (switch_stmt);
5569 for (i = 0; i < n; i++)
5571 tree elt = gimple_switch_label (switch_stmt, i);
5572 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5573 CASE_LABEL (elt) = label;
5577 break;
5579 case GIMPLE_ASM:
5581 gasm *asm_stmt = as_a <gasm *> (stmt);
5582 int i, n = gimple_asm_nlabels (asm_stmt);
5583 tree label = NULL;
5585 for (i = 0; i < n; ++i)
5587 tree cons = gimple_asm_label_op (asm_stmt, i);
5588 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5590 if (!label)
5591 label = gimple_block_label (dest);
5592 TREE_VALUE (cons) = label;
5596 /* If we didn't find any label matching the former edge in the
5597 asm labels, we must be redirecting the fallthrough
5598 edge. */
5599 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5601 break;
5603 case GIMPLE_RETURN:
5604 gsi_remove (&gsi, true);
5605 e->flags |= EDGE_FALLTHRU;
5606 break;
5608 case GIMPLE_OMP_RETURN:
5609 case GIMPLE_OMP_CONTINUE:
5610 case GIMPLE_OMP_SECTIONS_SWITCH:
5611 case GIMPLE_OMP_FOR:
5612 /* The edges from OMP constructs can be simply redirected. */
5613 break;
5615 case GIMPLE_EH_DISPATCH:
5616 if (!(e->flags & EDGE_FALLTHRU))
5617 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5618 break;
5620 case GIMPLE_TRANSACTION:
5621 /* The ABORT edge has a stored label associated with it, otherwise
5622 the edges are simply redirectable. */
5623 if (e->flags == 0)
5624 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5625 gimple_block_label (dest));
5626 break;
5628 default:
5629 /* Otherwise it must be a fallthru edge, and we don't need to
5630 do anything besides redirecting it. */
5631 gcc_assert (e->flags & EDGE_FALLTHRU);
5632 break;
5635 /* Update/insert PHI nodes as necessary. */
5637 /* Now update the edges in the CFG. */
5638 e = ssa_redirect_edge (e, dest);
5640 return e;
5643 /* Returns true if it is possible to remove edge E by redirecting
5644 it to the destination of the other edge from E->src. */
5646 static bool
5647 gimple_can_remove_branch_p (const_edge e)
5649 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5650 return false;
5652 return true;
5655 /* Simple wrapper, as we can always redirect fallthru edges. */
5657 static basic_block
5658 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5660 e = gimple_redirect_edge_and_branch (e, dest);
5661 gcc_assert (e);
5663 return NULL;
5667 /* Splits basic block BB after statement STMT (but at least after the
5668 labels). If STMT is NULL, BB is split just after the labels. */
5670 static basic_block
5671 gimple_split_block (basic_block bb, void *stmt)
5673 gimple_stmt_iterator gsi;
5674 gimple_stmt_iterator gsi_tgt;
5675 gimple act;
5676 gimple_seq list;
5677 basic_block new_bb;
5678 edge e;
5679 edge_iterator ei;
5681 new_bb = create_empty_bb (bb);
5683 /* Redirect the outgoing edges. */
5684 new_bb->succs = bb->succs;
5685 bb->succs = NULL;
5686 FOR_EACH_EDGE (e, ei, new_bb->succs)
5687 e->src = new_bb;
5689 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5690 stmt = NULL;
5692 /* Move everything from GSI to the new basic block. */
5693 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5695 act = gsi_stmt (gsi);
5696 if (gimple_code (act) == GIMPLE_LABEL)
5697 continue;
5699 if (!stmt)
5700 break;
5702 if (stmt == act)
5704 gsi_next (&gsi);
5705 break;
5709 if (gsi_end_p (gsi))
5710 return new_bb;
5712 /* Split the statement list - avoid re-creating new containers as this
5713 brings ugly quadratic memory consumption in the inliner.
5714 (We are still quadratic since we need to update stmt BB pointers,
5715 sadly.) */
5716 gsi_split_seq_before (&gsi, &list);
5717 set_bb_seq (new_bb, list);
5718 for (gsi_tgt = gsi_start (list);
5719 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5720 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5722 return new_bb;
5726 /* Moves basic block BB after block AFTER. */
5728 static bool
5729 gimple_move_block_after (basic_block bb, basic_block after)
5731 if (bb->prev_bb == after)
5732 return true;
5734 unlink_block (bb);
5735 link_block (bb, after);
5737 return true;
5741 /* Return TRUE if block BB has no executable statements, otherwise return
5742 FALSE. */
5744 static bool
5745 gimple_empty_block_p (basic_block bb)
5747 /* BB must have no executable statements. */
5748 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5749 if (phi_nodes (bb))
5750 return false;
5751 if (gsi_end_p (gsi))
5752 return true;
5753 if (is_gimple_debug (gsi_stmt (gsi)))
5754 gsi_next_nondebug (&gsi);
5755 return gsi_end_p (gsi);
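/* Thus a block containing only labels and debug stmts is considered
   empty, but one with PHI nodes is not, since the PHIs carry real
   dataflow.  */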
5759 /* Split a basic block if it ends with a conditional branch and if the
5760 other part of the block is not empty. */
5762 static basic_block
5763 gimple_split_block_before_cond_jump (basic_block bb)
5765 gimple last, split_point;
5766 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5767 if (gsi_end_p (gsi))
5768 return NULL;
5769 last = gsi_stmt (gsi);
5770 if (gimple_code (last) != GIMPLE_COND
5771 && gimple_code (last) != GIMPLE_SWITCH)
5772 return NULL;
5773 gsi_prev_nondebug (&gsi);
5774 split_point = gsi_stmt (gsi);
5775 return split_block (bb, split_point)->dest;
5779 /* Return true if basic_block can be duplicated. */
5781 static bool
5782 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5784 return true;
5787 /* Create a duplicate of the basic block BB. NOTE: This does not
5788 preserve SSA form. */
5790 static basic_block
5791 gimple_duplicate_bb (basic_block bb)
5793 basic_block new_bb;
5794 gimple_stmt_iterator gsi_tgt;
5796 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5798 /* Copy the PHI nodes. We ignore PHI node arguments here because
5799 the incoming edges have not been setup yet. */
5800 for (gphi_iterator gpi = gsi_start_phis (bb);
5801 !gsi_end_p (gpi);
5802 gsi_next (&gpi))
5804 gphi *phi, *copy;
5805 phi = gpi.phi ();
5806 copy = create_phi_node (NULL_TREE, new_bb);
5807 create_new_def_for (gimple_phi_result (phi), copy,
5808 gimple_phi_result_ptr (copy));
5809 gimple_set_uid (copy, gimple_uid (phi));
5812 gsi_tgt = gsi_start_bb (new_bb);
5813 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5814 !gsi_end_p (gsi);
5815 gsi_next (&gsi))
5817 def_operand_p def_p;
5818 ssa_op_iter op_iter;
5819 tree lhs;
5820 gimple stmt, copy;
5822 stmt = gsi_stmt (gsi);
5823 if (gimple_code (stmt) == GIMPLE_LABEL)
5824 continue;
5826 /* Don't duplicate label debug stmts. */
5827 if (gimple_debug_bind_p (stmt)
5828 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5829 == LABEL_DECL)
5830 continue;
5832 /* Create a new copy of STMT and duplicate STMT's virtual
5833 operands. */
5834 copy = gimple_copy (stmt);
5835 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5837 maybe_duplicate_eh_stmt (copy, stmt);
5838 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5840 /* When copying around a stmt writing into a local non-user
5841 aggregate, make sure it won't share stack slot with other
5842 vars. */
5843 lhs = gimple_get_lhs (stmt);
5844 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5846 tree base = get_base_address (lhs);
5847 if (base
5848 && (TREE_CODE (base) == VAR_DECL
5849 || TREE_CODE (base) == RESULT_DECL)
5850 && DECL_IGNORED_P (base)
5851 && !TREE_STATIC (base)
5852 && !DECL_EXTERNAL (base)
5853 && (TREE_CODE (base) != VAR_DECL
5854 || !DECL_HAS_VALUE_EXPR_P (base)))
5855 DECL_NONSHAREABLE (base) = 1;
5858 /* Create new names for all the definitions created by COPY and
5859 add replacement mappings for each new name. */
5860 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5861 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5864 return new_bb;
5867 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5869 static void
5870 add_phi_args_after_copy_edge (edge e_copy)
5872 basic_block bb, bb_copy = e_copy->src, dest;
5873 edge e;
5874 edge_iterator ei;
5875 gphi *phi, *phi_copy;
5876 tree def;
5877 gphi_iterator psi, psi_copy;
5879 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5880 return;
5882 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5884 if (e_copy->dest->flags & BB_DUPLICATED)
5885 dest = get_bb_original (e_copy->dest);
5886 else
5887 dest = e_copy->dest;
5889 e = find_edge (bb, dest);
5890 if (!e)
5892 /* During loop unrolling the target of the latch edge is copied.
5893 In this case we are not looking for the edge to DEST, but for
5894 the edge to the duplicated block whose original was DEST. */
5895 FOR_EACH_EDGE (e, ei, bb->succs)
5897 if ((e->dest->flags & BB_DUPLICATED)
5898 && get_bb_original (e->dest) == dest)
5899 break;
5902 gcc_assert (e != NULL);
5905 for (psi = gsi_start_phis (e->dest),
5906 psi_copy = gsi_start_phis (e_copy->dest);
5907 !gsi_end_p (psi);
5908 gsi_next (&psi), gsi_next (&psi_copy))
5910 phi = psi.phi ();
5911 phi_copy = psi_copy.phi ();
5912 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5913 add_phi_arg (phi_copy, def, e_copy,
5914 gimple_phi_arg_location_from_edge (phi, e));
5919 /* Basic block BB_COPY was created by code duplication. Add phi node
5920 arguments for edges going out of BB_COPY. The blocks that were
5921 duplicated have BB_DUPLICATED set. */
5923 void
5924 add_phi_args_after_copy_bb (basic_block bb_copy)
5926 edge e_copy;
5927 edge_iterator ei;
5929 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5931 add_phi_args_after_copy_edge (e_copy);
5935 /* Blocks in REGION_COPY array of length N_REGION were created by
5936 duplication of basic blocks. Add phi node arguments for edges
5937 going from these blocks. If E_COPY is not NULL, also add
5938 phi node arguments for its destination. */
5940 void
5941 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5942 edge e_copy)
5944 unsigned i;
5946 for (i = 0; i < n_region; i++)
5947 region_copy[i]->flags |= BB_DUPLICATED;
5949 for (i = 0; i < n_region; i++)
5950 add_phi_args_after_copy_bb (region_copy[i]);
5951 if (e_copy)
5952 add_phi_args_after_copy_edge (e_copy);
5954 for (i = 0; i < n_region; i++)
5955 region_copy[i]->flags &= ~BB_DUPLICATED;
5958 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5959 important exit edge EXIT. By important we mean that no SSA name defined
5960 inside the region is live over the other exit edges of the region. All entry
5961 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5962 to the duplicate of the region. Dominance and loop information is
5963 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5964 UPDATE_DOMINANCE is false then we assume that the caller will update the
5965 dominance information after calling this function. The new basic
5966 blocks are stored to REGION_COPY in the same order as they had in REGION,
5967 provided that REGION_COPY is not NULL.
5968 The function returns false if it is unable to copy the region,
5969 true otherwise. */
5971 bool
5972 gimple_duplicate_sese_region (edge entry, edge exit,
5973 basic_block *region, unsigned n_region,
5974 basic_block *region_copy,
5975 bool update_dominance)
5977 unsigned i;
5978 bool free_region_copy = false, copying_header = false;
5979 struct loop *loop = entry->dest->loop_father;
5980 edge exit_copy;
5981 vec<basic_block> doms;
5982 edge redirected;
5983 int total_freq = 0, entry_freq = 0;
5984 gcov_type total_count = 0, entry_count = 0;
5986 if (!can_copy_bbs_p (region, n_region))
5987 return false;
5989 /* Some sanity checking. Note that we do not check for all possible
5990 misuses of these functions. That is, if you ask to copy something weird,
5991 it will work, but the state of structures probably will not be
5992 correct. */
5993 for (i = 0; i < n_region; i++)
5995 /* We do not handle subloops, i.e. all the blocks must belong to the
5996 same loop. */
5997 if (region[i]->loop_father != loop)
5998 return false;
6000 if (region[i] != entry->dest
6001 && region[i] == loop->header)
6002 return false;
6005 /* In case the function is used for loop header copying (which is the primary
6006 use), ensure that EXIT and its copy will be the new latch and entry edges.
6007 if (loop->header == entry->dest)
6009 copying_header = true;
6011 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6012 return false;
6014 for (i = 0; i < n_region; i++)
6015 if (region[i] != exit->src
6016 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6017 return false;
6020 initialize_original_copy_tables ();
6022 if (copying_header)
6023 set_loop_copy (loop, loop_outer (loop));
6024 else
6025 set_loop_copy (loop, loop);
6027 if (!region_copy)
6029 region_copy = XNEWVEC (basic_block, n_region);
6030 free_region_copy = true;
6033 /* Record blocks outside the region that are dominated by something
6034 inside. */
6035 if (update_dominance)
6037 doms.create (0);
6038 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6041 if (entry->dest->count)
6043 total_count = entry->dest->count;
6044 entry_count = entry->count;
6045 /* Fix up corner cases, to avoid division by zero or creation of negative
6046 frequencies. */
6047 if (entry_count > total_count)
6048 entry_count = total_count;
6050 else
6052 total_freq = entry->dest->frequency;
6053 entry_freq = EDGE_FREQUENCY (entry);
6054 /* Fix up corner cases, to avoid division by zero or creation of negative
6055 frequencies. */
6056 if (total_freq == 0)
6057 total_freq = 1;
6058 else if (entry_freq > total_freq)
6059 entry_freq = total_freq;
6062 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6063 split_edge_bb_loc (entry), update_dominance);
6064 if (total_count)
6066 scale_bbs_frequencies_gcov_type (region, n_region,
6067 total_count - entry_count,
6068 total_count);
6069 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6070 total_count);
6072 else
6074 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6075 total_freq);
6076 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6079 if (copying_header)
6081 loop->header = exit->dest;
6082 loop->latch = exit->src;
6085 /* Redirect the entry and add the phi node arguments. */
6086 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6087 gcc_assert (redirected != NULL);
6088 flush_pending_stmts (entry);
6090 /* Concerning updating of dominators: We must recount dominators
6091 for entry block and its copy. Anything that is outside of the
6092 region, but was dominated by something inside needs recounting as
6093 well. */
6094 if (update_dominance)
6096 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6097 doms.safe_push (get_bb_original (entry->dest));
6098 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6099 doms.release ();
6102 /* Add the other PHI node arguments. */
6103 add_phi_args_after_copy (region_copy, n_region, NULL);
6105 if (free_region_copy)
6106 free (region_copy);
6108 free_original_copy_tables ();
6109 return true;
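/* The primary client is loop header copying, where ENTRY is the
   preheader edge, EXIT the edge from the header into the loop body,
   and the copied header ends up acting as the loop guard.  */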
6112 /* Checks if BB is part of the region defined by N_REGION BBS. */
6113 static bool
6114 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6116 unsigned int n;
6118 for (n = 0; n < n_region; n++)
6120 if (bb == bbs[n])
6121 return true;
6123 return false;
6126 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6127 are stored to REGION_COPY in the same order as they appear
6128 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6129 the region, EXIT an exit from it. The condition guarding EXIT
6130 is moved to ENTRY. Returns true if duplication succeeds, false
6131 otherwise.
6133 For example,
6135 some_code;
6136 if (cond)
  { }
6138 else
  { }
6141 is transformed to
6143 if (cond)
  {
6145 some_code;
  }
6148 else
  {
6150 some_code;
  }
6155 bool
6156 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6157 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6158 basic_block *region_copy ATTRIBUTE_UNUSED)
6160 unsigned i;
6161 bool free_region_copy = false;
6162 struct loop *loop = exit->dest->loop_father;
6163 struct loop *orig_loop = entry->dest->loop_father;
6164 basic_block switch_bb, entry_bb, nentry_bb;
6165 vec<basic_block> doms;
6166 int total_freq = 0, exit_freq = 0;
6167 gcov_type total_count = 0, exit_count = 0;
6168 edge exits[2], nexits[2], e;
6169 gimple_stmt_iterator gsi;
6170 gimple cond_stmt;
6171 edge sorig, snew;
6172 basic_block exit_bb;
6173 gphi_iterator psi;
6174 gphi *phi;
6175 tree def;
6176 struct loop *target, *aloop, *cloop;
6178 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6179 exits[0] = exit;
6180 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6182 if (!can_copy_bbs_p (region, n_region))
6183 return false;
6185 initialize_original_copy_tables ();
6186 set_loop_copy (orig_loop, loop);
6188 target = loop;
6189 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6191 if (bb_part_of_region_p (aloop->header, region, n_region))
6193 cloop = duplicate_loop (aloop, target);
6194 duplicate_subloops (aloop, cloop);
6198 if (!region_copy)
6200 region_copy = XNEWVEC (basic_block, n_region);
6201 free_region_copy = true;
6204 gcc_assert (!need_ssa_update_p (cfun));
6206 /* Record blocks outside the region that are dominated by something
6207 inside. */
6208 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6210 if (exit->src->count)
6212 total_count = exit->src->count;
6213 exit_count = exit->count;
6214 /* Fix up corner cases, to avoid division by zero or creation of negative
6215 frequencies. */
6216 if (exit_count > total_count)
6217 exit_count = total_count;
6219 else
6221 total_freq = exit->src->frequency;
6222 exit_freq = EDGE_FREQUENCY (exit);
6223 /* Fix up corner cases, to avoid division by zero or creation of negative
6224 frequencies. */
6225 if (total_freq == 0)
6226 total_freq = 1;
6227 if (exit_freq > total_freq)
6228 exit_freq = total_freq;
6231 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6232 split_edge_bb_loc (exit), true);
6233 if (total_count)
6235 scale_bbs_frequencies_gcov_type (region, n_region,
6236 total_count - exit_count,
6237 total_count);
6238 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6239 total_count);
6241 else
6243 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6244 total_freq);
6245 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6248 /* Create the switch block, and put the exit condition to it. */
6249 entry_bb = entry->dest;
6250 nentry_bb = get_bb_copy (entry_bb);
6251 if (!last_stmt (entry->src)
6252 || !stmt_ends_bb_p (last_stmt (entry->src)))
6253 switch_bb = entry->src;
6254 else
6255 switch_bb = split_edge (entry);
6256 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6258 gsi = gsi_last_bb (switch_bb);
6259 cond_stmt = last_stmt (exit->src);
6260 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6261 cond_stmt = gimple_copy (cond_stmt);
6263 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6265 sorig = single_succ_edge (switch_bb);
6266 sorig->flags = exits[1]->flags;
6267 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6269 /* Register the new edge from SWITCH_BB in loop exit lists. */
6270 rescan_loop_exit (snew, true, false);
6272 /* Add the PHI node arguments. */
6273 add_phi_args_after_copy (region_copy, n_region, snew);
6275 /* Get rid of now superfluous conditions and associated edges (and phi node
6276 arguments). */
6277 exit_bb = exit->dest;
6279 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6280 PENDING_STMT (e) = NULL;
6282 /* The latch of ORIG_LOOP was copied, and so was the backedge
6283 to the original header. We redirect this backedge to EXIT_BB. */
6284 for (i = 0; i < n_region; i++)
6285 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6287 gcc_assert (single_succ_edge (region_copy[i]));
6288 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6289 PENDING_STMT (e) = NULL;
6290 for (psi = gsi_start_phis (exit_bb);
6291 !gsi_end_p (psi);
6292 gsi_next (&psi))
6294 phi = psi.phi ();
6295 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6296 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6299 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6300 PENDING_STMT (e) = NULL;
6302 /* Anything that is outside of the region, but was dominated by something
6303 inside needs to update dominance info. */
6304 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6305 doms.release ();
6306 /* Update the SSA web. */
6307 update_ssa (TODO_update_ssa);
6309 if (free_region_copy)
6310 free (region_copy);
6312 free_original_copy_tables ();
6313 return true;
6316 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6317 adding blocks when the dominator traversal reaches EXIT. This
6318 function silently assumes that ENTRY strictly dominates EXIT. */
6320 void
6321 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6322 vec<basic_block> *bbs_p)
6324 basic_block son;
6326 for (son = first_dom_son (CDI_DOMINATORS, entry);
6327 son;
6328 son = next_dom_son (CDI_DOMINATORS, son))
6330 bbs_p->safe_push (son);
6331 if (son != exit)
6332 gather_blocks_in_sese_region (son, exit, bbs_p);
6336 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6337 The duplicates are recorded in VARS_MAP. */
6339 static void
6340 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6341 tree to_context)
6343 tree t = *tp, new_t;
6344 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6346 if (DECL_CONTEXT (t) == to_context)
6347 return;
6349 bool existed;
6350 tree &loc = vars_map->get_or_insert (t, &existed);
6352 if (!existed)
6354 if (SSA_VAR_P (t))
6356 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6357 add_local_decl (f, new_t);
6359 else
6361 gcc_assert (TREE_CODE (t) == CONST_DECL);
6362 new_t = copy_node (t);
6364 DECL_CONTEXT (new_t) = to_context;
6366 loc = new_t;
6368 else
6369 new_t = loc;
6371 *tp = new_t;
6375 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6376 VARS_MAP maps old ssa names and var_decls to the new ones. */
6378 static tree
6379 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6380 tree to_context)
6382 tree new_name;
6384 gcc_assert (!virtual_operand_p (name));
6386 tree *loc = vars_map->get (name);
6388 if (!loc)
6390 tree decl = SSA_NAME_VAR (name);
6391 if (decl)
6393 replace_by_duplicate_decl (&decl, vars_map, to_context);
6394 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6395 decl, SSA_NAME_DEF_STMT (name));
6396 if (SSA_NAME_IS_DEFAULT_DEF (name))
6397 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6398 decl, new_name);
6400 else
6401 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6402 name, SSA_NAME_DEF_STMT (name));
6404 vars_map->put (name, new_name);
6406 else
6407 new_name = *loc;
6409 return new_name;
6412 struct move_stmt_d
6414 tree orig_block;
6415 tree new_block;
6416 tree from_context;
6417 tree to_context;
6418 hash_map<tree, tree> *vars_map;
6419 htab_t new_label_map;
6420 hash_map<void *, void *> *eh_map;
6421 bool remap_decls_p;
6424 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6425 contained in *TP if it was previously ORIG_BLOCK, and change the
6426 DECL_CONTEXT of every local variable referenced in *TP. */
6428 static tree
6429 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6431 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6432 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6433 tree t = *tp;
6435 if (EXPR_P (t))
6437 tree block = TREE_BLOCK (t);
6438 if (block == p->orig_block
6439 || (p->orig_block == NULL_TREE
6440 && block != NULL_TREE))
6441 TREE_SET_BLOCK (t, p->new_block);
6442 #ifdef ENABLE_CHECKING
6443 else if (block != NULL_TREE)
6445 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6446 block = BLOCK_SUPERCONTEXT (block);
6447 gcc_assert (block == p->orig_block);
6449 #endif
6451 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6453 if (TREE_CODE (t) == SSA_NAME)
6454 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6455 else if (TREE_CODE (t) == LABEL_DECL)
6457 if (p->new_label_map)
6459 struct tree_map in, *out;
6460 in.base.from = t;
6461 out = (struct tree_map *)
6462 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6463 if (out)
6464 *tp = t = out->to;
6467 DECL_CONTEXT (t) = p->to_context;
6469 else if (p->remap_decls_p)
6471 /* Replace T with its duplicate. T should no longer appear in the
6472 parent function, so this looks wasteful; however, it may appear
6473 in referenced_vars, and more importantly, as virtual operands of
6474 statements, and in alias lists of other variables. It would be
6475 quite difficult to expunge it from all those places. ??? It might
6476 suffice to do this for addressable variables. */
6477 if ((TREE_CODE (t) == VAR_DECL
6478 && !is_global_var (t))
6479 || TREE_CODE (t) == CONST_DECL)
6480 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6482 *walk_subtrees = 0;
6484 else if (TYPE_P (t))
6485 *walk_subtrees = 0;
6487 return NULL_TREE;
6490 /* Helper for move_stmt_r. Given an EH region number for the source
6491 function, map that to the duplicate EH region number in the dest. */
6493 static int
6494 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6496 eh_region old_r, new_r;
6498 old_r = get_eh_region_from_number (old_nr);
6499 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6501 return new_r->index;
6504 /* Similar, but operate on INTEGER_CSTs. */
6506 static tree
6507 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6509 int old_nr, new_nr;
6511 old_nr = tree_to_shwi (old_t_nr);
6512 new_nr = move_stmt_eh_region_nr (old_nr, p);
6514 return build_int_cst (integer_type_node, new_nr);
6517 /* Like move_stmt_op, but for gimple statements.
6519 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6520 contained in the current statement in *GSI_P and change the
6521 DECL_CONTEXT of every local variable referenced in the current
6522 statement. */
6524 static tree
6525 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6526 struct walk_stmt_info *wi)
6528 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6529 gimple stmt = gsi_stmt (*gsi_p);
6530 tree block = gimple_block (stmt);
6532 if (block == p->orig_block
6533 || (p->orig_block == NULL_TREE
6534 && block != NULL_TREE))
6535 gimple_set_block (stmt, p->new_block);
6537 switch (gimple_code (stmt))
6539 case GIMPLE_CALL:
6540 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6542 tree r, fndecl = gimple_call_fndecl (stmt);
6543 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6544 switch (DECL_FUNCTION_CODE (fndecl))
6546 case BUILT_IN_EH_COPY_VALUES:
6547 r = gimple_call_arg (stmt, 1);
6548 r = move_stmt_eh_region_tree_nr (r, p);
6549 gimple_call_set_arg (stmt, 1, r);
6550 /* FALLTHRU */
6552 case BUILT_IN_EH_POINTER:
6553 case BUILT_IN_EH_FILTER:
6554 r = gimple_call_arg (stmt, 0);
6555 r = move_stmt_eh_region_tree_nr (r, p);
6556 gimple_call_set_arg (stmt, 0, r);
6557 break;
6559 default:
6560 break;
6563 break;
6565 case GIMPLE_RESX:
6567 gresx *resx_stmt = as_a <gresx *> (stmt);
6568 int r = gimple_resx_region (resx_stmt);
6569 r = move_stmt_eh_region_nr (r, p);
6570 gimple_resx_set_region (resx_stmt, r);
6572 break;
6574 case GIMPLE_EH_DISPATCH:
6576 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6577 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6578 r = move_stmt_eh_region_nr (r, p);
6579 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6581 break;
6583 case GIMPLE_OMP_RETURN:
6584 case GIMPLE_OMP_CONTINUE:
6585 break;
6586 default:
6587 if (is_gimple_omp (stmt))
6589 /* Do not remap variables inside OMP directives. Variables
6590 referenced in clauses and directive header belong to the
6591 parent function and should not be moved into the child
6592 function. */
6593 bool save_remap_decls_p = p->remap_decls_p;
6594 p->remap_decls_p = false;
6595 *handled_ops_p = true;
6597 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6598 move_stmt_op, wi);
6600 p->remap_decls_p = save_remap_decls_p;
6602 break;
6605 return NULL_TREE;
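/* Usage sketch: MOVE_STMT_R and MOVE_STMT_OP are driven together through
   the statement walker, with WI.INFO pointing at the move_stmt_d
   descriptor; this is exactly how move_block_to_fn below uses them:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;                  (a struct move_stmt_d *)
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);  */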
6608 /* Move basic block BB from function CFUN to function DEST_FN. The
6609 block is moved out of the original linked list and placed after
6610 block AFTER in the new list. Also, the block is removed from the
6611 original array of blocks and placed in DEST_FN's array of blocks.
6612 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6613 updated to reflect the moved edges.
6615 The local variables are remapped to new instances; VARS_MAP is used
6616 to record the mapping. */
6618 static void
6619 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6620 basic_block after, bool update_edge_count_p,
6621 struct move_stmt_d *d)
6623 struct control_flow_graph *cfg;
6624 edge_iterator ei;
6625 edge e;
6626 gimple_stmt_iterator si;
6627 unsigned old_len, new_len;
6629 /* Remove BB from dominance structures. */
6630 delete_from_dominance_info (CDI_DOMINATORS, bb);
6632 /* Move BB from its current loop to the copy in the new function. */
6633 if (current_loops)
6635 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6636 if (new_loop)
6637 bb->loop_father = new_loop;
6640 /* Link BB to the new linked list. */
6641 move_block_after (bb, after);
6643 /* Update the edge count in the corresponding flowgraphs. */
6644 if (update_edge_count_p)
6645 FOR_EACH_EDGE (e, ei, bb->succs)
6647 cfun->cfg->x_n_edges--;
6648 dest_cfun->cfg->x_n_edges++;
6651 /* Remove BB from the original basic block array. */
6652 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6653 cfun->cfg->x_n_basic_blocks--;
6655 /* Grow DEST_CFUN's basic block array if needed. */
6656 cfg = dest_cfun->cfg;
6657 cfg->x_n_basic_blocks++;
6658 if (bb->index >= cfg->x_last_basic_block)
6659 cfg->x_last_basic_block = bb->index + 1;
6661 old_len = vec_safe_length (cfg->x_basic_block_info);
6662 if ((unsigned) cfg->x_last_basic_block >= old_len)
6664 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6665 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6668 (*cfg->x_basic_block_info)[bb->index] = bb;
6670 /* Remap the variables in phi nodes. */
6671 for (gphi_iterator psi = gsi_start_phis (bb);
6672 !gsi_end_p (psi); )
6674 gphi *phi = psi.phi ();
6675 use_operand_p use;
6676 tree op = PHI_RESULT (phi);
6677 ssa_op_iter oi;
6678 unsigned i;
6680 if (virtual_operand_p (op))
6682 /* Remove the phi nodes for virtual operands (alias analysis will be
6683 run for the new function, anyway). */
6684 remove_phi_node (&psi, true);
6685 continue;
6688 SET_PHI_RESULT (phi,
6689 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6690 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6692 op = USE_FROM_PTR (use);
6693 if (TREE_CODE (op) == SSA_NAME)
6694 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6697 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6699 location_t locus = gimple_phi_arg_location (phi, i);
6700 tree block = LOCATION_BLOCK (locus);
6702 if (locus == UNKNOWN_LOCATION)
6703 continue;
6704 if (d->orig_block == NULL_TREE || block == d->orig_block)
6706 if (d->new_block == NULL_TREE)
6707 locus = LOCATION_LOCUS (locus);
6708 else
6709 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6710 gimple_phi_arg_set_location (phi, i, locus);
6714 gsi_next (&psi);
6717 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6719 gimple stmt = gsi_stmt (si);
6720 struct walk_stmt_info wi;
6722 memset (&wi, 0, sizeof (wi));
6723 wi.info = d;
6724 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6726 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6728 tree label = gimple_label_label (label_stmt);
6729 int uid = LABEL_DECL_UID (label);
6731 gcc_assert (uid > -1);
6733 old_len = vec_safe_length (cfg->x_label_to_block_map);
6734 if (old_len <= (unsigned) uid)
6736 new_len = 3 * uid / 2 + 1;
6737 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6740 (*cfg->x_label_to_block_map)[uid] = bb;
6741 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6743 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6745 if (uid >= dest_cfun->cfg->last_label_uid)
6746 dest_cfun->cfg->last_label_uid = uid + 1;
6749 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6750 remove_stmt_from_eh_lp_fn (cfun, stmt);
6752 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6753 gimple_remove_stmt_histograms (cfun, stmt);
6755 /* We cannot leave any operands allocated from the operand caches of
6756 the current function. */
6757 free_stmt_operands (cfun, stmt);
6758 push_cfun (dest_cfun);
6759 update_stmt (stmt);
6760 pop_cfun ();
6763 FOR_EACH_EDGE (e, ei, bb->succs)
6764 if (e->goto_locus != UNKNOWN_LOCATION)
6766 tree block = LOCATION_BLOCK (e->goto_locus);
6767 if (d->orig_block == NULL_TREE
6768 || block == d->orig_block)
6769 e->goto_locus = d->new_block ?
6770 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6771 LOCATION_LOCUS (e->goto_locus);
6775 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6776 the outermost EH region. Use REGION as the incoming base EH region. */
6778 static eh_region
6779 find_outermost_region_in_block (struct function *src_cfun,
6780 basic_block bb, eh_region region)
6782 gimple_stmt_iterator si;
6784 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6786 gimple stmt = gsi_stmt (si);
6787 eh_region stmt_region;
6788 int lp_nr;
6790 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6791 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6792 if (stmt_region)
6794 if (region == NULL)
6795 region = stmt_region;
6796 else if (stmt_region != region)
6798 region = eh_region_outermost (src_cfun, stmt_region, region);
6799 gcc_assert (region != NULL);
6804 return region;
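/* Usage sketch: fold this over every block of the region being moved,
   starting from a NULL region, to find the outermost EH region that has
   to be duplicated into the new function (see move_sese_region_to_fn):

     eh_region region = NULL;
     FOR_EACH_VEC_ELT (bbs, i, bb)
       region = find_outermost_region_in_block (saved_cfun, bb, region);  */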
6807 static tree
6808 new_label_mapper (tree decl, void *data)
6810 htab_t hash = (htab_t) data;
6811 struct tree_map *m;
6812 void **slot;
6814 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6816 m = XNEW (struct tree_map);
6817 m->hash = DECL_UID (decl);
6818 m->base.from = decl;
6819 m->to = create_artificial_label (UNKNOWN_LOCATION);
6820 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6821 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6822 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6824 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6825 gcc_assert (*slot == NULL);
6827 *slot = m;
6829 return m->to;
6832 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6833 subblocks. */
6835 static void
6836 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6837 tree to_context)
6839 tree *tp, t;
6841 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6843 t = *tp;
6844 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6845 continue;
6846 replace_by_duplicate_decl (&t, vars_map, to_context);
6847 if (t != *tp)
6849 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6851 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6852 DECL_HAS_VALUE_EXPR_P (t) = 1;
6854 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6855 *tp = t;
6859 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6860 replace_block_vars_by_duplicates (block, vars_map, to_context);
6863 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6864 from FN1 to FN2. */
6866 static void
6867 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6868 struct loop *loop)
6870 /* Discard it from the old loop array. */
6871 (*get_loops (fn1))[loop->num] = NULL;
6873 /* Place it in the new loop array, assigning it a new number. */
6874 loop->num = number_of_loops (fn2);
6875 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6877 /* Recurse to children. */
6878 for (loop = loop->inner; loop; loop = loop->next)
6879 fixup_loop_arrays_after_move (fn1, fn2, loop);
6882 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6883 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6885 DEBUG_FUNCTION void
6886 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6888 basic_block bb;
6889 edge_iterator ei;
6890 edge e;
6891 bitmap bbs = BITMAP_ALLOC (NULL);
6892 int i;
6894 gcc_assert (entry != NULL);
6895 gcc_assert (entry != exit);
6896 gcc_assert (bbs_p != NULL);
6898 gcc_assert (bbs_p->length () > 0);
6900 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6901 bitmap_set_bit (bbs, bb->index);
6903 gcc_assert (bitmap_bit_p (bbs, entry->index));
6904 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6906 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6908 if (bb == entry)
6910 gcc_assert (single_pred_p (entry));
6911 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6913 else
6914 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6916 e = ei_edge (ei);
6917 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6920 if (bb == exit)
6922 gcc_assert (single_succ_p (exit));
6923 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6925 else
6926 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6928 e = ei_edge (ei);
6929 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6933 BITMAP_FREE (bbs);
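/* Usage sketch: as a DEBUG_FUNCTION this is intended for checking builds
   and debugger use, e.g. guarded the way move_sese_region_to_fn below
   guards it:

     #ifdef ENABLE_CHECKING
       verify_sese (entry_bb, exit_bb, &bbs);
     #endif  */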
6937 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6938 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6939 single basic block in the original CFG and the new basic block is
6940 returned. DEST_CFUN must not have a CFG yet.
6942 Note that the region need not be a pure SESE region. Blocks inside
6943 the region may contain calls to abort/exit. The only restriction
6944 is that ENTRY_BB should be the only entry point and it must
6945 dominate EXIT_BB.
6947 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6948 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6949 to the new function.
6951 All local variables referenced in the region are assumed to be in
6952 the corresponding BLOCK_VARS and unexpanded variable lists
6953 associated with DEST_CFUN. */
6955 basic_block
6956 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6957 basic_block exit_bb, tree orig_block)
6959 vec<basic_block> bbs, dom_bbs;
6960 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6961 basic_block after, bb, *entry_pred, *exit_succ, abb;
6962 struct function *saved_cfun = cfun;
6963 int *entry_flag, *exit_flag;
6964 unsigned *entry_prob, *exit_prob;
6965 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6966 edge e;
6967 edge_iterator ei;
6968 htab_t new_label_map;
6969 hash_map<void *, void *> *eh_map;
6970 struct loop *loop = entry_bb->loop_father;
6971 struct loop *loop0 = get_loop (saved_cfun, 0);
6972 struct move_stmt_d d;
6974 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6975 region. */
6976 gcc_assert (entry_bb != exit_bb
6977 && (!exit_bb
6978 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6980 /* Collect all the blocks in the region. Manually add ENTRY_BB
6981 because it won't be added by dfs_enumerate_from. */
6982 bbs.create (0);
6983 bbs.safe_push (entry_bb);
6984 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6985 #ifdef ENABLE_CHECKING
6986 verify_sese (entry_bb, exit_bb, &bbs);
6987 #endif
6989 /* The blocks that used to be dominated by something in BBS will now be
6990 dominated by the new block. */
6991 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6992 bbs.address (),
6993 bbs.length ());
6995 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6996 the predecessor edges to ENTRY_BB and the successor edges to
6997 EXIT_BB so that we can re-attach them to the new basic block that
6998 will replace the region. */
6999 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7000 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7001 entry_flag = XNEWVEC (int, num_entry_edges);
7002 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7003 i = 0;
7004 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7006 entry_prob[i] = e->probability;
7007 entry_flag[i] = e->flags;
7008 entry_pred[i++] = e->src;
7009 remove_edge (e);
7012 if (exit_bb)
7014 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7015 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7016 exit_flag = XNEWVEC (int, num_exit_edges);
7017 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7018 i = 0;
7019 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7021 exit_prob[i] = e->probability;
7022 exit_flag[i] = e->flags;
7023 exit_succ[i++] = e->dest;
7024 remove_edge (e);
7027 else
7029 num_exit_edges = 0;
7030 exit_succ = NULL;
7031 exit_flag = NULL;
7032 exit_prob = NULL;
7035 /* Switch context to the child function to initialize DEST_FN's CFG. */
7036 gcc_assert (dest_cfun->cfg == NULL);
7037 push_cfun (dest_cfun);
7039 init_empty_tree_cfg ();
7041 /* Initialize EH information for the new function. */
7042 eh_map = NULL;
7043 new_label_map = NULL;
7044 if (saved_cfun->eh)
7046 eh_region region = NULL;
7048 FOR_EACH_VEC_ELT (bbs, i, bb)
7049 region = find_outermost_region_in_block (saved_cfun, bb, region);
7051 init_eh_for_function ();
7052 if (region != NULL)
7054 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7055 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7056 new_label_mapper, new_label_map);
7060 /* Initialize an empty loop tree. */
7061 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7062 init_loops_structure (dest_cfun, loops, 1);
7063 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7064 set_loops_for_fn (dest_cfun, loops);
7066 /* Move the outlined loop tree part. */
7067 num_nodes = bbs.length ();
7068 FOR_EACH_VEC_ELT (bbs, i, bb)
7070 if (bb->loop_father->header == bb)
7072 struct loop *this_loop = bb->loop_father;
7073 struct loop *outer = loop_outer (this_loop);
7074 if (outer == loop
7075 /* If the SESE region contains some bbs ending with
7076 a noreturn call, those are considered to belong
7077 to the outermost loop in saved_cfun, rather than
7078 the entry_bb's loop_father. */
7079 || outer == loop0)
7081 if (outer != loop)
7082 num_nodes -= this_loop->num_nodes;
7083 flow_loop_tree_node_remove (bb->loop_father);
7084 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7085 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7088 else if (bb->loop_father == loop0 && loop0 != loop)
7089 num_nodes--;
7091 /* Remove loop exits from the outlined region. */
7092 if (loops_for_fn (saved_cfun)->exits)
7093 FOR_EACH_EDGE (e, ei, bb->succs)
7095 struct loops *l = loops_for_fn (saved_cfun);
7096 loop_exit **slot
7097 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7098 NO_INSERT);
7099 if (slot)
7100 l->exits->clear_slot (slot);
7105 /* Adjust the number of blocks in the tree root of the outlined part. */
7106 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7108 /* Set up a mapping to be used by move_block_to_fn. */
7109 loop->aux = current_loops->tree_root;
7110 loop0->aux = current_loops->tree_root;
7112 pop_cfun ();
7114 /* Move blocks from BBS into DEST_CFUN. */
7115 gcc_assert (bbs.length () >= 2);
7116 after = dest_cfun->cfg->x_entry_block_ptr;
7117 hash_map<tree, tree> vars_map;
7119 memset (&d, 0, sizeof (d));
7120 d.orig_block = orig_block;
7121 d.new_block = DECL_INITIAL (dest_cfun->decl);
7122 d.from_context = cfun->decl;
7123 d.to_context = dest_cfun->decl;
7124 d.vars_map = &vars_map;
7125 d.new_label_map = new_label_map;
7126 d.eh_map = eh_map;
7127 d.remap_decls_p = true;
7129 FOR_EACH_VEC_ELT (bbs, i, bb)
7131 /* No need to update edge counts on the last block. They have
7132 already been updated earlier when we detached the region from
7133 the original CFG. */
7134 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7135 after = bb;
7138 loop->aux = NULL;
7139 loop0->aux = NULL;
7140 /* Loop sizes are no longer correct, fix them up. */
7141 loop->num_nodes -= num_nodes;
7142 for (struct loop *outer = loop_outer (loop);
7143 outer; outer = loop_outer (outer))
7144 outer->num_nodes -= num_nodes;
7145 loop0->num_nodes -= bbs.length () - num_nodes;
7147 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7149 struct loop *aloop;
7150 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7151 if (aloop != NULL)
7153 if (aloop->simduid)
7155 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7156 d.to_context);
7157 dest_cfun->has_simduid_loops = true;
7159 if (aloop->force_vectorize)
7160 dest_cfun->has_force_vectorize_loops = true;
7164 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7165 if (orig_block)
7167 tree block;
7168 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7169 == NULL_TREE);
7170 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7171 = BLOCK_SUBBLOCKS (orig_block);
7172 for (block = BLOCK_SUBBLOCKS (orig_block);
7173 block; block = BLOCK_CHAIN (block))
7174 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7175 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7178 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7179 &vars_map, dest_cfun->decl);
7181 if (new_label_map)
7182 htab_delete (new_label_map);
7183 if (eh_map)
7184 delete eh_map;
7186 /* Rewire the entry and exit blocks. The successor to the entry
7187 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7188 the child function. Similarly, the predecessor of DEST_FN's
7189 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7190 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7191 various CFG manipulation functions get to the right CFG.
7193 FIXME, this is silly. The CFG ought to become a parameter to
7194 these helpers. */
7195 push_cfun (dest_cfun);
7196 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7197 if (exit_bb)
7198 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7199 pop_cfun ();
7201 /* Back in the original function, the SESE region has disappeared,
7202 create a new basic block in its place. */
7203 bb = create_empty_bb (entry_pred[0]);
7204 if (current_loops)
7205 add_bb_to_loop (bb, loop);
7206 for (i = 0; i < num_entry_edges; i++)
7208 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7209 e->probability = entry_prob[i];
7212 for (i = 0; i < num_exit_edges; i++)
7214 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7215 e->probability = exit_prob[i];
7218 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7219 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7220 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7221 dom_bbs.release ();
7223 if (exit_bb)
7225 free (exit_prob);
7226 free (exit_flag);
7227 free (exit_succ);
7229 free (entry_prob);
7230 free (entry_flag);
7231 free (entry_pred);
7232 bbs.release ();
7234 return bb;
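/* Usage sketch (hypothetical caller, modeled on the OMP outlining done
   in omp-low.c): DEST_CFUN must be freshly created and CFG-less, and
   ORIG_BLOCK is the BLOCK whose statements move to the child:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     gcc_assert (child_cfun->cfg == NULL);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   NEW_BB then stands in for the whole region in the parent's CFG.  */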
7238 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7241 void
7242 dump_function_to_file (tree fndecl, FILE *file, int flags)
7244 tree arg, var, old_current_fndecl = current_function_decl;
7245 struct function *dsf;
7246 bool ignore_topmost_bind = false, any_var = false;
7247 basic_block bb;
7248 tree chain;
7249 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7250 && decl_is_tm_clone (fndecl));
7251 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7253 current_function_decl = fndecl;
7254 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7256 arg = DECL_ARGUMENTS (fndecl);
7257 while (arg)
7259 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7260 fprintf (file, " ");
7261 print_generic_expr (file, arg, dump_flags);
7262 if (flags & TDF_VERBOSE)
7263 print_node (file, "", arg, 4);
7264 if (DECL_CHAIN (arg))
7265 fprintf (file, ", ");
7266 arg = DECL_CHAIN (arg);
7268 fprintf (file, ")\n");
7270 if (flags & TDF_VERBOSE)
7271 print_node (file, "", fndecl, 2);
7273 dsf = DECL_STRUCT_FUNCTION (fndecl);
7274 if (dsf && (flags & TDF_EH))
7275 dump_eh_tree (file, dsf);
7277 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7279 dump_node (fndecl, TDF_SLIM | flags, file);
7280 current_function_decl = old_current_fndecl;
7281 return;
7284 /* When GIMPLE is lowered, the variables are no longer available in
7285 BIND_EXPRs, so display them separately. */
7286 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7288 unsigned ix;
7289 ignore_topmost_bind = true;
7291 fprintf (file, "{\n");
7292 if (!vec_safe_is_empty (fun->local_decls))
7293 FOR_EACH_LOCAL_DECL (fun, ix, var)
7295 print_generic_decl (file, var, flags);
7296 if (flags & TDF_VERBOSE)
7297 print_node (file, "", var, 4);
7298 fprintf (file, "\n");
7300 any_var = true;
7302 if (gimple_in_ssa_p (cfun))
7303 for (ix = 1; ix < num_ssa_names; ++ix)
7305 tree name = ssa_name (ix);
7306 if (name && !SSA_NAME_VAR (name))
7308 fprintf (file, " ");
7309 print_generic_expr (file, TREE_TYPE (name), flags);
7310 fprintf (file, " ");
7311 print_generic_expr (file, name, flags);
7312 fprintf (file, ";\n");
7314 any_var = true;
7319 if (fun && fun->decl == fndecl
7320 && fun->cfg
7321 && basic_block_info_for_fn (fun))
7323 /* If the CFG has been built, emit a CFG-based dump. */
7324 if (!ignore_topmost_bind)
7325 fprintf (file, "{\n");
7327 if (any_var && n_basic_blocks_for_fn (fun))
7328 fprintf (file, "\n");
7330 FOR_EACH_BB_FN (bb, fun)
7331 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7333 fprintf (file, "}\n");
7335 else if (DECL_SAVED_TREE (fndecl) == NULL)
7337 /* The function is now in GIMPLE form but the CFG has not been
7338 built yet. Emit the single sequence of GIMPLE statements
7339 that make up its body. */
7340 gimple_seq body = gimple_body (fndecl);
7342 if (gimple_seq_first_stmt (body)
7343 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7344 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7345 print_gimple_seq (file, body, 0, flags);
7346 else
7348 if (!ignore_topmost_bind)
7349 fprintf (file, "{\n");
7351 if (any_var)
7352 fprintf (file, "\n");
7354 print_gimple_seq (file, body, 2, flags);
7355 fprintf (file, "}\n");
7358 else
7360 int indent;
7362 /* Make a tree based dump. */
7363 chain = DECL_SAVED_TREE (fndecl);
7364 if (chain && TREE_CODE (chain) == BIND_EXPR)
7366 if (ignore_topmost_bind)
7368 chain = BIND_EXPR_BODY (chain);
7369 indent = 2;
7371 else
7372 indent = 0;
7374 else
7376 if (!ignore_topmost_bind)
7377 fprintf (file, "{\n");
7378 indent = 2;
7381 if (any_var)
7382 fprintf (file, "\n");
7384 print_generic_stmt_indented (file, chain, flags, indent);
7385 if (ignore_topmost_bind)
7386 fprintf (file, "}\n");
7389 if (flags & TDF_ENUMERATE_LOCALS)
7390 dump_enumerated_decls (file, flags);
7391 fprintf (file, "\n\n");
7393 current_function_decl = old_current_fndecl;
7396 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7398 DEBUG_FUNCTION void
7399 debug_function (tree fn, int flags)
7401 dump_function_to_file (fn, stderr, flags);
7405 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7407 static void
7408 print_pred_bbs (FILE *file, basic_block bb)
7410 edge e;
7411 edge_iterator ei;
7413 FOR_EACH_EDGE (e, ei, bb->preds)
7414 fprintf (file, "bb_%d ", e->src->index);
7418 /* Print on FILE the indexes for the successors of basic_block BB. */
7420 static void
7421 print_succ_bbs (FILE *file, basic_block bb)
7423 edge e;
7424 edge_iterator ei;
7426 FOR_EACH_EDGE (e, ei, bb->succs)
7427 fprintf (file, "bb_%d ", e->dest->index);
7430 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7432 void
7433 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7435 char *s_indent = (char *) alloca ((size_t) indent + 1);
7436 memset ((void *) s_indent, ' ', (size_t) indent);
7437 s_indent[indent] = '\0';
7439 /* Print basic_block's header. */
7440 if (verbosity >= 2)
7442 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7443 print_pred_bbs (file, bb);
7444 fprintf (file, "}, succs = {");
7445 print_succ_bbs (file, bb);
7446 fprintf (file, "})\n");
7449 /* Print basic_block's body. */
7450 if (verbosity >= 3)
7452 fprintf (file, "%s {\n", s_indent);
7453 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7454 fprintf (file, "%s }\n", s_indent);
7458 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7460 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7461 VERBOSITY level, this outputs the contents of the loop, or just its
7462 structure. */
7464 static void
7465 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7467 char *s_indent;
7468 basic_block bb;
7470 if (loop == NULL)
7471 return;
7473 s_indent = (char *) alloca ((size_t) indent + 1);
7474 memset ((void *) s_indent, ' ', (size_t) indent);
7475 s_indent[indent] = '\0';
7477 /* Print loop's header. */
7478 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7479 if (loop->header)
7480 fprintf (file, "header = %d", loop->header->index);
7481 else
7483 fprintf (file, "deleted)\n");
7484 return;
7486 if (loop->latch)
7487 fprintf (file, ", latch = %d", loop->latch->index);
7488 else
7489 fprintf (file, ", multiple latches");
7490 fprintf (file, ", niter = ");
7491 print_generic_expr (file, loop->nb_iterations, 0);
7493 if (loop->any_upper_bound)
7495 fprintf (file, ", upper_bound = ");
7496 print_decu (loop->nb_iterations_upper_bound, file);
7499 if (loop->any_estimate)
7501 fprintf (file, ", estimate = ");
7502 print_decu (loop->nb_iterations_estimate, file);
7504 fprintf (file, ")\n");
7506 /* Print loop's body. */
7507 if (verbosity >= 1)
7509 fprintf (file, "%s{\n", s_indent);
7510 FOR_EACH_BB_FN (bb, cfun)
7511 if (bb->loop_father == loop)
7512 print_loops_bb (file, bb, indent, verbosity);
7514 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7515 fprintf (file, "%s}\n", s_indent);
7519 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7520 spaces. Depending on the VERBOSITY level, this outputs the contents
7521 of the loop, or just its structure. */
7523 static void
7524 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7525 int verbosity)
7527 if (loop == NULL)
7528 return;
7530 print_loop (file, loop, indent, verbosity);
7531 print_loop_and_siblings (file, loop->next, indent, verbosity);
7534 /* Follow a CFG edge from the entry point of the program, and on entry
7535 of a loop, pretty print the loop structure on FILE. */
7537 void
7538 print_loops (FILE *file, int verbosity)
7540 basic_block bb;
7542 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7543 if (bb && bb->loop_father)
7544 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7547 /* Dump a loop. */
7549 DEBUG_FUNCTION void
7550 debug (struct loop &ref)
7552 print_loop (stderr, &ref, 0, /*verbosity*/0);
7555 DEBUG_FUNCTION void
7556 debug (struct loop *ptr)
7558 if (ptr)
7559 debug (*ptr);
7560 else
7561 fprintf (stderr, "<nil>\n");
7564 /* Dump a loop verbosely. */
7566 DEBUG_FUNCTION void
7567 debug_verbose (struct loop &ref)
7569 print_loop (stderr, &ref, 0, /*verbosity*/3);
7572 DEBUG_FUNCTION void
7573 debug_verbose (struct loop *ptr)
7575 if (ptr)
7576 debug_verbose (*ptr);
7577 else
7578 fprintf (stderr, "<nil>\n");
7582 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7584 DEBUG_FUNCTION void
7585 debug_loops (int verbosity)
7587 print_loops (stderr, verbosity);
7590 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7592 DEBUG_FUNCTION void
7593 debug_loop (struct loop *loop, int verbosity)
7595 print_loop (stderr, loop, 0, verbosity);
7598 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7599 level. */
7601 DEBUG_FUNCTION void
7602 debug_loop_num (unsigned num, int verbosity)
7604 debug_loop (get_loop (cfun, num), verbosity);
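/* Usage sketch: these DEBUG_FUNCTIONs are meant to be called from a
   debugger, at increasing levels of detail, e.g.:

     (gdb) call debug_loops (1)        -- structure of the whole loop tree
     (gdb) call debug_loop_num (2, 3)  -- loop number 2 with its body  */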
7607 /* Return true if BB ends with a call, possibly followed by some
7608 instructions that must stay with the call. Return false
7609 otherwise. */
7611 static bool
7612 gimple_block_ends_with_call_p (basic_block bb)
7614 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7615 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7619 /* Return true if BB ends with a conditional branch. Return false
7620 otherwise. */
7622 static bool
7623 gimple_block_ends_with_condjump_p (const_basic_block bb)
7625 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7626 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7630 /* Return true if we need to add fake edge to exit at statement T.
7631 Helper function for gimple_flow_call_edges_add. */
7633 static bool
7634 need_fake_edge_p (gimple t)
7636 tree fndecl = NULL_TREE;
7637 int call_flags = 0;
7639 /* NORETURN and LONGJMP calls already have an edge to exit.
7640 CONST and PURE calls do not need one.
7641 We don't currently check for CONST and PURE here, although
7642 it would be a good idea, because those attributes are
7643 figured out from the RTL in mark_constant_function, and
7644 the counter incrementation code from -fprofile-arcs
7645 leads to different results from -fbranch-probabilities. */
7646 if (is_gimple_call (t))
7648 fndecl = gimple_call_fndecl (t);
7649 call_flags = gimple_call_flags (t);
7652 if (is_gimple_call (t)
7653 && fndecl
7654 && DECL_BUILT_IN (fndecl)
7655 && (call_flags & ECF_NOTHROW)
7656 && !(call_flags & ECF_RETURNS_TWICE)
7657 /* fork() doesn't really return twice, but the effect of
7658 wrapping it in __gcov_fork() which calls __gcov_flush()
7659 and clears the counters before forking has the same
7660 effect as returning twice. Force a fake edge. */
7661 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7662 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7663 return false;
7665 if (is_gimple_call (t))
7667 edge_iterator ei;
7668 edge e;
7669 basic_block bb;
7671 if (!(call_flags & ECF_NORETURN))
7672 return true;
7674 bb = gimple_bb (t);
7675 FOR_EACH_EDGE (e, ei, bb->succs)
7676 if ((e->flags & EDGE_FAKE) == 0)
7677 return true;
7680 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7681 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7682 return true;
7684 return false;
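/* For example, a block ending in a plain (non-const, non-noreturn) call
     foo ();                     -- hypothetical function
   needs a fake edge to EXIT, since foo might never return.  A noreturn
   call whose successor edges are all fake needs no new edge, whereas
   volatile inline assembly always gets one, since it is assumed able to
   keep control from flowing past it.  */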
7688 /* Add fake edges to the function exit for any non-constant and
7689 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
7690 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7691 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7692 that were split.
7694 The goal is to expose cases in which entering a basic block does
7695 not imply that all subsequent instructions must be executed. */
7697 static int
7698 gimple_flow_call_edges_add (sbitmap blocks)
7700 int i;
7701 int blocks_split = 0;
7702 int last_bb = last_basic_block_for_fn (cfun);
7703 bool check_last_block = false;
7705 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7706 return 0;
7708 if (! blocks)
7709 check_last_block = true;
7710 else
7711 check_last_block = bitmap_bit_p (blocks,
7712 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7714 /* In the last basic block, before epilogue generation, there will be
7715 a fallthru edge to EXIT. Special care is required if the last insn
7716 of the last basic block is a call because make_edge folds duplicate
7717 edges, which would result in the fallthru edge also being marked
7718 fake, which would result in the fallthru edge being removed by
7719 remove_fake_edges, which would result in an invalid CFG.
7721 Moreover, we can't elide the outgoing fake edge, since the block
7722 profiler needs to take this into account in order to solve the minimal
7723 spanning tree in the case that the call doesn't return.
7725 Handle this by adding a dummy instruction in a new last basic block. */
7726 if (check_last_block)
7728 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7729 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7730 gimple t = NULL;
7732 if (!gsi_end_p (gsi))
7733 t = gsi_stmt (gsi);
7735 if (t && need_fake_edge_p (t))
7737 edge e;
7739 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7740 if (e)
7742 gsi_insert_on_edge (e, gimple_build_nop ());
7743 gsi_commit_edge_inserts ();
7748 /* Now add fake edges to the function exit for any non-constant
7749 calls since there is no way that we can determine if they will
7750 return or not... */
7751 for (i = 0; i < last_bb; i++)
7753 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7754 gimple_stmt_iterator gsi;
7755 gimple stmt, last_stmt;
7757 if (!bb)
7758 continue;
7760 if (blocks && !bitmap_bit_p (blocks, i))
7761 continue;
7763 gsi = gsi_last_nondebug_bb (bb);
7764 if (!gsi_end_p (gsi))
7766 last_stmt = gsi_stmt (gsi);
7769 stmt = gsi_stmt (gsi);
7770 if (need_fake_edge_p (stmt))
7772 edge e;
7774 /* The handling above of the final block before the
7775 epilogue should be enough to verify that there is
7776 no edge to the exit block in CFG already.
7777 Calling make_edge in such case would cause us to
7778 mark that edge as fake and remove it later. */
7779 #ifdef ENABLE_CHECKING
7780 if (stmt == last_stmt)
7782 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7783 gcc_assert (e == NULL);
7785 #endif
7787 /* Note that the following may create a new basic block
7788 and renumber the existing basic blocks. */
7789 if (stmt != last_stmt)
7791 e = split_block (bb, stmt);
7792 if (e)
7793 blocks_split++;
7795 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7797 gsi_prev (&gsi);
7799 while (!gsi_end_p (gsi));
7803 if (blocks_split)
7804 verify_flow_info ();
7806 return blocks_split;
7809 /* Removes edge E and all the blocks dominated by it, and updates dominance
7810 information. The IL in E->src needs to be updated separately.
7811 If dominance info is not available, only the edge E is removed. */
7813 void
7814 remove_edge_and_dominated_blocks (edge e)
7816 vec<basic_block> bbs_to_remove = vNULL;
7817 vec<basic_block> bbs_to_fix_dom = vNULL;
7818 bitmap df, df_idom;
7819 edge f;
7820 edge_iterator ei;
7821 bool none_removed = false;
7822 unsigned i;
7823 basic_block bb, dbb;
7824 bitmap_iterator bi;
7826 if (!dom_info_available_p (CDI_DOMINATORS))
7828 remove_edge (e);
7829 return;
7832 /* No updating is needed for edges to exit. */
7833 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7835 if (cfgcleanup_altered_bbs)
7836 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7837 remove_edge (e);
7838 return;
7841 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7842 that is not dominated by E->dest, then this set is empty. Otherwise,
7843 all the basic blocks dominated by E->dest are removed.
7845 Also, to DF_IDOM we store the immediate dominators of the blocks in
7846 the dominance frontier of E (i.e., of the successors of the
7847 removed blocks, if there are any, and of E->dest otherwise). */
7848 FOR_EACH_EDGE (f, ei, e->dest->preds)
7850 if (f == e)
7851 continue;
7853 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7855 none_removed = true;
7856 break;
7860 df = BITMAP_ALLOC (NULL);
7861 df_idom = BITMAP_ALLOC (NULL);
7863 if (none_removed)
7864 bitmap_set_bit (df_idom,
7865 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7866 else
7868 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7869 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7871 FOR_EACH_EDGE (f, ei, bb->succs)
7873 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7874 bitmap_set_bit (df, f->dest->index);
7877 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7878 bitmap_clear_bit (df, bb->index);
7880 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7882 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7883 bitmap_set_bit (df_idom,
7884 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7888 if (cfgcleanup_altered_bbs)
7890 /* Record the set of the altered basic blocks. */
7891 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7892 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7895 /* Remove E and the cancelled blocks. */
7896 if (none_removed)
7897 remove_edge (e);
7898 else
7900 /* Walk backwards so as to get a chance to substitute all
7901 released DEFs into debug stmts. See
7902 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7903 details. */
7904 for (i = bbs_to_remove.length (); i-- > 0; )
7905 delete_basic_block (bbs_to_remove[i]);
7908 /* Update the dominance information. The immediate dominator may change only
7909 for blocks whose immediate dominator belongs to DF_IDOM:
7911 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7912 removal. Let Z be a block such that idom(Z) = Y and
7913 Z dominates X after the removal. Before removal, there exists a path P
7914 from Y to X that avoids Z. Let F be the last edge on P that is
7915 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7916 dominates W, and because of P, Z does not dominate W), and W belongs to
7917 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7918 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7920 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7921 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7922 dbb;
7923 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7924 bbs_to_fix_dom.safe_push (dbb);
7927 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7929 BITMAP_FREE (df);
7930 BITMAP_FREE (df_idom);
7931 bbs_to_remove.release ();
7932 bbs_to_fix_dom.release ();
7935 /* Purge dead EH edges from basic block BB. */
7937 bool
7938 gimple_purge_dead_eh_edges (basic_block bb)
7940 bool changed = false;
7941 edge e;
7942 edge_iterator ei;
7943 gimple stmt = last_stmt (bb);
7945 if (stmt && stmt_can_throw_internal (stmt))
7946 return false;
7948 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7950 if (e->flags & EDGE_EH)
7952 remove_edge_and_dominated_blocks (e);
7953 changed = true;
7955 else
7956 ei_next (&ei);
7959 return changed;
7962 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7964 bool
7965 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7967 bool changed = false;
7968 unsigned i;
7969 bitmap_iterator bi;
7971 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7973 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7975 /* Earlier gimple_purge_dead_eh_edges could have removed
7976 this basic block already. */
7977 gcc_assert (bb || changed);
7978 if (bb != NULL)
7979 changed |= gimple_purge_dead_eh_edges (bb);
7982 return changed;
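/* Usage sketch: a pass that folds away throwing statements typically
   collects the indices of affected blocks in a bitmap and purges them
   all at once (a common pattern elsewhere in GCC, sketched here):

     bitmap need_eh_cleanup = BITMAP_ALLOC (NULL);
     ...
     bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
     ...
     bool changed = gimple_purge_all_dead_eh_edges (need_eh_cleanup);
     BITMAP_FREE (need_eh_cleanup);  */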
7985 /* Purge dead abnormal call edges from basic block BB. */
7987 bool
7988 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7990 bool changed = false;
7991 edge e;
7992 edge_iterator ei;
7993 gimple stmt = last_stmt (bb);
7995 if (!cfun->has_nonlocal_label
7996 && !cfun->calls_setjmp)
7997 return false;
7999 if (stmt && stmt_can_make_abnormal_goto (stmt))
8000 return false;
8002 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8004 if (e->flags & EDGE_ABNORMAL)
8006 if (e->flags & EDGE_FALLTHRU)
8007 e->flags &= ~EDGE_ABNORMAL;
8008 else
8009 remove_edge_and_dominated_blocks (e);
8010 changed = true;
8012 else
8013 ei_next (&ei);
8016 return changed;
8019 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8021 bool
8022 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8024 bool changed = false;
8025 unsigned i;
8026 bitmap_iterator bi;
8028 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8030 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8032 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8033 this basic block already. */
8034 gcc_assert (bb || changed);
8035 if (bb != NULL)
8036 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8039 return changed;
8042 /* This function is called whenever a new edge is created or
8043 redirected. */
8045 static void
8046 gimple_execute_on_growing_pred (edge e)
8048 basic_block bb = e->dest;
8050 if (!gimple_seq_empty_p (phi_nodes (bb)))
8051 reserve_phi_args_for_new_edge (bb);
8054 /* This function is called immediately before edge E is removed from
8055 the edge vector E->dest->preds. */
8057 static void
8058 gimple_execute_on_shrinking_pred (edge e)
8060 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8061 remove_phi_args (e);
8064 /*---------------------------------------------------------------------------
8065 Helper functions for Loop versioning
8066 ---------------------------------------------------------------------------*/
8068 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8069 of 'first'. Both of them are dominated by 'new_head' basic block. When
8070 'new_head' was created by splitting 'second's incoming edge, it received
8071 phi arguments on that edge from split_edge(). Later, an additional edge
8072 'e' was created to connect 'new_head' and 'first'. This routine now adds,
8073 on this additional edge 'e', the phi args that the new_head->second edge
8074 received as part of the edge splitting. */
8076 static void
8077 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8078 basic_block new_head, edge e)
8080 gphi *phi1, *phi2;
8081 gphi_iterator psi1, psi2;
8082 tree def;
8083 edge e2 = find_edge (new_head, second);
8085 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8086 edge, we should always have an edge from NEW_HEAD to SECOND. */
8087 gcc_assert (e2 != NULL);
8089 /* Browse all 'second' basic block phi nodes and add phi args to
8090 edge 'e' for 'first' head. PHI args are always in correct order. */
8092 for (psi2 = gsi_start_phis (second),
8093 psi1 = gsi_start_phis (first);
8094 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8095 gsi_next (&psi2), gsi_next (&psi1))
8097 phi1 = psi1.phi ();
8098 phi2 = psi2.phi ();
8099 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8100 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8105 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8106 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8107 the destination of the ELSE part. */
8109 static void
8110 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8111 basic_block second_head ATTRIBUTE_UNUSED,
8112 basic_block cond_bb, void *cond_e)
8114 gimple_stmt_iterator gsi;
8115 gimple new_cond_expr;
8116 tree cond_expr = (tree) cond_e;
8117 edge e0;
8119 /* Build new conditional expr */
8120 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8121 NULL_TREE, NULL_TREE);
8123 /* Add new cond in cond_bb. */
8124 gsi = gsi_last_bb (cond_bb);
8125 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8127 /* Adjust edges appropriately to connect new head with first head
8128 as well as second head. */
8129 e0 = single_succ_edge (cond_bb);
8130 e0->flags &= ~EDGE_FALLTHRU;
8131 e0->flags |= EDGE_FALSE_VALUE;
8135 /* Do book-keeping of basic block BB for the profile consistency checker.
8136 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8137 do post-pass accounting. Store the counts in RECORD. */
8138 static void
8139 gimple_account_profile_record (basic_block bb, int after_pass,
8140 struct profile_record *record)
8142 gimple_stmt_iterator i;
8143 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8145 record->size[after_pass]
8146 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8147 if (profile_status_for_fn (cfun) == PROFILE_READ)
8148 record->time[after_pass]
8149 += estimate_num_insns (gsi_stmt (i),
8150 &eni_time_weights) * bb->count;
8151 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8152 record->time[after_pass]
8153 += estimate_num_insns (gsi_stmt (i),
8154 &eni_time_weights) * bb->frequency;
8158 struct cfg_hooks gimple_cfg_hooks = {
8159 "gimple",
8160 gimple_verify_flow_info,
8161 gimple_dump_bb, /* dump_bb */
8162 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8163 create_bb, /* create_basic_block */
8164 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8165 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8166 gimple_can_remove_branch_p, /* can_remove_branch_p */
8167 remove_bb, /* delete_basic_block */
8168 gimple_split_block, /* split_block */
8169 gimple_move_block_after, /* move_block_after */
8170 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8171 gimple_merge_blocks, /* merge_blocks */
8172 gimple_predict_edge, /* predict_edge */
8173 gimple_predicted_by_p, /* predicted_by_p */
8174 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8175 gimple_duplicate_bb, /* duplicate_block */
8176 gimple_split_edge, /* split_edge */
8177 gimple_make_forwarder_block, /* make_forwarder_block */
8178 NULL, /* tidy_fallthru_edge */
8179 NULL, /* force_nonfallthru */
8180 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8181 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8182 gimple_flow_call_edges_add, /* flow_call_edges_add */
8183 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8184 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8185 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8186 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8187 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8188 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8189 flush_pending_stmts, /* flush_pending_stmts */
8190 gimple_empty_block_p, /* block_empty_p */
8191 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8192 gimple_account_profile_record,
8196 /* Split all critical edges. */
8198 unsigned int
8199 split_critical_edges (void)
8201 basic_block bb;
8202 edge e;
8203 edge_iterator ei;
8205 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8206 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8207 mappings around the calls to split_edge. */
8208 start_recording_case_labels ();
8209 FOR_ALL_BB_FN (bb, cfun)
8211 FOR_EACH_EDGE (e, ei, bb->succs)
8213 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8214 split_edge (e);
8215 /* PRE inserts statements to edges and expects that
8216 since split_critical_edges was done beforehand, committing edge
8217 insertions will not split more edges. In addition to critical
8218 edges we must split edges that have multiple successors and
8219 end by control flow statements, such as RESX.
8220 Go ahead and split them too. This matches the logic in
8221 gimple_find_edge_insert_loc. */
8222 else if ((!single_pred_p (e->dest)
8223 || !gimple_seq_empty_p (phi_nodes (e->dest))
8224 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8225 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8226 && !(e->flags & EDGE_ABNORMAL))
8228 gimple_stmt_iterator gsi;
8230 gsi = gsi_last_bb (e->src);
8231 if (!gsi_end_p (gsi)
8232 && stmt_ends_bb_p (gsi_stmt (gsi))
8233 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8234 && !gimple_call_builtin_p (gsi_stmt (gsi),
8235 BUILT_IN_RETURN)))
8236 split_edge (e);
8240 end_recording_case_labels ();
8241 return 0;
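/* A critical edge is one whose source has several successors and whose
   destination has several predecessors, e.g. the edge A->C below:

        A   B
       / \ /
      D   C

   No statement can be inserted on A->C without either duplicating it on
   A->D or also executing it on B->C, hence such edges are split.  */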
8244 namespace {
8246 const pass_data pass_data_split_crit_edges =
8248 GIMPLE_PASS, /* type */
8249 "crited", /* name */
8250 OPTGROUP_NONE, /* optinfo_flags */
8251 TV_TREE_SPLIT_EDGES, /* tv_id */
8252 PROP_cfg, /* properties_required */
8253 PROP_no_crit_edges, /* properties_provided */
8254 0, /* properties_destroyed */
8255 0, /* todo_flags_start */
8256 0, /* todo_flags_finish */
8259 class pass_split_crit_edges : public gimple_opt_pass
8261 public:
8262 pass_split_crit_edges (gcc::context *ctxt)
8263 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8266 /* opt_pass methods: */
8267 virtual unsigned int execute (function *) { return split_critical_edges (); }
8269 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8270 }; // class pass_split_crit_edges
8272 } // anon namespace
8274 gimple_opt_pass *
8275 make_pass_split_crit_edges (gcc::context *ctxt)
8277 return new pass_split_crit_edges (ctxt);
8281 /* Insert COND expression which is GIMPLE_COND after STMT
8282 in basic block BB with appropriate basic block split
8283 and creation of a new conditionally executed basic block.
8284 Return created basic block. */
8285 basic_block
8286 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8288 edge fall = split_block (bb, stmt);
8289 gimple_stmt_iterator iter = gsi_last_bb (bb);
8290 basic_block new_bb;
8292 /* Insert cond statement. */
8293 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8294 if (gsi_end_p (iter))
8295 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8296 else
8297 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8299 /* Create conditionally executed block. */
8300 new_bb = create_empty_bb (bb);
8301 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8302 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8304 /* Fix edge for split bb. */
8305 fall->flags = EDGE_FALSE_VALUE;
8307 /* Update dominance info. */
8308 if (dom_info_available_p (CDI_DOMINATORS))
8310 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8311 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8314 /* Update loop info. */
8315 if (current_loops)
8316 add_bb_to_loop (new_bb, bb->loop_father);
8318 return new_bb;
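/* Usage sketch: after

     new_bb = insert_cond_bb (bb, stmt, cond);

   BB ends in COND, its EDGE_TRUE_VALUE successor is NEW_BB, its
   EDGE_FALSE_VALUE successor is the half of BB that was split off after
   STMT, and NEW_BB falls through into that same block.  */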
8321 /* Build a ternary operation and gimplify it. Emit code before GSI.
8322 Return the gimple_val holding the result. */
8324 tree
8325 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8326 tree type, tree a, tree b, tree c)
8328 tree ret;
8329 location_t loc = gimple_location (gsi_stmt (*gsi));
8331 ret = fold_build3_loc (loc, code, type, a, b, c);
8332 STRIP_NOPS (ret);
8334 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8335 GSI_SAME_STMT);
8338 /* Build a binary operation and gimplify it. Emit code before GSI.
8339 Return the gimple_val holding the result. */
8341 tree
8342 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8343 tree type, tree a, tree b)
8345 tree ret;
8347 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8348 STRIP_NOPS (ret);
8350 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8351 GSI_SAME_STMT);
8354 /* Build a unary operation and gimplify it. Emit code before GSI.
8355 Return the gimple_val holding the result. */
8357 tree
8358 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8359 tree a)
8361 tree ret;
8363 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8364 STRIP_NOPS (ret);
8366 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8367 GSI_SAME_STMT);
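/* Usage sketch: the three builders compose, e.g. to emit
   t = (a + b) * c as gimple statements before GSI:

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree t = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds its operands and forces the result into a gimple
   value, inserting any required statements before the iterator.  */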
8372 /* Given a basic block B which ends with a conditional and has
8373 precisely two successors, determine which of the edges is taken if
8374 the conditional is true and which is taken if the conditional is
8375 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8377 void
8378 extract_true_false_edges_from_block (basic_block b,
8379 edge *true_edge,
8380 edge *false_edge)
8382 edge e = EDGE_SUCC (b, 0);
8384 if (e->flags & EDGE_TRUE_VALUE)
8386 *true_edge = e;
8387 *false_edge = EDGE_SUCC (b, 1);
8389 else
8391 *false_edge = e;
8392 *true_edge = EDGE_SUCC (b, 1);
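/* Usage sketch: B must end in a GIMPLE_COND and have exactly two
   successors, flagged EDGE_TRUE_VALUE and EDGE_FALSE_VALUE:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);  */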
8396 /* Emit return warnings. */
8398 namespace {
8400 const pass_data pass_data_warn_function_return =
8402 GIMPLE_PASS, /* type */
8403 "*warn_function_return", /* name */
8404 OPTGROUP_NONE, /* optinfo_flags */
8405 TV_NONE, /* tv_id */
8406 PROP_cfg, /* properties_required */
8407 0, /* properties_provided */
8408 0, /* properties_destroyed */
8409 0, /* todo_flags_start */
8410 0, /* todo_flags_finish */
8413 class pass_warn_function_return : public gimple_opt_pass
8415 public:
8416 pass_warn_function_return (gcc::context *ctxt)
8417 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8420 /* opt_pass methods: */
8421 virtual unsigned int execute (function *);
8423 }; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (fun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
        {
          gimple last = last_stmt (e->src);
          greturn *return_stmt = dyn_cast <greturn *> (last);
          if (return_stmt
              && gimple_return_retval (return_stmt) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = fun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (fun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
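
/* Example inputs (added for illustration) that trigger the two warnings
   issued by this pass:

     __attribute__ ((noreturn)) void f (void)
     { }                      // "'noreturn' function does return"

     int g (int x)
     { if (x) return 1; }     // -Wreturn-type: "control reaches end
                              //  of non-void function"
*/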

/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
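
/* Example (added for illustration; try_lock is hypothetical): given

     __attribute__ ((warn_unused_result)) int try_lock (struct lock *);

   the statement "try_lock (&l);" gimplifies to a naked GIMPLE_CALL with
   no LHS, so the walk above finds the attribute on the call's function
   type and reports it under -Wunused-result.  */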

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions, or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const, or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
                            ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep the access if the store has a side effect, i.e. when the
             source is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (TREE_CODE (lhs) == VAR_DECL
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }

          /* For calls we can simply remove the LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (TREE_CODE (lhs) == VAR_DECL
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              stmt = gimple_build_call
                (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }

  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
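
/* Example scenario (added for illustration) for the __builtin_unreachable
   fixup above: a buggy noreturn function that actually falls off its end,

     __attribute__ ((noreturn)) void dies (void) { }

   when inlined leaves the caller with a block that has no successors yet
   ends in an ordinary statement; appending the __builtin_unreachable call
   keeps the CFG verifier satisfied.  */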

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
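
/* Note (added commentary): the three routines above form the usual GTY
   triple for a struct the gengtype machinery cannot handle by itself:
   gt_ggc_mx marks the fields reachable during garbage collection,
   gt_pch_nx registers them when a precompiled header is written, and the
   gt_pointer_operator overload relocates them when the PCH is read back
   at a different address.  */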