Merge branch 'master' r217593-r217725 into gimple-classes-v2-option-3
[official-gcc.git] / gcc / tree-cfg.c
blob098ebfa020afd0e7d2d383fcc8ac8bc6564e01eb
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
31 #include "tm_p.h"
32 #include "predict.h"
33 #include "vec.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "hard-reg-set.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "flags.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
49 #include "tree-eh.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "plugin-api.h"
58 #include "ipa-ref.h"
59 #include "cgraph.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-ssa-loop-manip.h"
66 #include "tree-ssa-loop-niter.h"
67 #include "tree-into-ssa.h"
68 #include "expr.h"
69 #include "tree-dfa.h"
70 #include "tree-ssa.h"
71 #include "tree-dump.h"
72 #include "tree-pass.h"
73 #include "diagnostic-core.h"
74 #include "except.h"
75 #include "cfgloop.h"
76 #include "tree-ssa-propagate.h"
77 #include "value-prof.h"
78 #include "tree-inline.h"
79 #include "target.h"
80 #include "tree-ssa-live.h"
81 #include "omp-low.h"
82 #include "tree-cfgcleanup.h"
83 #include "wide-int.h"
84 #include "wide-int-print.h"
86 /* This file contains functions for building the Control Flow Graph (CFG)
87 for a function tree. */
89 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

/* NULL when recording is inactive; see start/end_recording_case_labels.  */
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  Collected during CFG construction; currently only
   tracks how many labels were merged away by cleanup_dead_labels.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
/* Hash table to store last discriminator assigned for each locus.
   One entry per source line; DISCRIMINATOR is the highest value
   handed out so far for that line.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
/* Hashtable helpers.  Entries are freed with free () on table
   destruction (typed_free_remove).  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.
   Hashes on the line number only, matching the equality function
   below which also compares lines (not columns).  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Two entries are
   considered equal when they refer to the same source line.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

/* Live only while edges are being built; see build_gimple_cfg.  */
static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
/* Set up an empty CFG skeleton for function FN: allocate the basic
   block and label-to-block arrays, install the ENTRY and EXIT blocks,
   and link them to each other.  No real blocks exist afterwards.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no intervening blocks yet, ENTRY and EXIT are adjacent.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Convenience wrapper: initialize an empty CFG for the current
   function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
222 /*---------------------------------------------------------------------------
223 Create basic blocks
224 ---------------------------------------------------------------------------*/
226 /* Entry point to the CFG builder for trees. SEQ is the sequence of
227 statements to be added to the flowgraph. */
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  Builds blocks first, then
   cleans labels, then builds edges; the discriminator table exists
   only for the duration of edge construction.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
268 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
269 them and propagate the information to LOOP. We assume that the annotations
270 come immediately before the condition in BB, if any. */
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  Each consumed
   annotation call is replaced by a plain copy of its first argument so
   downstream users of its LHS still see a definition.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  /* Without a trailing GIMPLE_COND there is nothing to anchor on.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards from the condition over consecutive IFN_ANNOTATE
     calls; stop at the first statement that is not one.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* Second argument encodes the annotation kind.  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Replace the call by LHS = first-arg, preserving the SSA def.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
312 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
313 them and propagate the information to the loop. We assume that the
314 annotations come immediately before the condition of the loop. */
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.
   A second sweep over all blocks drops any annotation calls that were
   not consumed (e.g. when loop->latch is NULL), with a warning.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  /* As above, replace the call by a plain copy of its argument.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Pass body for the "cfg" pass: build the CFG from the function's
   gimple body, detach that body (blocks now own the statements),
   clean up, initialize loops, and fold loop annotations into the
   loop structures.  Always returns 0 (no TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; clear the body pointer
     so nothing iterates the stale sequence.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass metadata for the CFG construction pass.  Provides PROP_cfg and
   PROP_loops; requires lowered EH (PROP_gimple_leh).  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
419 /* Return true if T is a computed goto. */
421 bool
422 computed_goto_p (gimple t)
424 return (gimple_code (t) == GIMPLE_GOTO
425 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
428 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
429 the other edge points to a bb with just __builtin_unreachable ().
430 I.e. return true for C->M edge in:
431 <bb C>:
433 if (something)
434 goto <bb N>;
435 else
436 goto <bb M>;
437 <bb N>:
438 __builtin_unreachable ();
439 <bb M>: */
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:
   Debug statements and clobbers in N are skipped before testing
   for the __builtin_unreachable call.  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Pick whichever successor of the condition is NOT E->dest.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* A block ending in __builtin_unreachable has no successors.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
473 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
474 could alter control flow except via eh. We initialize the flag at
475 CFG build time and only ever clear it later. */
477 static void
478 gimple_call_initialize_ctrl_altering (gimple stmt)
480 int flags = gimple_call_flags (stmt);
482 /* A call alters control flow if it can make an abnormal goto. */
483 if (call_can_make_abnormal_goto (stmt)
484 /* A call also alters control flow if it does not return. */
485 || flags & ECF_NORETURN
486 /* TM ending statements have backedges out of the transaction.
487 Return true so we split the basic block containing them.
488 Note that the TM_BUILTIN test is merely an optimization. */
489 || ((flags & ECF_TM_BUILTIN)
490 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
491 /* BUILT_IN_RETURN call is same as return statement. */
492 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
493 gimple_call_set_ctrl_altering (stmt, true);
494 else
495 gimple_call_set_ctrl_altering (stmt, false);
499 /* Build a flowgraph for the sequence of stmts SEQ. */
/* Build a flowgraph for the sequence of stmts SEQ: partition SEQ into
   basic blocks, splitting before statements that must start a block
   and after statements that must end one.  Also initializes the
   ctrl-altering flag on calls and rewrites LHS of abnormal-goto calls
   through a temporary so old values survive on the abnormal edge.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      /* Emit "lhs = tmp" after STMT; STMT itself now defines TMP.  */
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
568 /* Create and return a new empty basic block after bb AFTER. */
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the gimple_seq to install as the block's statement
   list; E must be NULL (edge argument unused by the gimple hooks).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      /* Grow by ~25% to amortize reallocation cost.  */
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
609 /*---------------------------------------------------------------------------
610 Edge creation
611 ---------------------------------------------------------------------------*/
613 /* Fold COND_EXPR_COND of each COND_EXPR. */
/* Fold COND_EXPR_COND of each COND_EXPR: try to fold every block's
   trailing GIMPLE_COND to a constant; if it folds to 0 or 1, rewrite
   the condition to a canonical always-false/always-true form.
   Overflow warnings are deferred so they are only emitted when the
   fold actually changes the condition.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
655 /* If basic block BB has an abnormal edge to a basic block
656 containing IFN_ABNORMAL_DISPATCHER internal call, return
657 that the dispatcher's basic block, otherwise return NULL. */
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that the dispatcher's basic block, otherwise return NULL.
   Edges that are both abnormal and EH are deliberately excluded.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	/* The dispatcher call is the first real statement of E->dest.  */
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
680 /* Helper function for make_edges. Create a basic block with
681 with ABNORMAL_DISPATCHER internal call in it if needed, and
682 create abnormal edges from BBS to it and from it to FOR_BB
683 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
/* Helper function for make_edges.  Create a basic block with
   with ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.

   DISPATCHER_BBS caches already-created dispatchers: without OMP
   regions it is a 2-element array indexed by COMPUTED_GOTO; with
   BB_TO_OMP_IDX it holds a pair of slots per OMP region, so each
   SESE region gets its own dispatchers.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  /* Only sources in the same OMP region count; BB is NULL after
	     the loop iff none matched.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      /* The original goto is replaced by a fallthru into the
		 dispatcher.  */
	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  /* ARG tells expansion whether this is an inner (nested OMP
	     region) dispatcher.  */
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
793 /* Join all the blocks in the flowgraph. */
/* Join all the blocks in the flowgraph.  First pass: place ordinary
   edges per terminating statement, collecting blocks that source
   abnormal edges (computed gotos and abnormal-goto calls).  Second
   pass: factor those abnormal edges through dispatcher blocks, one
   pair per OMP region when OMP regions are present.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      /* make_goto_expr_edges returns true for computed gotos.  */
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru =
		make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      /* Allocate the region-index map lazily on first OMP region.  */
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label =
		  gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* Scan the block's leading labels.  */
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt =
		dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
996 /* Find the next available discriminator value for LOCUS. The
997 discriminator distinguishes among several basic blocks that
998 share a common locus, allowing for more accurate sample-based
999 profiling. */
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  The first call for a given source line returns 1;
   each subsequent call returns the next integer.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First time we see this line: allocate a fresh counter.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1023 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1025 static bool
1026 same_line_p (location_t locus1, location_t locus2)
1028 expanded_location from, to;
1030 if (locus1 == locus2)
1031 return true;
1033 from = expand_location (locus1);
1034 to = expand_location (locus2);
1036 if (from.line != to.line)
1037 return false;
1038 if (from.file == to.file)
1039 return true;
1040 return (from.file != NULL
1041 && to.file != NULL
1042 && filename_cmp (from.file, to.file) == 0);
1045 /* Assign discriminators to each basic block. */
/* Assign discriminators to each basic block: when a block's last
   statement shares its source line with a successor block, give one
   of the two blocks a fresh discriminator so sample-based profiling
   can tell them apart.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  /* NOTE(review): this LAST intentionally shadows the outer LAST;
	     it refers to the successor's last statement.  */
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      /* Prefer giving BB the discriminator when the successor
		 already has one and BB does not.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
1078 /* Create the edges for a GIMPLE_COND starting at block BB. */
/* Create the edges for a GIMPLE_COND starting at block BB: a
   true-value edge to the then-label's block and a false-value edge to
   the else-label's block.  The labels are cleared afterwards, since
   the edges now carry the control-flow information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1112 /* Called for each element in the hash table (P) as we delete the
1113 edge to cases hash table.
1115 Clear all the TREE_CHAINs to prevent problems with copying of
1116 SWITCH_EXPRs and structure sharing rules, then free the hash table
1117 element. */
1119 bool
1120 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1122 tree t, next;
1124 for (t = value; t; t = next)
1126 next = CASE_CHAIN (t);
1127 CASE_CHAIN (t) = NULL;
1130 return true;
1133 /* Start recording information mapping edges to case labels. */
1135 void
1136 start_recording_case_labels (void)
1138 gcc_assert (edge_to_cases == NULL);
1139 edge_to_cases = new hash_map<edge, tree>;
1140 touched_switch_bbs = BITMAP_ALLOC (NULL);
1143 /* Return nonzero if we are recording information for case labels. */
1145 static bool
1146 recording_case_labels_p (void)
1148 return (edge_to_cases != NULL);
1151 /* Stop recording information mapping edges to case labels and
1152 remove any information we have recorded.  */
1153 void
1154 end_recording_case_labels (void)
1156 bitmap_iterator bi;
1157 unsigned i;
/* First sever all CASE_CHAIN links recorded in the map, then discard
   the map itself.  */
1158 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1159 delete edge_to_cases;
1160 edge_to_cases = NULL;
/* Re-group the case labels of every switch whose block was touched
   while recording; the block may have been deleted in the meantime,
   hence the NULL check.  */
1161 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1163 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1164 if (bb)
1166 gimple stmt = last_stmt (bb);
1167 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1168 group_case_labels_stmt (as_a <gswitch *> (stmt));
1171 BITMAP_FREE (touched_switch_bbs);
1174 /* If we are inside a {start,end}_recording_cases block, then return
1175 a chain of CASE_LABEL_EXPRs from T which reference E.
1177 Otherwise return NULL.  */
1179 static tree
1180 get_cases_for_edge (edge e, gswitch *t)
1182 tree *slot;
1183 size_t i, n;
1185 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1186 chains available. Return NULL so the caller can detect this case.  */
1187 if (!recording_case_labels_p ())
1188 return NULL;
1190 slot = edge_to_cases->get (e);
1191 if (slot)
1192 return *slot;
1194 /* If we did not find E in the hash table, then this must be the first
1195 time we have been queried for information about E & T. Add all the
1196 elements from T to the hash table then perform the query again.  */
1198 n = gimple_switch_num_labels (t);
1199 for (i = 0; i < n; i++)
1201 tree elt = gimple_switch_label (t, i);
1202 tree lab = CASE_LABEL (elt);
1203 basic_block label_bb = label_to_block (lab);
1204 edge this_edge = find_edge (e->src, label_bb);
1206 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1207 a new chain.  */
/* Prepend ELT to the chain stored for THIS_EDGE; S is a reference into
   the hash table slot, so assigning to it updates the map in place.  */
1208 tree &s = edge_to_cases->get_or_insert (this_edge);
1209 CASE_CHAIN (elt) = s;
1210 s = elt;
/* Every edge out of E->src now has an entry, so this lookup must
   succeed.  */
1213 return *edge_to_cases->get (e);
1216 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1218 static void
1219 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1221 size_t i, n;
1223 n = gimple_switch_num_labels (entry);
1225 for (i = 0; i < n; ++i)
1227 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1228 basic_block label_bb = label_to_block (lab);
1229 make_edge (bb, label_bb, 0);
1234 /* Return the basic block holding label DEST.  */
1236 basic_block
1237 label_to_block_fn (struct function *ifun, tree dest)
1239 int uid = LABEL_DECL_UID (dest);
1241 /* We would die hard when faced by an undefined label. Emit a label to
1242 the very first basic block. This will hopefully make even the dataflow
1243 and undefined variable warnings quite right.  */
/* This recovery path only triggers after a diagnosed source error; it
   plants DEST at the first real block so lookups below succeed.  */
1244 if (seen_error () && uid < 0)
1246 gimple_stmt_iterator gsi =
1247 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS))
1248 gimple stmt;
1250 stmt = gimple_build_label (dest);
1251 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Inserting the label assigned DEST a UID; re-read it.  */
1252 uid = LABEL_DECL_UID (dest);
1254 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1255 return NULL;
1256 return (*ifun->cfg->x_label_to_block_map)[uid];
1259 /* Create edges for a goto statement at block BB. Returns true
1260 if abnormal edges should be created.  */
1262 static bool
1263 make_goto_expr_edges (basic_block bb)
1265 gimple_stmt_iterator last = gsi_last_bb (bb);
1266 gimple goto_t = gsi_stmt (last);
1268 /* A simple GOTO creates normal edges.  */
1269 if (simple_goto_p (goto_t))
1271 tree dest = gimple_goto_dest (goto_t);
1272 basic_block label_bb = label_to_block (dest);
1273 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1274 e->goto_locus = gimple_location (goto_t);
/* The edge now encodes the transfer, so the goto statement itself is
   redundant and is deleted here as a side effect.  */
1275 gsi_remove (&last, true);
1276 return false;
/* A computed goto's targets are unknown here; tell the caller to add
   abnormal edges instead.  */
1279 /* A computed GOTO creates abnormal edges.  */
1280 return true;
1283 /* Create edges for an asm statement with labels at block BB. */
1285 static void
1286 make_gimple_asm_edges (basic_block bb)
1288 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1289 int i, n = gimple_asm_nlabels (stmt);
1291 for (i = 0; i < n; ++i)
1293 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1294 basic_block label_bb = label_to_block (label);
1295 make_edge (bb, label_bb, 0);
1299 /*---------------------------------------------------------------------------
1300 Flowgraph analysis
1301 ---------------------------------------------------------------------------*/
1303 /* Cleanup useless labels in basic blocks. This is something we wish
1304 to do early because it allows us to group case labels before creating
1305 the edges for the CFG, and it speeds up block statement iterators in
1306 all passes later on.
1307 We rerun this pass after CFG is created, to get rid of the labels that
1308 are no longer referenced. After that we do not run it anymore, since
1309 (almost) no new labels should be created.  */
1311 /* A map from basic block index to the leading label of that block.  */
/* Allocated and freed by cleanup_dead_labels; indexed by bb->index.  */
1312 static struct label_record
1314 /* The label.  */
1315 tree label;
1317 /* True if the label is referenced from somewhere.  */
1318 bool used;
1319 } *label_for_bb;
1321 /* Given LABEL return the first label in the same basic block. */
1323 static tree
1324 main_block_label (tree label)
1326 basic_block bb = label_to_block (label);
1327 tree main_label = label_for_bb[bb->index].label;
1329 /* label_to_block possibly inserted undefined label into the chain. */
1330 if (!main_label)
1332 label_for_bb[bb->index].label = label;
1333 main_label = label;
1336 label_for_bb[bb->index].used = true;
1337 return main_label;
1340 /* Clean up redundant labels within the exception tree.  */
1342 static void
1343 cleanup_dead_labels_eh (void)
1345 eh_landing_pad lp;
1346 eh_region r;
1347 tree lab;
1348 int i;
1350 if (cfun->eh == NULL)
1351 return;
/* Replace each post-landing-pad label by its block's leading label,
   transferring the landing-pad number to the replacement.  Index 0 of
   lp_array is unused, hence starting at 1.  */
1353 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1354 if (lp && lp->post_landing_pad)
1356 lab = main_block_label (lp->post_landing_pad);
1357 if (lab != lp->post_landing_pad)
1359 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1360 EH_LANDING_PAD_NR (lab) = lp->index;
/* Canonicalize the labels stored in each EH region; only TRY and
   ALLOWED_EXCEPTIONS regions carry labels.  */
1364 FOR_ALL_EH_REGION (r)
1365 switch (r->type)
1367 case ERT_CLEANUP:
1368 case ERT_MUST_NOT_THROW:
1369 break;
1371 case ERT_TRY:
1373 eh_catch c;
1374 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1376 lab = c->label;
1377 if (lab)
1378 c->label = main_block_label (lab);
1381 break;
1383 case ERT_ALLOWED_EXCEPTIONS:
1384 lab = r->u.allowed.label;
1385 if (lab)
1386 r->u.allowed.label = main_block_label (lab);
1387 break;
1392 /* Cleanup redundant labels. This is a three-step process:
1393 1) Find the leading label for each block.
1394 2) Redirect all references to labels to the leading labels.
1395 3) Cleanup all useless labels.  */
1397 void
1398 cleanup_dead_labels (void)
1400 basic_block bb;
/* Zero-initialized, so every block starts with no label and used ==
   false; released at the end of this function.  */
1401 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1403 /* Find a suitable label for each block. We use the first user-defined
1404 label if there is one, or otherwise just the first label we see.  */
1405 FOR_EACH_BB_FN (bb, cfun)
1407 gimple_stmt_iterator i;
1409 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1411 tree label;
1412 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels can only appear at the start of a block; the first non-label
   statement ends the scan.  */
1414 if (!label_stmt)
1415 break;
1417 label = gimple_label_label (label_stmt);
1419 /* If we have not yet seen a label for the current block,
1420 remember this one and see if there are more labels.  */
1421 if (!label_for_bb[bb->index].label)
1423 label_for_bb[bb->index].label = label;
1424 continue;
1427 /* If we did see a label for the current block already, but it
1428 is an artificially created label, replace it if the current
1429 label is a user defined label.  */
1430 if (!DECL_ARTIFICIAL (label)
1431 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1433 label_for_bb[bb->index].label = label;
1434 break;
1439 /* Now redirect all jumps/branches to the selected label.
1440 First do so for each block ending in a control statement.  */
1441 FOR_EACH_BB_FN (bb, cfun)
1443 gimple stmt = last_stmt (bb);
1444 tree label, new_label;
1446 if (!stmt)
1447 continue;
1449 switch (gimple_code (stmt))
1451 case GIMPLE_COND:
1453 gcond *cond_stmt = as_a <gcond *> (stmt);
1454 label = gimple_cond_true_label (cond_stmt);
1455 if (label)
1457 new_label = main_block_label (label);
1458 if (new_label != label)
1459 gimple_cond_set_true_label (cond_stmt, new_label);
1462 label = gimple_cond_false_label (cond_stmt);
1463 if (label)
1465 new_label = main_block_label (label);
1466 if (new_label != label)
1467 gimple_cond_set_false_label (cond_stmt, new_label);
1470 break;
1472 case GIMPLE_SWITCH:
1474 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1475 size_t i, n = gimple_switch_num_labels (switch_stmt);
1477 /* Replace all destination labels.  */
1478 for (i = 0; i < n; ++i)
1480 tree case_label = gimple_switch_label (switch_stmt, i);
1481 label = CASE_LABEL (case_label);
1482 new_label = main_block_label (label);
1483 if (new_label != label)
1484 CASE_LABEL (case_label) = new_label;
1486 break;
1489 case GIMPLE_ASM:
1491 gasm *asm_stmt = as_a <gasm *> (stmt);
1492 int i, n = gimple_asm_nlabels (asm_stmt);
1494 for (i = 0; i < n; ++i)
1496 tree cons = gimple_asm_label_op (asm_stmt, i);
1497 tree label = main_block_label (TREE_VALUE (cons));
1498 TREE_VALUE (cons) = label;
1500 break;
1503 /* We have to handle gotos until they're removed, and we don't
1504 remove them until after we've created the CFG edges.  */
1505 case GIMPLE_GOTO:
1506 if (!computed_goto_p (stmt))
1508 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1509 label = gimple_goto_dest (goto_stmt);
1510 new_label = main_block_label (label);
1511 if (new_label != label)
1512 gimple_goto_set_dest (goto_stmt, new_label);
1514 break;
1516 case GIMPLE_TRANSACTION:
1518 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1519 tree label = gimple_transaction_label (trans_stmt);
1520 if (label)
1522 tree new_label = main_block_label (label);
1523 if (new_label != label)
1524 gimple_transaction_set_label (trans_stmt, new_label);
1527 break;
1529 default:
1530 break;
1534 /* Do the same for the exception region tree labels.  */
1535 cleanup_dead_labels_eh ();
1537 /* Finally, purge dead labels. All user-defined labels and labels that
1538 can be the target of non-local gotos and labels which have their
1539 address taken are preserved.  */
1540 FOR_EACH_BB_FN (bb, cfun)
1542 gimple_stmt_iterator i;
1543 tree label_for_this_bb = label_for_bb[bb->index].label;
1545 if (!label_for_this_bb)
1546 continue;
1548 /* If the main label of the block is unused, we may still remove it.  */
1549 if (!label_for_bb[bb->index].used)
1550 label_for_this_bb = NULL;
1552 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1554 tree label;
1555 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1557 if (!label_stmt)
1558 break;
1560 label = gimple_label_label (label_stmt);
1562 if (label == label_for_this_bb
1563 || !DECL_ARTIFICIAL (label)
1564 || DECL_NONLOCAL (label)
1565 || FORCED_LABEL (label))
1566 gsi_next (&i);
1567 else
1568 gsi_remove (&i, true);
1572 free (label_for_bb);
1575 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1576 the ones jumping to the same label.
1577 Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1579 void
1580 group_case_labels_stmt (gswitch *stmt)
1582 int old_size = gimple_switch_num_labels (stmt);
1583 int i, j, new_size = old_size;
1584 basic_block default_bb = NULL;
1586 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1588 /* Look for possible opportunities to merge cases.  */
/* Index 0 is the default label, so scanning starts at 1.  Merged or
   dropped slots are marked NULL_TREE and compacted afterwards.  */
1589 i = 1;
1590 while (i < old_size)
1592 tree base_case, base_high;
1593 basic_block base_bb;
1595 base_case = gimple_switch_label (stmt, i);
1597 gcc_assert (base_case);
1598 base_bb = label_to_block (CASE_LABEL (base_case));
1600 /* Discard cases that have the same destination as the
1601 default case.  */
1602 if (base_bb == default_bb)
1604 gimple_switch_set_label (stmt, i, NULL_TREE);
1605 i++;
1606 new_size--;
1607 continue;
1610 base_high = CASE_HIGH (base_case)
1611 ? CASE_HIGH (base_case)
1612 : CASE_LOW (base_case);
1613 i++;
1615 /* Try to merge case labels. Break out when we reach the end
1616 of the label vector or when we cannot merge the next case
1617 label with the current one.  */
1618 while (i < old_size)
1620 tree merge_case = gimple_switch_label (stmt, i);
1621 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
/* BHP1 is base_high + 1; the next case is mergeable only if its low
   bound is exactly contiguous with the current range.  */
1622 wide_int bhp1 = wi::add (base_high, 1);
1624 /* Merge the cases if they jump to the same place,
1625 and their ranges are consecutive.  */
1626 if (merge_bb == base_bb
1627 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1629 base_high = CASE_HIGH (merge_case) ?
1630 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1631 CASE_HIGH (base_case) = base_high;
1632 gimple_switch_set_label (stmt, i, NULL_TREE);
1633 new_size--;
1634 i++;
1636 else
1637 break;
1641 /* Compress the case labels in the label vector, and adjust the
1642 length of the vector.  */
/* Two-finger compaction: I writes, J reads past the NULL_TREE holes
   left by the merging above.  */
1643 for (i = 0, j = 0; i < new_size; i++)
1645 while (! gimple_switch_label (stmt, j))
1646 j++;
1647 gimple_switch_set_label (stmt, i,
1648 gimple_switch_label (stmt, j++));
1651 gcc_assert (new_size <= old_size);
1652 gimple_switch_set_num_labels (stmt, new_size);
1655 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1656 and scan the sorted vector of cases. Combine the ones jumping to the
1657 same label. */
1659 void
1660 group_case_labels (void)
1662 basic_block bb;
1664 FOR_EACH_BB_FN (bb, cfun)
1666 gimple stmt = last_stmt (bb);
1667 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1668 group_case_labels_stmt (as_a <gswitch *> (stmt));
1672 /* Checks whether we can merge block B into block A.  */
1674 static bool
1675 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1677 gimple stmt;
/* A must fall through unconditionally and uniquely into B, and B must
   have no other predecessors.  */
1679 if (!single_succ_p (a))
1680 return false;
1682 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1683 return false;
1685 if (single_succ (a) != b)
1686 return false;
1688 if (!single_pred_p (b))
1689 return false;
1691 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1692 return false;
1694 /* If A ends by a statement causing exceptions or something similar, we
1695 cannot merge the blocks.  */
1696 stmt = last_stmt (a);
1697 if (stmt && stmt_ends_bb_p (stmt))
1698 return false;
1700 /* Do not allow a block with only a non-local label to be merged.  */
1701 if (stmt)
1702 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1703 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1704 return false;
1706 /* Examine the labels at the beginning of B.  */
1707 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1708 gsi_next (&gsi))
1710 tree lab;
1711 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1712 if (!label_stmt)
1713 break;
1714 lab = gimple_label_label (label_stmt);
1716 /* Do not remove user forced labels or for -O0 any user labels.  */
1717 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1718 return false;
1721 /* Protect simple loop latches. We only want to avoid merging
1722 the latch with the loop header in this case.  */
1723 if (current_loops
1724 && b->loop_father->latch == b
1725 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1726 && b->loop_father->header == a)
1727 return false;
1729 /* It must be possible to eliminate all phi nodes in B. If ssa form
1730 is not up-to-date and a name-mapping is registered, we cannot eliminate
1731 any phis. Symbols marked for renaming are never a problem though.  */
1732 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1733 gsi_next (&gsi))
1735 gphi *phi = gsi.phi ();
1736 /* Technically only new names matter.  */
1737 if (name_registered_for_update_p (PHI_RESULT (phi)))
1738 return false;
1741 /* When not optimizing, don't merge if we'd lose goto_locus.  */
/* The locus survives only if some neighbouring statement (A's last
   non-debug stmt or B's first real stmt) already carries it.  */
1742 if (!optimize
1743 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1745 location_t goto_locus = single_succ_edge (a)->goto_locus;
1746 gimple_stmt_iterator prev, next;
1747 prev = gsi_last_nondebug_bb (a);
1748 next = gsi_after_labels (b);
1749 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1750 gsi_next_nondebug (&next);
1751 if ((gsi_end_p (prev)
1752 || gimple_location (gsi_stmt (prev)) != goto_locus)
1753 && (gsi_end_p (next)
1754 || gimple_location (gsi_stmt (next)) != goto_locus))
1755 return false;
1758 return true;
1761 /* Replaces all uses of NAME by VAL.  */
1763 void
1764 replace_uses_by (tree name, tree val)
1766 imm_use_iterator imm_iter;
1767 use_operand_p use;
1768 gimple stmt;
1769 edge e;
1771 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1773 /* Mark the block if we change the last stmt in it.  */
1774 if (cfgcleanup_altered_bbs
1775 && stmt_ends_bb_p (stmt))
1776 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1778 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1780 replace_exp (use, val);
1782 if (gimple_code (stmt) == GIMPLE_PHI)
1784 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1785 PHI_ARG_INDEX_FROM_USE (use));
1786 if (e->flags & EDGE_ABNORMAL)
1788 /* This can only occur for virtual operands, since
1789 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1790 would prevent replacement.  */
1791 gcc_checking_assert (virtual_operand_p (name));
1792 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements, refold and clean up after the substitution;
   PHIs need none of this.  */
1797 if (gimple_code (stmt) != GIMPLE_PHI)
1799 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1800 gimple orig_stmt = stmt;
1801 size_t i;
1803 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1804 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1805 only change sth from non-invariant to invariant, and only
1806 when propagating constants.  */
1807 if (is_gimple_min_invariant (val))
1808 for (i = 0; i < gimple_num_ops (stmt); i++)
1810 tree op = gimple_op (stmt, i);
1811 /* Operands may be empty here. For example, the labels
1812 of a GIMPLE_COND are nulled out following the creation
1813 of the corresponding CFG edges.  */
1814 if (op && TREE_CODE (op) == ADDR_EXPR)
1815 recompute_tree_invariant_for_addr_expr (op);
/* fold_stmt may replace the statement entirely; track the current
   one through the iterator.  */
1818 if (fold_stmt (&gsi))
1819 stmt = gsi_stmt (gsi);
1821 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1822 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1824 update_stmt (stmt);
1828 gcc_checking_assert (has_zero_uses (name));
1830 /* Also update the trees stored in loop structures.  */
1831 if (current_loops)
1833 struct loop *loop;
1835 FOR_EACH_LOOP (loop, 0)
1837 substitute_in_loop_info (loop, name, val);
1842 /* Merge block B into block A.  */
1844 static void
1845 gimple_merge_blocks (basic_block a, basic_block b)
1847 gimple_stmt_iterator last, gsi;
1848 gphi_iterator psi;
1850 if (dump_file)
1851 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1853 /* Remove all single-valued PHI nodes from block B of the form
1854 V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1855 gsi = gsi_last_bb (a);
1856 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1858 gimple phi = gsi_stmt (psi);
1859 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1860 gimple copy;
1861 bool may_replace_uses = (virtual_operand_p (def)
1862 || may_propagate_copy (def, use));
1864 /* In case we maintain loop closed ssa form, do not propagate arguments
1865 of loop exit phi nodes.  */
1866 if (current_loops
1867 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1868 && !virtual_operand_p (def)
1869 && TREE_CODE (use) == SSA_NAME
1870 && a->loop_father != b->loop_father)
1871 may_replace_uses = false;
1873 if (!may_replace_uses)
1875 gcc_assert (!virtual_operand_p (def));
1877 /* Note that just emitting the copies is fine -- there is no problem
1878 with ordering of phi nodes. This is because A is the single
1879 predecessor of B, therefore results of the phi nodes cannot
1880 appear as arguments of the phi nodes.  */
1881 copy = gimple_build_assign (def, use);
1882 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1883 remove_phi_node (&psi, false);
1885 else
1887 /* If we deal with a PHI for virtual operands, we can simply
1888 propagate these without fussing with folding or updating
1889 the stmt.  */
1890 if (virtual_operand_p (def))
1892 imm_use_iterator iter;
1893 use_operand_p use_p;
1894 gimple stmt;
1896 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1897 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1898 SET_USE (use_p, use);
1900 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1901 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1903 else
1904 replace_uses_by (def, use);
1906 remove_phi_node (&psi, true);
1910 /* Ensure that B follows A.  */
1911 move_block_after (b, a);
1913 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1914 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1916 /* Remove labels from B and set gimple_bb to A for other statements.  */
1917 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1919 gimple stmt = gsi_stmt (gsi);
1920 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1922 tree label = gimple_label_label (label_stmt);
1923 int lp_nr;
1925 gsi_remove (&gsi, false);
1927 /* Now that we can thread computed gotos, we might have
1928 a situation where we have a forced label in block B
1929 However, the label at the start of block B might still be
1930 used in other ways (think about the runtime checking for
1931 Fortran assigned gotos). So we can not just delete the
1932 label. Instead we move the label to the start of block A.  */
1933 if (FORCED_LABEL (label))
1935 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1936 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1938 /* Other user labels keep around in a form of a debug stmt.  */
1939 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1941 gimple dbg = gimple_build_debug_bind (label,
1942 integer_zero_node,
1943 stmt);
1944 gimple_debug_bind_reset_value (dbg);
1945 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* A removed label may still be an EH landing pad; clear the pad's
   reference so the EH tree does not point at a deleted label.  */
1948 lp_nr = EH_LANDING_PAD_NR (label);
1949 if (lp_nr)
1951 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1952 lp->post_landing_pad = NULL;
1955 else
1957 gimple_set_bb (stmt, a);
1958 gsi_next (&gsi);
1962 /* When merging two BBs, if their counts are different, the larger count
1963 is selected as the new bb count. This is to handle inconsistent
1964 profiles.  */
1965 if (a->loop_father == b->loop_father)
1967 a->count = MAX (a->count, b->count);
1968 a->frequency = MAX (a->frequency, b->frequency);
1971 /* Merge the sequences.  */
1972 last = gsi_last_bb (a);
1973 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1974 set_bb_seq (b, NULL);
1976 if (cfgcleanup_altered_bbs)
1977 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1981 /* Return the one of two successors of BB that is not reachable by a
1982 complex edge, if there is one. Else, return BB. We use
1983 this in optimizations that use post-dominators for their heuristics,
1984 to catch the cases in C++ where function calls are involved. */
1986 basic_block
1987 single_noncomplex_succ (basic_block bb)
1989 edge e0, e1;
1990 if (EDGE_COUNT (bb->succs) != 2)
1991 return bb;
1993 e0 = EDGE_SUCC (bb, 0);
1994 e1 = EDGE_SUCC (bb, 1);
1995 if (e0->flags & EDGE_COMPLEX)
1996 return e1->dest;
1997 if (e1->flags & EDGE_COMPLEX)
1998 return e0->dest;
2000 return bb;
2003 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2005 void
2006 notice_special_calls (gcall *call)
2008 int flags = gimple_call_flags (call);
2010 if (flags & ECF_MAY_BE_ALLOCA)
2011 cfun->calls_alloca = true;
2012 if (flags & ECF_RETURNS_TWICE)
2013 cfun->calls_setjmp = true;
2017 /* Clear flags set by notice_special_calls. Used by dead code removal
2018 to update the flags. */
2020 void
2021 clear_special_calls (void)
2023 cfun->calls_alloca = false;
2024 cfun->calls_setjmp = false;
2027 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2029 static void
2030 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2032 /* Since this block is no longer reachable, we can just delete all
2033 of its PHI nodes. */
2034 remove_phi_nodes (bb);
2036 /* Remove edges to BB's successors. */
2037 while (EDGE_COUNT (bb->succs) > 0)
2038 remove_edge (EDGE_SUCC (bb, 0));
2044 /* Remove statements of basic block BB.  */
2045 static void
2046 remove_bb (basic_block bb)
2047 gimple_stmt_iterator i;
2049 if (dump_file)
2051 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2052 if (dump_flags & TDF_DETAILS)
2054 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2055 fprintf (dump_file, "\n");
2059 if (current_loops)
2061 struct loop *loop = bb->loop_father;
2063 /* If a loop gets removed, clean up the information associated
2064 with it.  */
2065 if (loop->latch == bb
2066 || loop->header == bb)
2067 free_numbers_of_iterations_estimates_loop (loop);
2070 /* Remove all the instructions in the block.  */
2071 if (bb_seq (bb) != NULL)
2073 /* Walk backwards so as to get a chance to substitute all
2074 released DEFs into debug stmts. See
2075 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2076 details.  */
2077 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2079 gimple stmt = gsi_stmt (i);
2080 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2081 if (label_stmt
2082 && (FORCED_LABEL (gimple_label_label (label_stmt))
2083 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2085 basic_block new_bb;
2086 gimple_stmt_iterator new_gsi;
2088 /* A non-reachable non-local label may still be referenced.
2089 But it no longer needs to carry the extra semantics of
2090 non-locality.  */
2091 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2093 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2094 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
/* Rescue the label by moving it into the previous block rather than
   deleting it with the rest of BB.  */
2097 new_bb = bb->prev_bb;
2098 new_gsi = gsi_start_bb (new_bb);
2099 gsi_remove (&i, false);
2100 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2102 else
2104 /* Release SSA definitions if we are in SSA. Note that we
2105 may be called when not in SSA. For example,
2106 final_cleanup calls this function via
2107 cleanup_tree_cfg.  */
2108 if (gimple_in_ssa_p (cfun))
2109 release_defs (stmt);
2111 gsi_remove (&i, true);
/* gsi_remove leaves I on the following statement (or the end); step
   back so the walk continues toward the start of the block.  */
2114 if (gsi_end_p (i))
2115 i = gsi_last_bb (bb);
2116 else
2117 gsi_prev (&i);
2121 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2122 bb->il.gimple.seq = NULL;
2123 bb->il.gimple.phi_nodes = NULL;
2127 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2128 predicate VAL, return the edge that will be taken out of the block.
2129 If VAL does not match a unique edge, NULL is returned.  */
2131 edge
2132 find_taken_edge (basic_block bb, tree val)
2134 gimple stmt;
2136 stmt = last_stmt (bb);
2138 gcc_assert (stmt);
2139 gcc_assert (is_ctrl_stmt (stmt));
2141 if (val == NULL)
2142 return NULL;
/* Only constants can select a unique edge.  */
2144 if (!is_gimple_min_invariant (val))
2145 return NULL;
2147 if (gimple_code (stmt) == GIMPLE_COND)
2148 return find_taken_edge_cond_expr (bb, val);
2150 if (gimple_code (stmt) == GIMPLE_SWITCH)
2151 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2153 if (computed_goto_p (stmt))
2155 /* Only optimize if the argument is a label, if the argument is
2156 not a label then we cannot construct a proper CFG.
2158 It may be the case that we only need to allow the LABEL_REF to
2159 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2160 appear inside a LABEL_EXPR just to be safe.  */
2161 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2162 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2163 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2164 return NULL;
/* Every control statement kind is handled above.  */
2167 gcc_unreachable ();
2170 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2171 statement, determine which of the outgoing edges will be taken out of the
2172 block. Return NULL if either edge may be taken. */
2174 static edge
2175 find_taken_edge_computed_goto (basic_block bb, tree val)
2177 basic_block dest;
2178 edge e = NULL;
2180 dest = label_to_block (val);
2181 if (dest)
2183 e = find_edge (bb, dest);
2184 gcc_assert (e != NULL);
2187 return e;
2190 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2191 statement, determine which of the two edges will be taken out of the
2192 block. Return NULL if either edge may be taken. */
2194 static edge
2195 find_taken_edge_cond_expr (basic_block bb, tree val)
2197 edge true_edge, false_edge;
2199 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2201 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2202 return (integer_zerop (val) ? false_edge : true_edge);
2205 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2206 statement, determine which edge will be taken out of the block. Return
2207 NULL if any edge may be taken. */
2209 static edge
2210 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2211 tree val)
2213 basic_block dest_bb;
2214 edge e;
2215 tree taken_case;
2217 taken_case = find_case_label_for_value (switch_stmt, val);
2218 dest_bb = label_to_block (CASE_LABEL (taken_case));
2220 e = find_edge (bb, dest_bb);
2221 gcc_assert (e);
2222 return e;
2226 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2227 We can make optimal use here of the fact that the case labels are
2228 sorted: We can do a binary search for a case matching VAL.  */
2230 static tree
2231 find_case_label_for_value (gswitch *switch_stmt, tree val)
2233 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2234 tree default_case = gimple_switch_default_label (switch_stmt);
/* Binary search over labels 1..n-1 (label 0 is the default).  LOW
   starts at 0 so the invariant "labels[low] <= val or low == 0" holds;
   matches are returned from inside the loop.  */
2236 for (low = 0, high = n; high - low > 1; )
2238 size_t i = (high + low) / 2;
2239 tree t = gimple_switch_label (switch_stmt, i);
2240 int cmp;
2242 /* Cache the result of comparing CASE_LOW and val.  */
2243 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2245 if (cmp > 0)
2246 high = i;
2247 else
2248 low = i;
2250 if (CASE_HIGH (t) == NULL)
2252 /* A single-valued case label.  */
2253 if (cmp == 0)
2254 return t;
2256 else
2258 /* A case range. We can only handle integer ranges.  */
2259 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2260 return t;
/* No case matched VAL; the default label is taken.  */
2264 return default_case;
2268 /* Dump a basic block on stderr. */
2270 void
2271 gimple_debug_bb (basic_block bb)
2273 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2277 /* Dump basic block with index N on stderr. */
2279 basic_block
2280 gimple_debug_bb_n (int n)
2282 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2283 return BASIC_BLOCK_FOR_FN (cfun, n);
2287 /* Dump the CFG on stderr.
2289 FLAGS are the same used by the tree dumping functions
2290 (see TDF_* in dumpfile.h). */
2292 void
2293 gimple_debug_cfg (int flags)
2295 gimple_dump_cfg (stderr, flags);
2299 /* Dump the program showing basic block boundaries on the given FILE.
2301 FLAGS are the same used by the tree dumping functions (see TDF_* in
2302 tree.h).  */
2304 void
2305 gimple_dump_cfg (FILE *file, int flags)
/* With TDF_DETAILS, emit a header plus a brief per-block summary first.  */
2307 if (flags & TDF_DETAILS)
2309 dump_function_header (file, current_function_decl, flags);
2310 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2311 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2312 last_basic_block_for_fn (cfun));
2314 brief_dump_cfg (file, flags | TDF_COMMENT);
2315 fprintf (file, "\n");
2318 if (flags & TDF_STATS)
2319 dump_cfg_stats (file);
/* Always dump the function body itself with block boundaries marked.  */
2321 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2325 /* Dump CFG statistics on FILE. */
2327 void
2328 dump_cfg_stats (FILE *file)
2330 static long max_num_merged_labels = 0;
2331 unsigned long size, total = 0;
2332 long num_edges;
2333 basic_block bb;
2334 const char * const fmt_str = "%-30s%-13s%12s\n";
2335 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2336 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2337 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2338 const char *funcname = current_function_name ();
2340 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2342 fprintf (file, "---------------------------------------------------------\n");
2343 fprintf (file, fmt_str, "", " Number of ", "Memory");
2344 fprintf (file, fmt_str, "", " instances ", "used ");
2345 fprintf (file, "---------------------------------------------------------\n");
2347 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2348 total += size;
2349 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2350 SCALE (size), LABEL (size));
2352 num_edges = 0;
2353 FOR_EACH_BB_FN (bb, cfun)
2354 num_edges += EDGE_COUNT (bb->succs);
2355 size = num_edges * sizeof (struct edge_def);
2356 total += size;
2357 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2359 fprintf (file, "---------------------------------------------------------\n");
2360 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2361 LABEL (total));
2362 fprintf (file, "---------------------------------------------------------\n");
2363 fprintf (file, "\n");
2365 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2366 max_num_merged_labels = cfg_stats.num_merged_labels;
2368 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2369 cfg_stats.num_merged_labels, max_num_merged_labels);
2371 fprintf (file, "\n");
2375 /* Dump CFG statistics on stderr. Keep extern so that it's always
2376 linked in the final executable. */
2378 DEBUG_FUNCTION void
2379 debug_cfg_stats (void)
2381 dump_cfg_stats (stderr);
2384 /*---------------------------------------------------------------------------
2385 Miscellaneous helpers
2386 ---------------------------------------------------------------------------*/
2388 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2389 flow. Transfers of control flow associated with EH are excluded. */
2391 static bool
2392 call_can_make_abnormal_goto (gimple t)
2394 /* If the function has no non-local labels, then a call cannot make an
2395 abnormal transfer of control. */
2396 if (!cfun->has_nonlocal_label
2397 && !cfun->calls_setjmp)
2398 return false;
2400 /* Likewise if the call has no side effects. */
2401 if (!gimple_has_side_effects (t))
2402 return false;
2404 /* Likewise if the called function is leaf. */
2405 if (gimple_call_flags (t) & ECF_LEAF)
2406 return false;
2408 return true;
2412 /* Return true if T can make an abnormal transfer of control flow.
2413 Transfers of control flow associated with EH are excluded. */
2415 bool
2416 stmt_can_make_abnormal_goto (gimple t)
2418 if (computed_goto_p (t))
2419 return true;
2420 if (is_gimple_call (t))
2421 return call_can_make_abnormal_goto (t);
2422 return false;
2426 /* Return true if T represents a stmt that always transfers control. */
2428 bool
2429 is_ctrl_stmt (gimple t)
2431 switch (gimple_code (t))
2433 case GIMPLE_COND:
2434 case GIMPLE_SWITCH:
2435 case GIMPLE_GOTO:
2436 case GIMPLE_RETURN:
2437 case GIMPLE_RESX:
2438 return true;
2439 default:
2440 return false;
2445 /* Return true if T is a statement that may alter the flow of control
2446 (e.g., a call to a non-returning function). */
2448 bool
2449 is_ctrl_altering_stmt (gimple t)
2451 gcc_assert (t);
2453 switch (gimple_code (t))
2455 case GIMPLE_CALL:
2456 /* Per stmt call flag indicates whether the call could alter
2457 controlflow. */
2458 if (gimple_call_ctrl_altering_p (t))
2459 return true;
2460 break;
2462 case GIMPLE_EH_DISPATCH:
2463 /* EH_DISPATCH branches to the individual catch handlers at
2464 this level of a try or allowed-exceptions region. It can
2465 fallthru to the next statement as well. */
2466 return true;
2468 case GIMPLE_ASM:
2469 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2470 return true;
2471 break;
2473 CASE_GIMPLE_OMP:
2474 /* OpenMP directives alter control flow. */
2475 return true;
2477 case GIMPLE_TRANSACTION:
2478 /* A transaction start alters control flow. */
2479 return true;
2481 default:
2482 break;
2485 /* If a statement can throw, it alters control flow. */
2486 return stmt_can_throw_internal (t);
2490 /* Return true if T is a simple local goto. */
2492 bool
2493 simple_goto_p (gimple t)
2495 return (gimple_code (t) == GIMPLE_GOTO
2496 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2500 /* Return true if STMT should start a new basic block. PREV_STMT is
2501 the statement preceding STMT. It is used when STMT is a label or a
2502 case label. Labels should only start a new basic block if their
2503 previous statement wasn't a label. Otherwise, sequence of labels
2504 would generate unnecessary basic blocks that only contain a single
2505 label. */
2507 static inline bool
2508 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2510 if (stmt == NULL)
2511 return false;
2513 /* Labels start a new basic block only if the preceding statement
2514 wasn't a label of the same type. This prevents the creation of
2515 consecutive blocks that have nothing but a single label. */
2516 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2518 /* Nonlocal and computed GOTO targets always start a new block. */
2519 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2520 || FORCED_LABEL (gimple_label_label (label_stmt)))
2521 return true;
2523 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2525 if (DECL_NONLOCAL (gimple_label_label (
2526 as_a <glabel *> (prev_stmt))))
2527 return true;
2529 cfg_stats.num_merged_labels++;
2530 return false;
2532 else
2533 return true;
2535 else if (gimple_code (stmt) == GIMPLE_CALL
2536 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2537 /* setjmp acts similar to a nonlocal GOTO target and thus should
2538 start a new block. */
2539 return true;
2541 return false;
2545 /* Return true if T should end a basic block. */
2547 bool
2548 stmt_ends_bb_p (gimple t)
2550 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2553 /* Remove block annotations and other data structures. */
2555 void
2556 delete_tree_cfg_annotations (void)
2558 vec_free (label_to_block_map_for_fn (cfun));
2562 /* Return the first statement in basic block BB. */
2564 gimple
2565 first_stmt (basic_block bb)
2567 gimple_stmt_iterator i = gsi_start_bb (bb);
2568 gimple stmt = NULL;
2570 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2572 gsi_next (&i);
2573 stmt = NULL;
2575 return stmt;
2578 /* Return the first non-label statement in basic block BB. */
2580 static gimple
2581 first_non_label_stmt (basic_block bb)
2583 gimple_stmt_iterator i = gsi_start_bb (bb);
2584 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2585 gsi_next (&i);
2586 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2589 /* Return the last statement in basic block BB. */
2591 gimple
2592 last_stmt (basic_block bb)
2594 gimple_stmt_iterator i = gsi_last_bb (bb);
2595 gimple stmt = NULL;
2597 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2599 gsi_prev (&i);
2600 stmt = NULL;
2602 return stmt;
2605 /* Return the last statement of an otherwise empty block. Return NULL
2606 if the block is totally empty, or if it contains more than one
2607 statement. */
2609 gimple
2610 last_and_only_stmt (basic_block bb)
2612 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2613 gimple last, prev;
2615 if (gsi_end_p (i))
2616 return NULL;
2618 last = gsi_stmt (i);
2619 gsi_prev_nondebug (&i);
2620 if (gsi_end_p (i))
2621 return last;
2623 /* Empty statements should no longer appear in the instruction stream.
2624 Everything that might have appeared before should be deleted by
2625 remove_useless_stmts, and the optimizers should just gsi_remove
2626 instead of smashing with build_empty_stmt.
2628 Thus the only thing that should appear here in a block containing
2629 one executable statement is a label. */
2630 prev = gsi_stmt (i);
2631 if (gimple_code (prev) == GIMPLE_LABEL)
2632 return last;
2633 else
2634 return NULL;
2637 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2639 static void
2640 reinstall_phi_args (edge new_edge, edge old_edge)
2642 edge_var_map *vm;
2643 int i;
2644 gphi_iterator phis;
2646 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2647 if (!v)
2648 return;
2650 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2651 v->iterate (i, &vm) && !gsi_end_p (phis);
2652 i++, gsi_next (&phis))
2654 gphi *phi = phis.phi ();
2655 tree result = redirect_edge_var_map_result (vm);
2656 tree arg = redirect_edge_var_map_def (vm);
2658 gcc_assert (result == gimple_phi_result (phi));
2660 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2663 redirect_edge_var_map_clear (old_edge);
2666 /* Returns the basic block after which the new basic block created
2667 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2668 near its "logical" location. This is of most help to humans looking
2669 at debugging dumps. */
2671 static basic_block
2672 split_edge_bb_loc (edge edge_in)
2674 basic_block dest = edge_in->dest;
2675 basic_block dest_prev = dest->prev_bb;
2677 if (dest_prev)
2679 edge e = find_edge (dest_prev, dest);
2680 if (e && !(e->flags & EDGE_COMPLEX))
2681 return edge_in->src;
2683 return dest_prev;
2686 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2687 Abort on abnormal edges. */
2689 static basic_block
2690 gimple_split_edge (edge edge_in)
2692 basic_block new_bb, after_bb, dest;
2693 edge new_edge, e;
2695 /* Abnormal edges cannot be split. */
2696 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2698 dest = edge_in->dest;
2700 after_bb = split_edge_bb_loc (edge_in);
2702 new_bb = create_empty_bb (after_bb);
2703 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2704 new_bb->count = edge_in->count;
2705 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2706 new_edge->probability = REG_BR_PROB_BASE;
2707 new_edge->count = edge_in->count;
2709 e = redirect_edge_and_branch (edge_in, new_bb);
2710 gcc_assert (e == edge_in);
2711 reinstall_phi_args (new_edge, e);
2713 return new_bb;
2717 /* Verify properties of the address expression T with base object BASE. */
2719 static tree
2720 verify_address (tree t, tree base)
2722 bool old_constant;
2723 bool old_side_effects;
2724 bool new_constant;
2725 bool new_side_effects;
2727 old_constant = TREE_CONSTANT (t);
2728 old_side_effects = TREE_SIDE_EFFECTS (t);
2730 recompute_tree_invariant_for_addr_expr (t);
2731 new_side_effects = TREE_SIDE_EFFECTS (t);
2732 new_constant = TREE_CONSTANT (t);
2734 if (old_constant != new_constant)
2736 error ("constant not recomputed when ADDR_EXPR changed");
2737 return t;
2739 if (old_side_effects != new_side_effects)
2741 error ("side effects not recomputed when ADDR_EXPR changed");
2742 return t;
2745 if (!(TREE_CODE (base) == VAR_DECL
2746 || TREE_CODE (base) == PARM_DECL
2747 || TREE_CODE (base) == RESULT_DECL))
2748 return NULL_TREE;
2750 if (DECL_GIMPLE_REG_P (base))
2752 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2753 return base;
2756 return NULL_TREE;
2759 /* Callback for walk_tree, check that all elements with address taken are
2760 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2761 inside a PHI node. */
2763 static tree
2764 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2766 tree t = *tp, x;
2768 if (TYPE_P (t))
2769 *walk_subtrees = 0;
2771 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2772 #define CHECK_OP(N, MSG) \
2773 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2774 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2776 switch (TREE_CODE (t))
2778 case SSA_NAME:
2779 if (SSA_NAME_IN_FREE_LIST (t))
2781 error ("SSA name in freelist but still referenced");
2782 return *tp;
2784 break;
2786 case INDIRECT_REF:
2787 error ("INDIRECT_REF in gimple IL");
2788 return t;
2790 case MEM_REF:
2791 x = TREE_OPERAND (t, 0);
2792 if (!POINTER_TYPE_P (TREE_TYPE (x))
2793 || !is_gimple_mem_ref_addr (x))
2795 error ("invalid first operand of MEM_REF");
2796 return x;
2798 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2799 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2801 error ("invalid offset operand of MEM_REF");
2802 return TREE_OPERAND (t, 1);
2804 if (TREE_CODE (x) == ADDR_EXPR
2805 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2806 return x;
2807 *walk_subtrees = 0;
2808 break;
2810 case ASSERT_EXPR:
2811 x = fold (ASSERT_EXPR_COND (t));
2812 if (x == boolean_false_node)
2814 error ("ASSERT_EXPR with an always-false condition");
2815 return *tp;
2817 break;
2819 case MODIFY_EXPR:
2820 error ("MODIFY_EXPR not expected while having tuples");
2821 return *tp;
2823 case ADDR_EXPR:
2825 tree tem;
2827 gcc_assert (is_gimple_address (t));
2829 /* Skip any references (they will be checked when we recurse down the
2830 tree) and ensure that any variable used as a prefix is marked
2831 addressable. */
2832 for (x = TREE_OPERAND (t, 0);
2833 handled_component_p (x);
2834 x = TREE_OPERAND (x, 0))
2837 if ((tem = verify_address (t, x)))
2838 return tem;
2840 if (!(TREE_CODE (x) == VAR_DECL
2841 || TREE_CODE (x) == PARM_DECL
2842 || TREE_CODE (x) == RESULT_DECL))
2843 return NULL;
2845 if (!TREE_ADDRESSABLE (x))
2847 error ("address taken, but ADDRESSABLE bit not set");
2848 return x;
2851 break;
2854 case COND_EXPR:
2855 x = COND_EXPR_COND (t);
2856 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2858 error ("non-integral used in condition");
2859 return x;
2861 if (!is_gimple_condexpr (x))
2863 error ("invalid conditional operand");
2864 return x;
2866 break;
2868 case NON_LVALUE_EXPR:
2869 case TRUTH_NOT_EXPR:
2870 gcc_unreachable ();
2872 CASE_CONVERT:
2873 case FIX_TRUNC_EXPR:
2874 case FLOAT_EXPR:
2875 case NEGATE_EXPR:
2876 case ABS_EXPR:
2877 case BIT_NOT_EXPR:
2878 CHECK_OP (0, "invalid operand to unary operator");
2879 break;
2881 case REALPART_EXPR:
2882 case IMAGPART_EXPR:
2883 case BIT_FIELD_REF:
2884 if (!is_gimple_reg_type (TREE_TYPE (t)))
2886 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2887 return t;
2890 if (TREE_CODE (t) == BIT_FIELD_REF)
2892 tree t0 = TREE_OPERAND (t, 0);
2893 tree t1 = TREE_OPERAND (t, 1);
2894 tree t2 = TREE_OPERAND (t, 2);
2895 if (!tree_fits_uhwi_p (t1)
2896 || !tree_fits_uhwi_p (t2))
2898 error ("invalid position or size operand to BIT_FIELD_REF");
2899 return t;
2901 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2902 && (TYPE_PRECISION (TREE_TYPE (t))
2903 != tree_to_uhwi (t1)))
2905 error ("integral result type precision does not match "
2906 "field size of BIT_FIELD_REF");
2907 return t;
2909 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2910 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2911 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2912 != tree_to_uhwi (t1)))
2914 error ("mode precision of non-integral result does not "
2915 "match field size of BIT_FIELD_REF");
2916 return t;
2918 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2919 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2920 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2922 error ("position plus size exceeds size of referenced object in "
2923 "BIT_FIELD_REF");
2924 return t;
2927 t = TREE_OPERAND (t, 0);
2929 /* Fall-through. */
2930 case COMPONENT_REF:
2931 case ARRAY_REF:
2932 case ARRAY_RANGE_REF:
2933 case VIEW_CONVERT_EXPR:
2934 /* We have a nest of references. Verify that each of the operands
2935 that determine where to reference is either a constant or a variable,
2936 verify that the base is valid, and then show we've already checked
2937 the subtrees. */
2938 while (handled_component_p (t))
2940 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2941 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2942 else if (TREE_CODE (t) == ARRAY_REF
2943 || TREE_CODE (t) == ARRAY_RANGE_REF)
2945 CHECK_OP (1, "invalid array index");
2946 if (TREE_OPERAND (t, 2))
2947 CHECK_OP (2, "invalid array lower bound");
2948 if (TREE_OPERAND (t, 3))
2949 CHECK_OP (3, "invalid array stride");
2951 else if (TREE_CODE (t) == BIT_FIELD_REF
2952 || TREE_CODE (t) == REALPART_EXPR
2953 || TREE_CODE (t) == IMAGPART_EXPR)
2955 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2956 "REALPART_EXPR");
2957 return t;
2960 t = TREE_OPERAND (t, 0);
2963 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2965 error ("invalid reference prefix");
2966 return t;
2968 *walk_subtrees = 0;
2969 break;
2970 case PLUS_EXPR:
2971 case MINUS_EXPR:
2972 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2973 POINTER_PLUS_EXPR. */
2974 if (POINTER_TYPE_P (TREE_TYPE (t)))
2976 error ("invalid operand to plus/minus, type is a pointer");
2977 return t;
2979 CHECK_OP (0, "invalid operand to binary operator");
2980 CHECK_OP (1, "invalid operand to binary operator");
2981 break;
2983 case POINTER_PLUS_EXPR:
2984 /* Check to make sure the first operand is a pointer or reference type. */
2985 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2987 error ("invalid operand to pointer plus, first operand is not a pointer");
2988 return t;
2990 /* Check to make sure the second operand is a ptrofftype. */
2991 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2993 error ("invalid operand to pointer plus, second operand is not an "
2994 "integer type of appropriate width");
2995 return t;
2997 /* FALLTHROUGH */
2998 case LT_EXPR:
2999 case LE_EXPR:
3000 case GT_EXPR:
3001 case GE_EXPR:
3002 case EQ_EXPR:
3003 case NE_EXPR:
3004 case UNORDERED_EXPR:
3005 case ORDERED_EXPR:
3006 case UNLT_EXPR:
3007 case UNLE_EXPR:
3008 case UNGT_EXPR:
3009 case UNGE_EXPR:
3010 case UNEQ_EXPR:
3011 case LTGT_EXPR:
3012 case MULT_EXPR:
3013 case TRUNC_DIV_EXPR:
3014 case CEIL_DIV_EXPR:
3015 case FLOOR_DIV_EXPR:
3016 case ROUND_DIV_EXPR:
3017 case TRUNC_MOD_EXPR:
3018 case CEIL_MOD_EXPR:
3019 case FLOOR_MOD_EXPR:
3020 case ROUND_MOD_EXPR:
3021 case RDIV_EXPR:
3022 case EXACT_DIV_EXPR:
3023 case MIN_EXPR:
3024 case MAX_EXPR:
3025 case LSHIFT_EXPR:
3026 case RSHIFT_EXPR:
3027 case LROTATE_EXPR:
3028 case RROTATE_EXPR:
3029 case BIT_IOR_EXPR:
3030 case BIT_XOR_EXPR:
3031 case BIT_AND_EXPR:
3032 CHECK_OP (0, "invalid operand to binary operator");
3033 CHECK_OP (1, "invalid operand to binary operator");
3034 break;
3036 case CONSTRUCTOR:
3037 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3038 *walk_subtrees = 0;
3039 break;
3041 case CASE_LABEL_EXPR:
3042 if (CASE_CHAIN (t))
3044 error ("invalid CASE_CHAIN");
3045 return t;
3047 break;
3049 default:
3050 break;
3052 return NULL;
3054 #undef CHECK_OP
3058 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3059 Returns true if there is an error, otherwise false. */
3061 static bool
3062 verify_types_in_gimple_min_lval (tree expr)
3064 tree op;
3066 if (is_gimple_id (expr))
3067 return false;
3069 if (TREE_CODE (expr) != TARGET_MEM_REF
3070 && TREE_CODE (expr) != MEM_REF)
3072 error ("invalid expression for min lvalue");
3073 return true;
3076 /* TARGET_MEM_REFs are strange beasts. */
3077 if (TREE_CODE (expr) == TARGET_MEM_REF)
3078 return false;
3080 op = TREE_OPERAND (expr, 0);
3081 if (!is_gimple_val (op))
3083 error ("invalid operand in indirect reference");
3084 debug_generic_stmt (op);
3085 return true;
3087 /* Memory references now generally can involve a value conversion. */
3089 return false;
3092 /* Verify if EXPR is a valid GIMPLE reference expression. If
3093 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3094 if there is an error, otherwise false. */
3096 static bool
3097 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3099 while (handled_component_p (expr))
3101 tree op = TREE_OPERAND (expr, 0);
3103 if (TREE_CODE (expr) == ARRAY_REF
3104 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3106 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3107 || (TREE_OPERAND (expr, 2)
3108 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3109 || (TREE_OPERAND (expr, 3)
3110 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3112 error ("invalid operands to array reference");
3113 debug_generic_stmt (expr);
3114 return true;
3118 /* Verify if the reference array element types are compatible. */
3119 if (TREE_CODE (expr) == ARRAY_REF
3120 && !useless_type_conversion_p (TREE_TYPE (expr),
3121 TREE_TYPE (TREE_TYPE (op))))
3123 error ("type mismatch in array reference");
3124 debug_generic_stmt (TREE_TYPE (expr));
3125 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3126 return true;
3128 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3129 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3130 TREE_TYPE (TREE_TYPE (op))))
3132 error ("type mismatch in array range reference");
3133 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3134 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3135 return true;
3138 if ((TREE_CODE (expr) == REALPART_EXPR
3139 || TREE_CODE (expr) == IMAGPART_EXPR)
3140 && !useless_type_conversion_p (TREE_TYPE (expr),
3141 TREE_TYPE (TREE_TYPE (op))))
3143 error ("type mismatch in real/imagpart reference");
3144 debug_generic_stmt (TREE_TYPE (expr));
3145 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3146 return true;
3149 if (TREE_CODE (expr) == COMPONENT_REF
3150 && !useless_type_conversion_p (TREE_TYPE (expr),
3151 TREE_TYPE (TREE_OPERAND (expr, 1))))
3153 error ("type mismatch in component reference");
3154 debug_generic_stmt (TREE_TYPE (expr));
3155 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3156 return true;
3159 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3161 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3162 that their operand is not an SSA name or an invariant when
3163 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3164 bug). Otherwise there is nothing to verify, gross mismatches at
3165 most invoke undefined behavior. */
3166 if (require_lvalue
3167 && (TREE_CODE (op) == SSA_NAME
3168 || is_gimple_min_invariant (op)))
3170 error ("conversion of an SSA_NAME on the left hand side");
3171 debug_generic_stmt (expr);
3172 return true;
3174 else if (TREE_CODE (op) == SSA_NAME
3175 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3177 error ("conversion of register to a different size");
3178 debug_generic_stmt (expr);
3179 return true;
3181 else if (!handled_component_p (op))
3182 return false;
3185 expr = op;
3188 if (TREE_CODE (expr) == MEM_REF)
3190 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3192 error ("invalid address operand in MEM_REF");
3193 debug_generic_stmt (expr);
3194 return true;
3196 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3197 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3199 error ("invalid offset operand in MEM_REF");
3200 debug_generic_stmt (expr);
3201 return true;
3204 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3206 if (!TMR_BASE (expr)
3207 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3209 error ("invalid address operand in TARGET_MEM_REF");
3210 return true;
3212 if (!TMR_OFFSET (expr)
3213 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3214 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3216 error ("invalid offset operand in TARGET_MEM_REF");
3217 debug_generic_stmt (expr);
3218 return true;
3222 return ((require_lvalue || !is_gimple_min_invariant (expr))
3223 && verify_types_in_gimple_min_lval (expr));
3226 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3227 list of pointer-to types that is trivially convertible to DEST. */
3229 static bool
3230 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3232 tree src;
3234 if (!TYPE_POINTER_TO (src_obj))
3235 return true;
3237 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3238 if (useless_type_conversion_p (dest, src))
3239 return true;
3241 return false;
3244 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3245 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3247 static bool
3248 valid_fixed_convert_types_p (tree type1, tree type2)
3250 return (FIXED_POINT_TYPE_P (type1)
3251 && (INTEGRAL_TYPE_P (type2)
3252 || SCALAR_FLOAT_TYPE_P (type2)
3253 || FIXED_POINT_TYPE_P (type2)));
3256 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3257 is a problem, otherwise false. */
3259 static bool
3260 verify_gimple_call (gcall *stmt)
3262 tree fn = gimple_call_fn (stmt);
3263 tree fntype, fndecl;
3264 unsigned i;
3266 if (gimple_call_internal_p (stmt))
3268 if (fn)
3270 error ("gimple call has two targets");
3271 debug_generic_stmt (fn);
3272 return true;
3275 else
3277 if (!fn)
3279 error ("gimple call has no target");
3280 return true;
3284 if (fn && !is_gimple_call_addr (fn))
3286 error ("invalid function in gimple call");
3287 debug_generic_stmt (fn);
3288 return true;
3291 if (fn
3292 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3293 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3294 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3296 error ("non-function in gimple call");
3297 return true;
3300 fndecl = gimple_call_fndecl (stmt);
3301 if (fndecl
3302 && TREE_CODE (fndecl) == FUNCTION_DECL
3303 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3304 && !DECL_PURE_P (fndecl)
3305 && !TREE_READONLY (fndecl))
3307 error ("invalid pure const state for function");
3308 return true;
3311 if (gimple_call_lhs (stmt)
3312 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3313 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3315 error ("invalid LHS in gimple call");
3316 return true;
3319 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3321 error ("LHS in noreturn call");
3322 return true;
3325 fntype = gimple_call_fntype (stmt);
3326 if (fntype
3327 && gimple_call_lhs (stmt)
3328 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3329 TREE_TYPE (fntype))
3330 /* ??? At least C++ misses conversions at assignments from
3331 void * call results.
3332 ??? Java is completely off. Especially with functions
3333 returning java.lang.Object.
3334 For now simply allow arbitrary pointer type conversions. */
3335 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3336 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3338 error ("invalid conversion in gimple call");
3339 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3340 debug_generic_stmt (TREE_TYPE (fntype));
3341 return true;
3344 if (gimple_call_chain (stmt)
3345 && !is_gimple_val (gimple_call_chain (stmt)))
3347 error ("invalid static chain in gimple call");
3348 debug_generic_stmt (gimple_call_chain (stmt));
3349 return true;
3352 /* If there is a static chain argument, this should not be an indirect
3353 call, and the decl should have DECL_STATIC_CHAIN set. */
3354 if (gimple_call_chain (stmt))
3356 if (!gimple_call_fndecl (stmt))
3358 error ("static chain in indirect gimple call");
3359 return true;
3361 fn = TREE_OPERAND (fn, 0);
3363 if (!DECL_STATIC_CHAIN (fn))
3365 error ("static chain with function that doesn%'t use one");
3366 return true;
3370 /* ??? The C frontend passes unpromoted arguments in case it
3371 didn't see a function declaration before the call. So for now
3372 leave the call arguments mostly unverified. Once we gimplify
3373 unit-at-a-time we have a chance to fix this. */
3375 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3377 tree arg = gimple_call_arg (stmt, i);
3378 if ((is_gimple_reg_type (TREE_TYPE (arg))
3379 && !is_gimple_val (arg))
3380 || (!is_gimple_reg_type (TREE_TYPE (arg))
3381 && !is_gimple_lvalue (arg)))
3383 error ("invalid argument to gimple call");
3384 debug_generic_expr (arg);
3385 return true;
3389 return false;
3392 /* Verifies the gimple comparison with the result type TYPE and
3393 the operands OP0 and OP1. */
3395 static bool
3396 verify_gimple_comparison (tree type, tree op0, tree op1)
3398 tree op0_type = TREE_TYPE (op0);
3399 tree op1_type = TREE_TYPE (op1);
3401 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3403 error ("invalid operands in gimple comparison");
3404 return true;
3407 /* For comparisons we do not have the operations type as the
3408 effective type the comparison is carried out in. Instead
3409 we require that either the first operand is trivially
3410 convertible into the second, or the other way around.
3411 Because we special-case pointers to void we allow
3412 comparisons of pointers with the same mode as well. */
3413 if (!useless_type_conversion_p (op0_type, op1_type)
3414 && !useless_type_conversion_p (op1_type, op0_type)
3415 && (!POINTER_TYPE_P (op0_type)
3416 || !POINTER_TYPE_P (op1_type)
3417 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3419 error ("mismatching comparison operand types");
3420 debug_generic_expr (op0_type);
3421 debug_generic_expr (op1_type);
3422 return true;
3425 /* The resulting type of a comparison may be an effective boolean type. */
3426 if (INTEGRAL_TYPE_P (type)
3427 && (TREE_CODE (type) == BOOLEAN_TYPE
3428 || TYPE_PRECISION (type) == 1))
3430 if (TREE_CODE (op0_type) == VECTOR_TYPE
3431 || TREE_CODE (op1_type) == VECTOR_TYPE)
3433 error ("vector comparison returning a boolean");
3434 debug_generic_expr (op0_type);
3435 debug_generic_expr (op1_type);
3436 return true;
3439 /* Or an integer vector type with the same size and element count
3440 as the comparison operand types. */
3441 else if (TREE_CODE (type) == VECTOR_TYPE
3442 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3444 if (TREE_CODE (op0_type) != VECTOR_TYPE
3445 || TREE_CODE (op1_type) != VECTOR_TYPE)
3447 error ("non-vector operands in vector comparison");
3448 debug_generic_expr (op0_type);
3449 debug_generic_expr (op1_type);
3450 return true;
3453 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3454 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3455 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3456 /* The result of a vector comparison is of signed
3457 integral type. */
3458 || TYPE_UNSIGNED (TREE_TYPE (type)))
3460 error ("invalid vector comparison resulting type");
3461 debug_generic_expr (type);
3462 return true;
3465 else
3467 error ("bogus comparison result type");
3468 debug_generic_expr (type);
3469 return true;
3472 return false;
3475 /* Verify a gimple assignment statement STMT with an unary rhs.
3476 Returns true if anything is wrong. */
/* STMT's rhs class is GIMPLE_UNARY_RHS: the LHS must be a register and
   the single operand a gimple value.  Conversion codes get their own
   type rules below; every other unary code falls through to the common
   check that the operand type is trivially convertible to the LHS type.
   Diagnostics are emitted via error () before returning true.  */
3478 static bool
3479 verify_gimple_assign_unary (gassign *stmt)
3481 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3482 tree lhs = gimple_assign_lhs (stmt);
3483 tree lhs_type = TREE_TYPE (lhs);
3484 tree rhs1 = gimple_assign_rhs1 (stmt);
3485 tree rhs1_type = TREE_TYPE (rhs1);
/* The result of a unary operation must be a register.  */
3487 if (!is_gimple_reg (lhs))
3489 error ("non-register as LHS of unary operation");
3490 return true;
3493 if (!is_gimple_val (rhs1))
3495 error ("invalid operand in unary operation");
3496 return true;
3499 /* First handle conversions. */
3500 switch (rhs_code)
3502 CASE_CONVERT:
3504 /* Allow conversions from pointer type to integral type only if
3505 there is no sign or zero extension involved.
3506 For targets were the precision of ptrofftype doesn't match that
3507 of pointers we need to allow arbitrary conversions to ptrofftype. */
3508 if ((POINTER_TYPE_P (lhs_type)
3509 && INTEGRAL_TYPE_P (rhs1_type))
3510 || (POINTER_TYPE_P (rhs1_type)
3511 && INTEGRAL_TYPE_P (lhs_type)
3512 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3513 || ptrofftype_p (sizetype))))
3514 return false;
3516 /* Allow conversion from integral to offset type and vice versa. */
3517 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3518 && INTEGRAL_TYPE_P (rhs1_type))
3519 || (INTEGRAL_TYPE_P (lhs_type)
3520 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3521 return false;
3523 /* Otherwise assert we are converting between types of the
3524 same kind. */
3525 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3527 error ("invalid types in nop conversion");
3528 debug_generic_expr (lhs_type);
3529 debug_generic_expr (rhs1_type);
3530 return true;
3533 return false;
3536 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers, and into distinct address spaces.  */
3538 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3539 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3540 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3542 error ("invalid types in address space conversion");
3543 debug_generic_expr (lhs_type);
3544 debug_generic_expr (rhs1_type);
3545 return true;
3548 return false;
3551 case FIXED_CONVERT_EXPR:
/* Fixed-point conversions are valid in either direction.  */
3553 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3554 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3556 error ("invalid types in fixed-point conversion");
3557 debug_generic_expr (lhs_type);
3558 debug_generic_expr (rhs1_type);
3559 return true;
3562 return false;
3565 case FLOAT_EXPR:
/* integer -> float, either as scalars or element-wise on vectors.  */
3567 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3568 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3569 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3571 error ("invalid types in conversion to floating point");
3572 debug_generic_expr (lhs_type);
3573 debug_generic_expr (rhs1_type);
3574 return true;
3577 return false;
3580 case FIX_TRUNC_EXPR:
/* float -> integer, scalar or vector, mirroring FLOAT_EXPR above.  */
3582 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3583 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3584 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3586 error ("invalid types in conversion to integer");
3587 debug_generic_expr (lhs_type);
3588 debug_generic_expr (rhs1_type);
3589 return true;
3592 return false;
3594 case REDUC_MAX_EXPR:
3595 case REDUC_MIN_EXPR:
3596 case REDUC_PLUS_EXPR:
/* A reduction takes a vector operand and yields its element type.  */
3597 if (!VECTOR_TYPE_P (rhs1_type)
3598 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3600 error ("reduction should convert from vector to element type");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3603 return true;
3605 return false;
3607 case VEC_UNPACK_HI_EXPR:
3608 case VEC_UNPACK_LO_EXPR:
3609 case VEC_UNPACK_FLOAT_HI_EXPR:
3610 case VEC_UNPACK_FLOAT_LO_EXPR:
3611 /* FIXME. */
3612 return false;
3614 case NEGATE_EXPR:
3615 case ABS_EXPR:
3616 case BIT_NOT_EXPR:
3617 case PAREN_EXPR:
3618 case CONJ_EXPR:
/* These codes fall through to the common no-conversion check below.  */
3619 break;
3621 default:
3622 gcc_unreachable ();
3625 /* For the remaining codes assert there is no conversion involved. */
3626 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3628 error ("non-trivial conversion in unary operation");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3634 return false;
3637 /* Verify a gimple assignment statement STMT with a binary rhs.
3638 Returns true if anything is wrong. */
/* STMT's rhs class is GIMPLE_BINARY_RHS.  Codes whose operand types
   legitimately differ from the LHS type (shifts, widening operations,
   pointer-plus, comparisons, ...) are checked case by case; any code
   that reaches the tail of the function must have both operand types
   trivially convertible to the LHS type.  */
3640 static bool
3641 verify_gimple_assign_binary (gassign *stmt)
3643 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3644 tree lhs = gimple_assign_lhs (stmt);
3645 tree lhs_type = TREE_TYPE (lhs);
3646 tree rhs1 = gimple_assign_rhs1 (stmt);
3647 tree rhs1_type = TREE_TYPE (rhs1);
3648 tree rhs2 = gimple_assign_rhs2 (stmt);
3649 tree rhs2_type = TREE_TYPE (rhs2);
/* The result of a binary operation must be a register.  */
3651 if (!is_gimple_reg (lhs))
3653 error ("non-register as LHS of binary operation");
3654 return true;
3657 if (!is_gimple_val (rhs1)
3658 || !is_gimple_val (rhs2))
3660 error ("invalid operands in binary operation");
3661 return true;
3664 /* First handle operations that involve different types. */
3665 switch (rhs_code)
3667 case COMPLEX_EXPR:
/* Builds a complex value from two scalar (integral or float) parts.  */
3669 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3670 || !(INTEGRAL_TYPE_P (rhs1_type)
3671 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3672 || !(INTEGRAL_TYPE_P (rhs2_type)
3673 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3675 error ("type mismatch in complex expression");
3676 debug_generic_expr (lhs_type);
3677 debug_generic_expr (rhs1_type);
3678 debug_generic_expr (rhs2_type);
3679 return true;
3682 return false;
3685 case LSHIFT_EXPR:
3686 case RSHIFT_EXPR:
3687 case LROTATE_EXPR:
3688 case RROTATE_EXPR:
3690 /* Shifts and rotates are ok on integral types, fixed point
3691 types and integer vector types. */
3692 if ((!INTEGRAL_TYPE_P (rhs1_type)
3693 && !FIXED_POINT_TYPE_P (rhs1_type)
3694 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3695 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3696 || (!INTEGRAL_TYPE_P (rhs2_type)
3697 /* Vector shifts of vectors are also ok. */
3698 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3699 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3700 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3701 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3702 || !useless_type_conversion_p (lhs_type, rhs1_type))
3704 error ("type mismatch in shift expression");
3705 debug_generic_expr (lhs_type);
3706 debug_generic_expr (rhs1_type);
3707 debug_generic_expr (rhs2_type);
3708 return true;
3711 return false;
3714 case WIDEN_LSHIFT_EXPR:
/* Scalar widening shift: the result must be at least twice as wide
   as the shifted operand and the shift count a constant.
   NOTE(review): the diagnostic below says "widening vector shift"
   although this code is the scalar variant — confirm wording.  */
3716 if (!INTEGRAL_TYPE_P (lhs_type)
3717 || !INTEGRAL_TYPE_P (rhs1_type)
3718 || TREE_CODE (rhs2) != INTEGER_CST
3719 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3721 error ("type mismatch in widening vector shift expression");
3722 debug_generic_expr (lhs_type);
3723 debug_generic_expr (rhs1_type);
3724 debug_generic_expr (rhs2_type);
3725 return true;
3728 return false;
3731 case VEC_WIDEN_LSHIFT_HI_EXPR:
3732 case VEC_WIDEN_LSHIFT_LO_EXPR:
/* Element-wise widening shift on integer vectors.  */
3734 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3735 || TREE_CODE (lhs_type) != VECTOR_TYPE
3736 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3737 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3738 || TREE_CODE (rhs2) != INTEGER_CST
3739 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3740 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3742 error ("type mismatch in widening vector shift expression")
3743 debug_generic_expr (lhs_type);
3744 debug_generic_expr (rhs1_type);
3745 debug_generic_expr (rhs2_type);
3746 return true;
3749 return false;
3752 case PLUS_EXPR:
3753 case MINUS_EXPR:
/* For vector plus/minus, compare element types; pointers are never
   valid operands here (POINTER_PLUS_EXPR covers pointer arithmetic).  */
3755 tree lhs_etype = lhs_type;
3756 tree rhs1_etype = rhs1_type;
3757 tree rhs2_etype = rhs2_type;
3758 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3760 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3761 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3763 error ("invalid non-vector operands to vector valued plus");
3764 return true;
3766 lhs_etype = TREE_TYPE (lhs_type);
3767 rhs1_etype = TREE_TYPE (rhs1_type);
3768 rhs2_etype = TREE_TYPE (rhs2_type);
3770 if (POINTER_TYPE_P (lhs_etype)
3771 || POINTER_TYPE_P (rhs1_etype)
3772 || POINTER_TYPE_P (rhs2_etype))
3774 error ("invalid (pointer) operands to plus/minus");
3775 return true;
3778 /* Continue with generic binary expression handling. */
3779 break;
3782 case POINTER_PLUS_EXPR:
/* pointer + offset: rhs1 is a pointer matching the LHS, rhs2 must
   be of the pointer-offset type.  */
3784 if (!POINTER_TYPE_P (rhs1_type)
3785 || !useless_type_conversion_p (lhs_type, rhs1_type)
3786 || !ptrofftype_p (rhs2_type))
3788 error ("type mismatch in pointer plus expression");
3789 debug_generic_stmt (lhs_type);
3790 debug_generic_stmt (rhs1_type);
3791 debug_generic_stmt (rhs2_type);
3792 return true;
3795 return false;
3798 case TRUTH_ANDIF_EXPR:
3799 case TRUTH_ORIF_EXPR:
3800 case TRUTH_AND_EXPR:
3801 case TRUTH_OR_EXPR:
3802 case TRUTH_XOR_EXPR:
/* NOTE: treated as never occurring in verified GIMPLE, hence
   unreachable here.  */
3804 gcc_unreachable ();
3806 case LT_EXPR:
3807 case LE_EXPR:
3808 case GT_EXPR:
3809 case GE_EXPR:
3810 case EQ_EXPR:
3811 case NE_EXPR:
3812 case UNORDERED_EXPR:
3813 case ORDERED_EXPR:
3814 case UNLT_EXPR:
3815 case UNLE_EXPR:
3816 case UNGT_EXPR:
3817 case UNGE_EXPR:
3818 case UNEQ_EXPR:
3819 case LTGT_EXPR:
3820 /* Comparisons are also binary, but the result type is not
3821 connected to the operand types. */
3822 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3824 case WIDEN_MULT_EXPR:
3825 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3826 return true;
3827 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3828 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3830 case WIDEN_SUM_EXPR:
3831 case VEC_WIDEN_MULT_HI_EXPR:
3832 case VEC_WIDEN_MULT_LO_EXPR:
3833 case VEC_WIDEN_MULT_EVEN_EXPR:
3834 case VEC_WIDEN_MULT_ODD_EXPR:
3835 case VEC_PACK_TRUNC_EXPR:
3836 case VEC_PACK_SAT_EXPR:
3837 case VEC_PACK_FIX_TRUNC_EXPR:
3838 /* FIXME. */
3839 return false;
3841 case MULT_EXPR:
3842 case MULT_HIGHPART_EXPR:
3843 case TRUNC_DIV_EXPR:
3844 case CEIL_DIV_EXPR:
3845 case FLOOR_DIV_EXPR:
3846 case ROUND_DIV_EXPR:
3847 case TRUNC_MOD_EXPR:
3848 case CEIL_MOD_EXPR:
3849 case FLOOR_MOD_EXPR:
3850 case ROUND_MOD_EXPR:
3851 case RDIV_EXPR:
3852 case EXACT_DIV_EXPR:
3853 case MIN_EXPR:
3854 case MAX_EXPR:
3855 case BIT_IOR_EXPR:
3856 case BIT_XOR_EXPR:
3857 case BIT_AND_EXPR:
3858 /* Continue with generic binary expression handling. */
3859 break;
3861 default:
3862 gcc_unreachable ();
/* Common check: both operand types must be trivially convertible to
   the LHS type.  */
3865 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3866 || !useless_type_conversion_p (lhs_type, rhs2_type))
3868 error ("type mismatch in binary expression");
3869 debug_generic_stmt (lhs_type);
3870 debug_generic_stmt (rhs1_type);
3871 debug_generic_stmt (rhs2_type);
3872 return true;
3875 return false;
3878 /* Verify a gimple assignment statement STMT with a ternary rhs.
3879 Returns true if anything is wrong. */
/* STMT's rhs class is GIMPLE_TERNARY_RHS (widening multiply-accumulate,
   FMA, conditional moves, vector permutes, SAD, ...).  The LHS must be
   a register; rhs1 of a COND/VEC_COND may itself be a comparison.  */
3881 static bool
3882 verify_gimple_assign_ternary (gassign *stmt)
3884 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3885 tree lhs = gimple_assign_lhs (stmt);
3886 tree lhs_type = TREE_TYPE (lhs);
3887 tree rhs1 = gimple_assign_rhs1 (stmt);
3888 tree rhs1_type = TREE_TYPE (rhs1);
3889 tree rhs2 = gimple_assign_rhs2 (stmt);
3890 tree rhs2_type = TREE_TYPE (rhs2);
3891 tree rhs3 = gimple_assign_rhs3 (stmt);
3892 tree rhs3_type = TREE_TYPE (rhs3);
3894 if (!is_gimple_reg (lhs))
3896 error ("non-register as LHS of ternary operation");
3897 return true;
/* COND_EXPR/VEC_COND_EXPR may embed a comparison as rhs1; everything
   else requires plain gimple values for all three operands.  */
3900 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3901 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3902 || !is_gimple_val (rhs2)
3903 || !is_gimple_val (rhs3))
3905 error ("invalid operands in ternary operation");
3906 return true;
3909 /* First handle operations that involve different types. */
3910 switch (rhs_code)
3912 case WIDEN_MULT_PLUS_EXPR:
3913 case WIDEN_MULT_MINUS_EXPR:
3914 if ((!INTEGRAL_TYPE_P (rhs1_type)
3915 && !FIXED_POINT_TYPE_P (rhs1_type))
3916 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3917 || !useless_type_conversion_p (lhs_type, rhs3_type)
3918 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3919 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3921 error ("type mismatch in widening multiply-accumulate expression");
3922 debug_generic_expr (lhs_type);
3923 debug_generic_expr (rhs1_type);
3924 debug_generic_expr (rhs2_type);
3925 debug_generic_expr (rhs3_type);
3926 return true;
3928 break;
3930 case FMA_EXPR:
/* All three operands and the result share one type.  */
3931 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3932 || !useless_type_conversion_p (lhs_type, rhs2_type)
3933 || !useless_type_conversion_p (lhs_type, rhs3_type))
3935 error ("type mismatch in fused multiply-add expression");
3936 debug_generic_expr (lhs_type);
3937 debug_generic_expr (rhs1_type);
3938 debug_generic_expr (rhs2_type);
3939 debug_generic_expr (rhs3_type);
3940 return true;
3942 break;
3944 case COND_EXPR:
3945 case VEC_COND_EXPR:
/* Both selected values must have the LHS type; rhs1 (the condition)
   was already validated above.  */
3946 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3947 || !useless_type_conversion_p (lhs_type, rhs3_type))
3949 error ("type mismatch in conditional expression");
3950 debug_generic_expr (lhs_type);
3951 debug_generic_expr (rhs2_type);
3952 debug_generic_expr (rhs3_type);
3953 return true;
3955 break;
3957 case VEC_PERM_EXPR:
3958 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3959 || !useless_type_conversion_p (lhs_type, rhs2_type))
3961 error ("type mismatch in vector permute expression");
3962 debug_generic_expr (lhs_type);
3963 debug_generic_expr (rhs1_type);
3964 debug_generic_expr (rhs2_type);
3965 debug_generic_expr (rhs3_type);
3966 return true;
3969 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3970 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3971 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3973 error ("vector types expected in vector permute expression");
3974 debug_generic_expr (lhs_type);
3975 debug_generic_expr (rhs1_type);
3976 debug_generic_expr (rhs2_type);
3977 debug_generic_expr (rhs3_type);
3978 return true;
/* All four vectors must agree on the element count.  */
3981 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3982 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3983 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3984 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3985 != TYPE_VECTOR_SUBPARTS (lhs_type))
3987 error ("vectors with different element number found "
3988 "in vector permute expression")
3989 debug_generic_expr (lhs_type);
3990 debug_generic_expr (rhs1_type);
3991 debug_generic_expr (rhs2_type);
3992 debug_generic_expr (rhs3_type);
3993 return true;
/* The selector (rhs3) must be an integer vector whose element mode
   matches the data element mode in bit size.  */
3996 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3997 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3998 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4000 error ("invalid mask type in vector permute expression");
4001 debug_generic_expr (lhs_type);
4002 debug_generic_expr (rhs1_type);
4003 debug_generic_expr (rhs2_type);
4004 debug_generic_expr (rhs3_type);
4005 return true;
4008 return false;
4010 case SAD_EXPR:
4011 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4012 || !useless_type_conversion_p (lhs_type, rhs3_type)
4013 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4014 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4015 > GET_MODE_BITSIZE (GET_MODE_INNER
4016 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4018 error ("type mismatch in sad expression");
4019 debug_generic_expr (lhs_type);
4020 debug_generic_expr (rhs1_type);
4021 debug_generic_expr (rhs2_type);
4022 debug_generic_expr (rhs3_type);
4023 return true;
4026 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4027 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4028 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4030 error ("vector types expected in sad expression");
4031 debug_generic_expr (lhs_type);
4032 debug_generic_expr (rhs1_type);
4033 debug_generic_expr (rhs2_type);
4034 debug_generic_expr (rhs3_type);
4035 return true;
4038 return false;
4040 case DOT_PROD_EXPR:
4041 case REALIGN_LOAD_EXPR:
4042 /* FIXME. */
4043 return false;
4045 default:
4046 gcc_unreachable ();
4048 return false;
4051 /* Verify a gimple assignment statement STMT with a single rhs.
4052 Returns true if anything is wrong. */
/* STMT is of the form lhs = rhs1 where rhs1 is a single operand:
   a constant, declaration, memory reference, ADDR_EXPR or CONSTRUCTOR.
   RES accumulates failures from reference checking so several
   diagnostics can be reported before returning.  */
4054 static bool
4055 verify_gimple_assign_single (gassign *stmt)
4057 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4058 tree lhs = gimple_assign_lhs (stmt);
4059 tree lhs_type = TREE_TYPE (lhs);
4060 tree rhs1 = gimple_assign_rhs1 (stmt);
4061 tree rhs1_type = TREE_TYPE (rhs1);
4062 bool res = false;
4064 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4066 error ("non-trivial conversion at assignment");
4067 debug_generic_expr (lhs_type);
4068 debug_generic_expr (rhs1_type);
4069 return true;
/* Clobbers may only be applied to whole decls or MEM_REFs.  */
4072 if (gimple_clobber_p (stmt)
4073 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4075 error ("non-decl/MEM_REF LHS in clobber statement");
4076 debug_generic_expr (lhs);
4077 return true;
/* A memory LHS gets its reference tree validated as well.  */
4080 if (handled_component_p (lhs)
4081 || TREE_CODE (lhs) == MEM_REF
4082 || TREE_CODE (lhs) == TARGET_MEM_REF)
4083 res |= verify_types_in_gimple_reference (lhs, true);
4085 /* Special codes we cannot handle via their class. */
4086 switch (rhs_code)
4088 case ADDR_EXPR:
4090 tree op = TREE_OPERAND (rhs1, 0);
4091 if (!is_gimple_addressable (op))
4093 error ("invalid operand in unary expression");
4094 return true;
4097 /* Technically there is no longer a need for matching types, but
4098 gimple hygiene asks for this check. In LTO we can end up
4099 combining incompatible units and thus end up with addresses
4100 of globals that change their type to a common one. */
4101 if (!in_lto_p
4102 && !types_compatible_p (TREE_TYPE (op),
4103 TREE_TYPE (TREE_TYPE (rhs1)))
4104 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4105 TREE_TYPE (op)))
4107 error ("type mismatch in address expression");
4108 debug_generic_stmt (TREE_TYPE (rhs1));
4109 debug_generic_stmt (TREE_TYPE (op));
4110 return true;
4113 return verify_types_in_gimple_reference (op, true);
4116 /* tcc_reference */
4117 case INDIRECT_REF:
4118 error ("INDIRECT_REF in gimple IL");
4119 return true;
4121 case COMPONENT_REF:
4122 case BIT_FIELD_REF:
4123 case ARRAY_REF:
4124 case ARRAY_RANGE_REF:
4125 case VIEW_CONVERT_EXPR:
4126 case REALPART_EXPR:
4127 case IMAGPART_EXPR:
4128 case TARGET_MEM_REF:
4129 case MEM_REF:
/* A register-typed memory load may not store into memory again.  */
4130 if (!is_gimple_reg (lhs)
4131 && is_gimple_reg_type (TREE_TYPE (lhs)))
4133 error ("invalid rhs for gimple memory store");
4134 debug_generic_stmt (lhs);
4135 debug_generic_stmt (rhs1);
4136 return true;
4138 return res || verify_types_in_gimple_reference (rhs1, false);
4140 /* tcc_constant */
4141 case SSA_NAME:
4142 case INTEGER_CST:
4143 case REAL_CST:
4144 case FIXED_CST:
4145 case COMPLEX_CST:
4146 case VECTOR_CST:
4147 case STRING_CST:
4148 return res;
4150 /* tcc_declaration */
4151 case CONST_DECL:
4152 return res;
4153 case VAR_DECL:
4154 case PARM_DECL:
4155 if (!is_gimple_reg (lhs)
4156 && !is_gimple_reg (rhs1)
4157 && is_gimple_reg_type (TREE_TYPE (lhs)))
4159 error ("invalid rhs for gimple memory store");
4160 debug_generic_stmt (lhs);
4161 debug_generic_stmt (rhs1);
4162 return true;
4164 return res;
4166 case CONSTRUCTOR:
4167 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4169 unsigned int i;
4170 tree elt_i, elt_v, elt_t = NULL_TREE;
4172 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4173 return res;
4174 /* For vector CONSTRUCTORs we require that either it is empty
4175 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4176 (then the element count must be correct to cover the whole
4177 outer vector and index must be NULL on all elements, or it is
4178 a CONSTRUCTOR of scalar elements, where we as an exception allow
4179 smaller number of elements (assuming zero filling) and
4180 consecutive indexes as compared to NULL indexes (such
4181 CONSTRUCTORs can appear in the IL from FEs). */
4182 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4184 if (elt_t == NULL_TREE)
4186 elt_t = TREE_TYPE (elt_v);
4187 if (TREE_CODE (elt_t) == VECTOR_TYPE)
/* NOTE(review): the inner elt_t below shadows the outer one, so
   the outer variable keeps the first element's vector type for
   later iterations — looks intentional, but worth confirming.  */
4189 tree elt_t = TREE_TYPE (elt_v);
4190 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4191 TREE_TYPE (elt_t)))
4193 error ("incorrect type of vector CONSTRUCTOR"
4194 " elements");
4195 debug_generic_stmt (rhs1);
4196 return true;
4198 else if (CONSTRUCTOR_NELTS (rhs1)
4199 * TYPE_VECTOR_SUBPARTS (elt_t)
4200 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4202 error ("incorrect number of vector CONSTRUCTOR"
4203 " elements");
4204 debug_generic_stmt (rhs1);
4205 return true;
4208 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4209 elt_t))
4211 error ("incorrect type of vector CONSTRUCTOR elements");
4212 debug_generic_stmt (rhs1);
4213 return true;
4215 else if (CONSTRUCTOR_NELTS (rhs1)
4216 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4218 error ("incorrect number of vector CONSTRUCTOR elements");
4219 debug_generic_stmt (rhs1);
4220 return true;
4223 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4225 error ("incorrect type of vector CONSTRUCTOR elements");
4226 debug_generic_stmt (rhs1);
4227 return true;
4229 if (elt_i != NULL_TREE
4230 && (TREE_CODE (elt_t) == VECTOR_TYPE
4231 || TREE_CODE (elt_i) != INTEGER_CST
4232 || compare_tree_int (elt_i, i) != 0))
4234 error ("vector CONSTRUCTOR with non-NULL element index");
4235 debug_generic_stmt (rhs1);
4236 return true;
4238 if (!is_gimple_val (elt_v))
4240 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4241 debug_generic_stmt (rhs1);
4242 return true;
4246 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4248 error ("non-vector CONSTRUCTOR with elements");
4249 debug_generic_stmt (rhs1);
4250 return true;
4252 return res;
4253 case OBJ_TYPE_REF:
4254 case ASSERT_EXPR:
4255 case WITH_SIZE_EXPR:
4256 /* FIXME. */
4257 return res;
4259 default:;
4262 return res;
4265 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4266 is a problem, otherwise false. */
4268 static bool
4269 verify_gimple_assign (gassign *stmt)
4271 switch (gimple_assign_rhs_class (stmt))
4273 case GIMPLE_SINGLE_RHS:
4274 return verify_gimple_assign_single (stmt);
4276 case GIMPLE_UNARY_RHS:
4277 return verify_gimple_assign_unary (stmt);
4279 case GIMPLE_BINARY_RHS:
4280 return verify_gimple_assign_binary (stmt);
4282 case GIMPLE_TERNARY_RHS:
4283 return verify_gimple_assign_ternary (stmt);
4285 default:
4286 gcc_unreachable ();
4290 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4291 is a problem, otherwise false. */
4293 static bool
4294 verify_gimple_return (greturn *stmt)
4296 tree op = gimple_return_retval (stmt);
4297 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4299 /* We cannot test for present return values as we do not fix up missing
4300 return values from the original source. */
4301 if (op == NULL)
4302 return false;
4304 if (!is_gimple_val (op)
4305 && TREE_CODE (op) != RESULT_DECL)
4307 error ("invalid operand in return statement");
4308 debug_generic_stmt (op);
4309 return true;
4312 if ((TREE_CODE (op) == RESULT_DECL
4313 && DECL_BY_REFERENCE (op))
4314 || (TREE_CODE (op) == SSA_NAME
4315 && SSA_NAME_VAR (op)
4316 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4317 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4318 op = TREE_TYPE (op);
4320 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4322 error ("invalid conversion in return statement");
4323 debug_generic_stmt (restype);
4324 debug_generic_stmt (TREE_TYPE (op));
4325 return true;
4328 return false;
4332 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4333 is a problem, otherwise false. */
4335 static bool
4336 verify_gimple_goto (ggoto *stmt)
4338 tree dest = gimple_goto_dest (stmt);
4340 /* ??? We have two canonical forms of direct goto destinations, a
4341 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4342 if (TREE_CODE (dest) != LABEL_DECL
4343 && (!is_gimple_val (dest)
4344 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4346 error ("goto destination is neither a label nor a pointer");
4347 return true;
4350 return false;
4353 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4354 is a problem, otherwise false. */
4356 static bool
4357 verify_gimple_switch (gswitch *stmt)
4359 unsigned int i, n;
4360 tree elt, prev_upper_bound = NULL_TREE;
4361 tree index_type, elt_type = NULL_TREE;
4363 if (!is_gimple_val (gimple_switch_index (stmt)))
4365 error ("invalid operand to switch statement");
4366 debug_generic_stmt (gimple_switch_index (stmt));
4367 return true;
4370 index_type = TREE_TYPE (gimple_switch_index (stmt));
4371 if (! INTEGRAL_TYPE_P (index_type))
4373 error ("non-integral type switch statement");
4374 debug_generic_expr (index_type);
4375 return true;
4378 elt = gimple_switch_label (stmt, 0);
4379 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4381 error ("invalid default case label in switch statement");
4382 debug_generic_expr (elt);
4383 return true;
4386 n = gimple_switch_num_labels (stmt);
4387 for (i = 1; i < n; i++)
4389 elt = gimple_switch_label (stmt, i);
4391 if (! CASE_LOW (elt))
4393 error ("invalid case label in switch statement");
4394 debug_generic_expr (elt);
4395 return true;
4397 if (CASE_HIGH (elt)
4398 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4400 error ("invalid case range in switch statement");
4401 debug_generic_expr (elt);
4402 return true;
4405 if (elt_type)
4407 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4408 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4410 error ("type mismatch for case label in switch statement");
4411 debug_generic_expr (elt);
4412 return true;
4415 else
4417 elt_type = TREE_TYPE (CASE_LOW (elt));
4418 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4420 error ("type precision mismatch in switch statement");
4421 return true;
4425 if (prev_upper_bound)
4427 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4429 error ("case labels not sorted in switch statement");
4430 return true;
4434 prev_upper_bound = CASE_HIGH (elt);
4435 if (! prev_upper_bound)
4436 prev_upper_bound = CASE_LOW (elt);
4439 return false;
4442 /* Verify a gimple debug statement STMT.
4443 Returns true if anything is wrong. */
4445 static bool
4446 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4448 /* There isn't much that could be wrong in a gimple debug stmt. A
4449 gimple debug bind stmt, for example, maps a tree, that's usually
4450 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4451 component or member of an aggregate type, to another tree, that
4452 can be an arbitrary expression. These stmts expand into debug
4453 insns, and are converted to debug notes by var-tracking.c. */
4454 return false;
4457 /* Verify a gimple label statement STMT.
4458 Returns true if anything is wrong. */
4460 static bool
4461 verify_gimple_label (glabel *stmt)
4463 tree decl = gimple_label_label (stmt);
4464 int uid;
4465 bool err = false;
4467 if (TREE_CODE (decl) != LABEL_DECL)
4468 return true;
4469 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4470 && DECL_CONTEXT (decl) != current_function_decl)
4472 error ("label's context is not the current function decl");
4473 err |= true;
4476 uid = LABEL_DECL_UID (decl);
4477 if (cfun->cfg
4478 && (uid == -1
4479 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4481 error ("incorrect entry in label_to_block_map");
4482 err |= true;
4485 uid = EH_LANDING_PAD_NR (decl);
4486 if (uid)
4488 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4489 if (decl != lp->post_landing_pad)
4491 error ("incorrect setting of landing pad number");
4492 err |= true;
4496 return err;
4499 /* Verify a gimple cond statement STMT.
4500 Returns true if anything is wrong. */
4502 static bool
4503 verify_gimple_cond (gcond *stmt)
4505 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4507 error ("invalid comparison code in gimple cond");
4508 return true;
4510 if (!(!gimple_cond_true_label (stmt)
4511 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4512 || !(!gimple_cond_false_label (stmt)
4513 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4515 error ("invalid labels in gimple cond");
4516 return true;
4519 return verify_gimple_comparison (boolean_type_node,
4520 gimple_cond_lhs (stmt),
4521 gimple_cond_rhs (stmt));
4524 /* Verify the GIMPLE statement STMT. Returns true if there is an
4525 error, otherwise false. */
4527 static bool
4528 verify_gimple_stmt (gimple stmt)
4530 switch (gimple_code (stmt))
4532 case GIMPLE_ASSIGN:
4533 return verify_gimple_assign (as_a <gassign *> (stmt));
4535 case GIMPLE_LABEL:
4536 return verify_gimple_label (as_a <glabel *> (stmt));
4538 case GIMPLE_CALL:
4539 return verify_gimple_call (as_a <gcall *> (stmt));
4541 case GIMPLE_COND:
4542 return verify_gimple_cond (as_a <gcond *> (stmt));
4544 case GIMPLE_GOTO:
4545 return verify_gimple_goto (as_a <ggoto *> (stmt));
4547 case GIMPLE_SWITCH:
4548 return verify_gimple_switch (as_a <gswitch *> (stmt));
4550 case GIMPLE_RETURN:
4551 return verify_gimple_return (as_a <greturn *> (stmt));
4553 case GIMPLE_ASM:
4554 return false;
4556 case GIMPLE_TRANSACTION:
4557 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4559 /* Tuples that do not have tree operands. */
4560 case GIMPLE_NOP:
4561 case GIMPLE_PREDICT:
4562 case GIMPLE_RESX:
4563 case GIMPLE_EH_DISPATCH:
4564 case GIMPLE_EH_MUST_NOT_THROW:
4565 return false;
4567 CASE_GIMPLE_OMP:
4568 /* OpenMP directives are validated by the FE and never operated
4569 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4570 non-gimple expressions when the main index variable has had
4571 its address taken. This does not affect the loop itself
4572 because the header of an GIMPLE_OMP_FOR is merely used to determine
4573 how to setup the parallel iteration. */
4574 return false;
4576 case GIMPLE_DEBUG:
4577 return verify_gimple_debug (stmt);
4579 default:
4580 gcc_unreachable ();
4584 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4585 and false otherwise. */
4587 static bool
4588 verify_gimple_phi (gimple phi)
4590 bool err = false;
4591 unsigned i;
4592 tree phi_result = gimple_phi_result (phi);
4593 bool virtual_p;
4595 if (!phi_result)
4597 error ("invalid PHI result");
4598 return true;
4601 virtual_p = virtual_operand_p (phi_result);
4602 if (TREE_CODE (phi_result) != SSA_NAME
4603 || (virtual_p
4604 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4606 error ("invalid PHI result");
4607 err = true;
4610 for (i = 0; i < gimple_phi_num_args (phi); i++)
4612 tree t = gimple_phi_arg_def (phi, i);
4614 if (!t)
4616 error ("missing PHI def");
4617 err |= true;
4618 continue;
4620 /* Addressable variables do have SSA_NAMEs but they
4621 are not considered gimple values. */
4622 else if ((TREE_CODE (t) == SSA_NAME
4623 && virtual_p != virtual_operand_p (t))
4624 || (virtual_p
4625 && (TREE_CODE (t) != SSA_NAME
4626 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4627 || (!virtual_p
4628 && !is_gimple_val (t)))
4630 error ("invalid PHI argument");
4631 debug_generic_expr (t);
4632 err |= true;
4634 #ifdef ENABLE_TYPES_CHECKING
4635 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4637 error ("incompatible types in PHI argument %u", i);
4638 debug_generic_stmt (TREE_TYPE (phi_result));
4639 debug_generic_stmt (TREE_TYPE (t));
4640 err |= true;
4642 #endif
4645 return err;
4648 /* Verify the GIMPLE statements inside the sequence STMTS. */
4650 static bool
4651 verify_gimple_in_seq_2 (gimple_seq stmts)
4653 gimple_stmt_iterator ittr;
4654 bool err = false;
4656 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4658 gimple stmt = gsi_stmt (ittr);
4660 switch (gimple_code (stmt))
4662 case GIMPLE_BIND:
4663 err |= verify_gimple_in_seq_2 (
4664 gimple_bind_body (as_a <gbind *> (stmt)));
4665 break;
4667 case GIMPLE_TRY:
4668 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4669 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4670 break;
4672 case GIMPLE_EH_FILTER:
4673 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4674 break;
4676 case GIMPLE_EH_ELSE:
4678 geh_else *eh_else = as_a <geh_else *> (stmt);
4679 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4680 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4682 break;
4684 case GIMPLE_CATCH:
4685 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4686 as_a <gcatch *> (stmt)));
4687 break;
4689 case GIMPLE_TRANSACTION:
4690 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4691 break;
4693 default:
4695 bool err2 = verify_gimple_stmt (stmt);
4696 if (err2)
4697 debug_gimple_stmt (stmt);
4698 err |= err2;
4703 return err;
4706 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4707 is a problem, otherwise false. */
4709 static bool
4710 verify_gimple_transaction (gtransaction *stmt)
4712 tree lab = gimple_transaction_label (stmt);
4713 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4714 return true;
4715 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4719 /* Verify the GIMPLE statements inside the statement list STMTS. */
4721 DEBUG_FUNCTION void
4722 verify_gimple_in_seq (gimple_seq stmts)
4724 timevar_push (TV_TREE_STMT_VERIFY);
4725 if (verify_gimple_in_seq_2 (stmts))
4726 internal_error ("verify_gimple failed");
4727 timevar_pop (TV_TREE_STMT_VERIFY);
4730 /* Return true when the T can be shared. */
4732 static bool
4733 tree_node_can_be_shared (tree t)
4735 if (IS_TYPE_OR_DECL_P (t)
4736 || is_gimple_min_invariant (t)
4737 || TREE_CODE (t) == SSA_NAME
4738 || t == error_mark_node
4739 || TREE_CODE (t) == IDENTIFIER_NODE)
4740 return true;
4742 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4743 return true;
4745 if (DECL_P (t))
4746 return true;
4748 return false;
4751 /* Called via walk_tree. Verify tree sharing. */
4753 static tree
4754 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4756 hash_set<void *> *visited = (hash_set<void *> *) data;
4758 if (tree_node_can_be_shared (*tp))
4760 *walk_subtrees = false;
4761 return NULL;
4764 if (visited->add (*tp))
4765 return *tp;
4767 return NULL;
4770 /* Called via walk_gimple_stmt. Verify tree sharing. */
4772 static tree
4773 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4775 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4776 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4779 static bool eh_error_found;
4780 bool
4781 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4782 hash_set<gimple> *visited)
4784 if (!visited->contains (stmt))
4786 error ("dead STMT in EH table");
4787 debug_gimple_stmt (stmt);
4788 eh_error_found = true;
4790 return true;
4793 /* Verify if the location LOCs block is in BLOCKS. */
4795 static bool
4796 verify_location (hash_set<tree> *blocks, location_t loc)
4798 tree block = LOCATION_BLOCK (loc);
4799 if (block != NULL_TREE
4800 && !blocks->contains (block))
4802 error ("location references block not in block tree");
4803 return true;
4805 if (block != NULL_TREE)
4806 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4807 return false;
4810 /* Called via walk_tree. Verify that expressions have no blocks. */
4812 static tree
4813 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4815 if (!EXPR_P (*tp))
4817 *walk_subtrees = false;
4818 return NULL;
4821 location_t loc = EXPR_LOCATION (*tp);
4822 if (LOCATION_BLOCK (loc) != NULL)
4823 return *tp;
4825 return NULL;
4828 /* Called via walk_tree. Verify locations of expressions. */
4830 static tree
4831 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4833 hash_set<tree> *blocks = (hash_set<tree> *) data;
4835 if (TREE_CODE (*tp) == VAR_DECL
4836 && DECL_HAS_DEBUG_EXPR_P (*tp))
4838 tree t = DECL_DEBUG_EXPR (*tp);
4839 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4840 if (addr)
4841 return addr;
4843 if ((TREE_CODE (*tp) == VAR_DECL
4844 || TREE_CODE (*tp) == PARM_DECL
4845 || TREE_CODE (*tp) == RESULT_DECL)
4846 && DECL_HAS_VALUE_EXPR_P (*tp))
4848 tree t = DECL_VALUE_EXPR (*tp);
4849 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4850 if (addr)
4851 return addr;
4854 if (!EXPR_P (*tp))
4856 *walk_subtrees = false;
4857 return NULL;
4860 location_t loc = EXPR_LOCATION (*tp);
4861 if (verify_location (blocks, loc))
4862 return *tp;
4864 return NULL;
4867 /* Called via walk_gimple_op. Verify locations of expressions. */
4869 static tree
4870 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4872 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4873 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4876 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4878 static void
4879 collect_subblocks (hash_set<tree> *blocks, tree block)
4881 tree t;
4882 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4884 blocks->add (t);
4885 collect_subblocks (blocks, t);
/* Verify the GIMPLE statements in the CFG of FN.  Checks PHI nodes and
   ordinary statements for well-formedness, tree sharing, location/block
   consistency and EH-table coherence; aborts with an internal error on
   failure.  If VERIFY_NOTHROW, additionally complain about statements
   marked throwing that provably cannot throw.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* Nodes seen by the sharing walker across the whole function.  */
  hash_set<void *> visited;
  /* Every stmt reached through the CFG; compared to the EH table later.  */
  hash_set<gimple> visited_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First pass: PHI nodes of this block.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  visited_stmts.add (phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      /* Virtual PHIs are compiler artifacts; they must not
		 carry user locations.  */
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Second pass: ordinary statements of this block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  visited_stmts.add (stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  /* A throwing statement must end its block.  */
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Cross-check the EH table against the set of statements we saw.  */
  eh_error_found = false;
  hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
  if (eh_table)
    eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
      (&visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
/* Verifies that the flow information is OK.  Checks the ENTRY/EXIT
   blocks, label placement, absence of control flow in the middle of
   blocks, and that the outgoing edge flags of each block match the
   statement that ends it.  Returns nonzero on error.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  /* Nonlocal labels and EH landing pads must head their block.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Per-statement-kind edge flag verification.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers used above.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The PHIs of the original block are moved to the
   forwarder (DUMMY), and new PHIs with fresh SSA results are created
   in BB itself.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor nothing needs renaming.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      /* The old PHI (now in the forwarder) gets a fresh SSA name; its
	 result feeds the new PHI through the fallthru edge.  */
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  A usable (non-DECL_NONLOCAL) label
   found later in the label sequence is moved to the front so repeated
   queries stay cheap.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  /* Hoist the label to the head of the block before returning.  */
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No suitable label found; synthesize one at the block head.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only conditional jumps and switches can be dropped; both of this
     block's edges then reach TARGET, so a plain fallthru suffices.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  The last statement of
   E->src (if any) determines how the branch target is updated before
   the CFG edge itself is redirected.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* No cached cases: rewrite every case label that targeted
	       the old destination.  */
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (as_a <gtransaction *> (stmt),
				      gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5644 /* Returns true if it is possible to remove edge E by redirecting
5645 it to the destination of the other edge from E->src. */
5647 static bool
5648 gimple_can_remove_branch_p (const_edge e)
5650 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5651 return false;
5653 return true;
5656 /* Simple wrapper, as we can always redirect fallthru edges. */
5658 static basic_block
5659 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5661 e = gimple_redirect_edge_and_branch (e, dest);
5662 gcc_assert (e);
5664 return NULL;
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label is the same as splitting after the labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing after the split point: NEW_BB stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5727 /* Moves basic block BB after block AFTER. */
5729 static bool
5730 gimple_move_block_after (basic_block bb, basic_block after)
5732 if (bb->prev_bb == after)
5733 return true;
5735 unlink_block (bb);
5736 link_block (bb, after);
5738 return true;
5742 /* Return TRUE if block BB has no executable statements, otherwise return
5743 FALSE. */
5745 static bool
5746 gimple_empty_block_p (basic_block bb)
5748 /* BB must have no executable statements. */
5749 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5750 if (phi_nodes (bb))
5751 return false;
5752 if (gsi_end_p (gsi))
5753 return true;
5754 if (is_gimple_debug (gsi_stmt (gsi)))
5755 gsi_next_nondebug (&gsi);
5756 return gsi_end_p (gsi);
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the new block on
   success, NULL if BB does not end in a GIMPLE_COND/GIMPLE_SWITCH.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple last, split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  /* Split just before the branch; if the branch is the first real
     statement the split point becomes NULL, i.e. after the labels.  */
  gsi_prev_nondebug (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
5780 /* Return true if basic_block can be duplicated. */
5782 static bool
5783 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5785 return true;
/* Create a duplicate of the basic block BB.  NOTE:  This does not
   preserve SSA form.  New SSA names are created for every definition in
   the copy; PHI arguments are left for the caller to fill in once the
   incoming edges exist.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple stmt, copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The original edge corresponding to E_COPY is located (via the
   BB_DUPLICATED / get_bb_original mapping) and its PHI argument values
   are copied onto E_COPY.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source/destination back to their originals.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* PHIs in the copy are in the same order as in the original, so walk
     both sequences in lockstep.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5920 /* Basic block BB_COPY was created by code duplication. Add phi node
5921 arguments for edges going out of BB_COPY. The blocks that were
5922 duplicated have BB_DUPLICATED set. */
5924 void
5925 add_phi_args_after_copy_bb (basic_block bb_copy)
5927 edge e_copy;
5928 edge_iterator ei;
5930 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5932 add_phi_args_after_copy_edge (e_copy);
5936 /* Blocks in REGION_COPY array of length N_REGION were created by
5937 duplication of basic blocks. Add phi node arguments for edges
5938 going from these blocks. If E_COPY is not NULL, also add
5939 phi node arguments for its destination.*/
5941 void
5942 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5943 edge e_copy)
5945 unsigned i;
5947 for (i = 0; i < n_region; i++)
5948 region_copy[i]->flags |= BB_DUPLICATED;
5950 for (i = 0; i < n_region; i++)
5951 add_phi_args_after_copy_bb (region_copy[i]);
5952 if (e_copy)
5953 add_phi_args_after_copy_edge (e_copy);
5955 for (i = 0; i < n_region; i++)
5956 region_copy[i]->flags &= ~BB_DUPLICATED;
5959 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5960 important exit edge EXIT. By important we mean that no SSA name defined
5961 inside region is live over the other exit edges of the region. All entry
5962 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5963 to the duplicate of the region. Dominance and loop information is
5964 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5965 UPDATE_DOMINANCE is false then we assume that the caller will update the
5966 dominance information after calling this function. The new basic
5967 blocks are stored to REGION_COPY in the same order as they had in REGION,
5968 provided that REGION_COPY is not NULL.
5969 The function returns false if it is unable to copy the region,
5970 true otherwise. */
5972 bool
5973 gimple_duplicate_sese_region (edge entry, edge exit,
5974 basic_block *region, unsigned n_region,
5975 basic_block *region_copy,
5976 bool update_dominance)
5978 unsigned i;
5979 bool free_region_copy = false, copying_header = false;
5980 struct loop *loop = entry->dest->loop_father;
5981 edge exit_copy;
5982 vec<basic_block> doms;
5983 edge redirected;
5984 int total_freq = 0, entry_freq = 0;
5985 gcov_type total_count = 0, entry_count = 0;
5987 if (!can_copy_bbs_p (region, n_region))
5988 return false;
5990 /* Some sanity checking. Note that we do not check for all possible
5991 missuses of the functions. I.e. if you ask to copy something weird,
5992 it will work, but the state of structures probably will not be
5993 correct. */
5994 for (i = 0; i < n_region; i++)
5996 /* We do not handle subloops, i.e. all the blocks must belong to the
5997 same loop. */
5998 if (region[i]->loop_father != loop)
5999 return false;
6001 if (region[i] != entry->dest
6002 && region[i] == loop->header)
6003 return false;
6006 /* In case the function is used for loop header copying (which is the primary
6007 use), ensure that EXIT and its copy will be new latch and entry edges. */
6008 if (loop->header == entry->dest)
6010 copying_header = true;
6012 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6013 return false;
6015 for (i = 0; i < n_region; i++)
6016 if (region[i] != exit->src
6017 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6018 return false;
6021 initialize_original_copy_tables ();
/* When copying the header, the copy conceptually belongs to the loop
   enclosing LOOP; otherwise it stays in LOOP itself.  */
6023 if (copying_header)
6024 set_loop_copy (loop, loop_outer (loop));
6025 else
6026 set_loop_copy (loop, loop);
6028 if (!region_copy)
6030 region_copy = XNEWVEC (basic_block, n_region)
6031 free_region_copy = true;
6034 /* Record blocks outside the region that are dominated by something
6035 inside. */
6036 if (update_dominance)
6038 doms.create (0);
6039 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Split the profile between the original region and the copy: the
   copy receives the flow that comes through ENTRY, the original keeps
   the rest.  Counts are used when real profile data is present,
   frequencies otherwise.  */
6042 if (entry->dest->count)
6044 total_count = entry->dest->count;
6045 entry_count = entry->count;
6046 /* Fix up corner cases, to avoid division by zero or creation of negative
6047 frequencies. */
6048 if (entry_count > total_count)
6049 entry_count = total_count;
6051 else
6053 total_freq = entry->dest->frequency;
6054 entry_freq = EDGE_FREQUENCY (entry);
6055 /* Fix up corner cases, to avoid division by zero or creation of negative
6056 frequencies. */
6057 if (total_freq == 0)
6058 total_freq = 1;
6059 else if (entry_freq > total_freq)
6060 entry_freq = total_freq;
/* Duplicate the blocks; the copy is placed at a split of ENTRY.  */
6063 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6064 split_edge_bb_loc (entry), update_dominance);
6065 if (total_count)
6067 scale_bbs_frequencies_gcov_type (region, n_region,
6068 total_count - entry_count,
6069 total_count);
6070 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6071 total_count);
6073 else
6075 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6076 total_freq);
6077 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* After header copying the original header's role changes: the exit
   destination becomes the new header, the exit source the new latch.  */
6080 if (copying_header)
6082 loop->header = exit->dest;
6083 loop->latch = exit->src;
6086 /* Redirect the entry and add the phi node arguments. */
6087 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6088 gcc_assert (redirected != NULL);
6089 flush_pending_stmts (entry);
6091 /* Concerning updating of dominators: We must recount dominators
6092 for entry block and its copy. Anything that is outside of the
6093 region, but was dominated by something inside needs recounting as
6094 well. */
6095 if (update_dominance)
6097 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6098 doms.safe_push (get_bb_original (entry->dest));
6099 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6100 doms.release ();
6103 /* Add the other PHI node arguments. */
6104 add_phi_args_after_copy (region_copy, n_region, NULL);
6106 if (free_region_copy)
6107 free (region_copy);
6109 free_original_copy_tables ();
6110 return true;
6113 /* Checks if BB is part of the region defined by N_REGION BBS. */
6114 static bool
6115 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6117 unsigned int n;
6119 for (n = 0; n < n_region; n++)
6121 if (bb == bbs[n])
6122 return true;
6124 return false;
6127 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6128 are stored to REGION_COPY in the same order in that they appear
6129 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6130 the region, EXIT an exit from it. The condition guarding EXIT
6131 is moved to ENTRY. Returns true if duplication succeeds, false
6132 otherwise.
6134 For example,
6136 some_code;
6137 if (cond)
6139 else
6142 is transformed to
6144 if (cond)
6146 some_code;
6149 else
6151 some_code;
6156 bool
6157 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6158 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6159 basic_block *region_copy ATTRIBUTE_UNUSED)
/* NOTE(review): despite the ATTRIBUTE_UNUSED markers, every parameter
   is in fact used below; the attributes look stale — confirm and
   consider removing them.  */
6161 unsigned i;
6162 bool free_region_copy = false;
6163 struct loop *loop = exit->dest->loop_father;
6164 struct loop *orig_loop = entry->dest->loop_father;
6165 basic_block switch_bb, entry_bb, nentry_bb;
6166 vec<basic_block> doms;
6167 int total_freq = 0, exit_freq = 0;
6168 gcov_type total_count = 0, exit_count = 0;
6169 edge exits[2], nexits[2], e;
6170 gimple_stmt_iterator gsi;
6171 gimple cond_stmt;
6172 edge sorig, snew;
6173 basic_block exit_bb;
6174 gphi_iterator psi;
6175 gphi *phi;
6176 tree def;
6177 struct loop *target, *aloop, *cloop;
/* EXIT->src must end in a two-way conditional; remember both outgoing
   edges, the region exit first.  */
6179 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6180 exits[0] = exit;
6181 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6183 if (!can_copy_bbs_p (region, n_region))
6184 return false;
6186 initialize_original_copy_tables ();
6187 set_loop_copy (orig_loop, loop);
/* Duplicate the loop structure of any subloop of ORIG_LOOP whose
   header lies inside the region being copied.  */
6189 target= loop;
6190 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6192 if (bb_part_of_region_p (aloop->header, region, n_region))
6194 cloop = duplicate_loop (aloop, target);
6195 duplicate_subloops (aloop, cloop);
6199 if (!region_copy)
6201 region_copy = XNEWVEC (basic_block, n_region);
6202 free_region_copy = true;
6205 gcc_assert (!need_ssa_update_p (cfun));
6207 /* Record blocks outside the region that are dominated by something
6208 inside. */
6209 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Split the profile between the region and its copy in proportion to
   the flow leaving through EXIT.  Counts are used when real profile
   data is present, frequencies otherwise.  */
6211 if (exit->src->count)
6213 total_count = exit->src->count;
6214 exit_count = exit->count;
6215 /* Fix up corner cases, to avoid division by zero or creation of negative
6216 frequencies. */
6217 if (exit_count > total_count)
6218 exit_count = total_count;
6220 else
6222 total_freq = exit->src->frequency;
6223 exit_freq = EDGE_FREQUENCY (exit);
6224 /* Fix up corner cases, to avoid division by zero or creation of negative
6225 frequencies. */
6226 if (total_freq == 0)
6227 total_freq = 1;
6228 if (exit_freq > total_freq)
6229 exit_freq = total_freq;
6232 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6233 split_edge_bb_loc (exit), true);
6234 if (total_count)
6236 scale_bbs_frequencies_gcov_type (region, n_region,
6237 total_count - exit_count,
6238 total_count);
6239 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6240 total_count);
6242 else
6244 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6245 total_freq);
6246 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6249 /* Create the switch block, and put the exit condition to it. */
6250 entry_bb = entry->dest;
6251 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY->src as the switch block if it can hold the new
   condition, otherwise split ENTRY to make room.  */
6252 if (!last_stmt (entry->src)
6253 || !stmt_ends_bb_p (last_stmt (entry->src)))
6254 switch_bb = entry->src;
6255 else
6256 switch_bb = split_edge (entry);
6257 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
/* Copy the guarding GIMPLE_COND from the exit block into SWITCH_BB so
   the decision is taken before the region executes.  */
6259 gsi = gsi_last_bb (switch_bb);
6260 cond_stmt = last_stmt (exit->src);
6261 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6262 cond_stmt = gimple_copy (cond_stmt);
6264 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6266 sorig = single_succ_edge (switch_bb);
6267 sorig->flags = exits[1]->flags;
6268 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6270 /* Register the new edge from SWITCH_BB in loop exit lists. */
6271 rescan_loop_exit (snew, true, false);
6273 /* Add the PHI node arguments. */
6274 add_phi_args_after_copy (region_copy, n_region, snew);
6276 /* Get rid of now superfluous conditions and associated edges (and phi node
6277 arguments). */
6278 exit_bb = exit->dest;
6280 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6281 PENDING_STMT (e) = NULL;
6283 /* The latch of ORIG_LOOP was copied, and so was the backedge
6284 to the original header. We redirect this backedge to EXIT_BB. */
6285 for (i = 0; i < n_region; i++)
6286 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6288 gcc_assert (single_succ_edge (region_copy[i]));
6289 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6290 PENDING_STMT (e) = NULL;
/* Give EXIT_BB's PHIs an argument for the newly redirected edge,
   reusing the value that flowed in from the copied exit edge.  */
6291 for (psi = gsi_start_phis (exit_bb);
6292 !gsi_end_p (psi);
6293 gsi_next (&psi))
6295 phi = psi.phi ();
6296 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6297 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6300 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6301 PENDING_STMT (e) = NULL;
6303 /* Anything that is outside of the region, but was dominated by something
6304 inside needs to update dominance info. */
6305 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6306 doms.release ();
6307 /* Update the SSA web. */
6308 update_ssa (TODO_update_ssa);
6310 if (free_region_copy)
6311 free (region_copy);
6313 free_original_copy_tables ();
6314 return true;
6317 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6318 adding blocks when the dominator traversal reaches EXIT. This
6319 function silently assumes that ENTRY strictly dominates EXIT. */
6321 void
6322 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6323 vec<basic_block> *bbs_p)
6325 basic_block son;
6327 for (son = first_dom_son (CDI_DOMINATORS, entry);
6328 son;
6329 son = next_dom_son (CDI_DOMINATORS, son))
6331 bbs_p->safe_push (son);
6332 if (son != exit)
6333 gather_blocks_in_sese_region (son, exit, bbs_p);
6337 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6338 The duplicates are recorded in VARS_MAP. */
6340 static void
6341 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6342 tree to_context)
6344 tree t = *tp, new_t;
6345 struct function *f = DECL_STRUCT_FUNCTION (to_context);
/* Nothing to do if the decl already belongs to TO_CONTEXT.  */
6347 if (DECL_CONTEXT (t) == to_context)
6348 return;
6350 bool existed;
/* LOC is a reference into VARS_MAP; storing through it below records
   the duplicate for subsequent lookups.  */
6351 tree &loc = vars_map->get_or_insert (t, &existed);
6353 if (!existed)
/* First time we see T: create the duplicate.  Variables become new
   locals of the destination function; otherwise T must be a
   CONST_DECL and a plain node copy suffices.  */
6355 if (SSA_VAR_P (t))
6357 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6358 add_local_decl (f, new_t);
6360 else
6362 gcc_assert (TREE_CODE (t) == CONST_DECL);
6363 new_t = copy_node (t);
6365 DECL_CONTEXT (new_t) = to_context;
6367 loc = new_t;
6369 else
6370 new_t = loc;
6372 *tp = new_t;
6376 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6377 VARS_MAP maps old ssa names and var_decls to the new ones. */
6379 static tree
6380 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6381 tree to_context)
6383 tree new_name;
/* Virtual operands are never moved; callers must drop them (the
   destination function recomputes aliasing).  */
6385 gcc_assert (!virtual_operand_p (name));
6387 tree *loc = vars_map->get (name);
6389 if (!loc)
/* Not seen before: build an equivalent SSA name in TO_CONTEXT, first
   duplicating the underlying variable (if NAME has one) and keeping
   default-def status in sync.  */
6391 tree decl = SSA_NAME_VAR (name);
6392 if (decl)
6394 replace_by_duplicate_decl (&decl, vars_map, to_context);
6395 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6396 decl, SSA_NAME_DEF_STMT (name));
6397 if (SSA_NAME_IS_DEFAULT_DEF (name))
6398 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6399 decl, new_name);
6401 else
6402 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6403 name, SSA_NAME_DEF_STMT (name));
6405 vars_map->put (name, new_name);
6407 else
6408 new_name = *loc;
6410 return new_name;
/* State shared by the move_stmt_r / move_stmt_op walkers when moving
   statements between functions; passed via walk_stmt_info.info.  */
6413 struct move_stmt_d
/* Block the statements used to belong to; NULL_TREE matches any
   non-NULL block (see move_stmt_op).  */
6415 tree orig_block;
/* Block the statements are re-parented into.  */
6416 tree new_block;
/* FUNCTION_DECL of the source function.  */
6417 tree from_context;
/* FUNCTION_DECL of the destination function.  */
6418 tree to_context;
/* Map from old decls/SSA names to their duplicates in TO_CONTEXT.  */
6419 hash_map<tree, tree> *vars_map;
/* Optional tree_map table mapping old labels to replacements created
   while duplicating EH regions, or NULL.  */
6420 htab_t new_label_map;
/* Map from source EH regions to their duplicates, or NULL.  */
6421 hash_map<void *, void *> *eh_map;
/* Whether local decls should be remapped; temporarily cleared while
   walking inside OMP directives (see move_stmt_r).  */
6422 bool remap_decls_p;
6425 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6426 contained in *TP if it has been ORIG_BLOCK previously and change the
6427 DECL_CONTEXT of every local variable referenced in *TP. */
6429 static tree
6430 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6432 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6433 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6434 tree t = *tp;
6436 if (EXPR_P (t))
/* Re-parent the expression's location block when it belonged to
   ORIG_BLOCK (or to any block if ORIG_BLOCK is NULL).  */
6438 tree block = TREE_BLOCK (t);
6439 if (block == p->orig_block
6440 || (p->orig_block == NULL_TREE
6441 && block != NULL_TREE))
6442 TREE_SET_BLOCK (t, p->new_block);
6443 #ifdef ENABLE_CHECKING
6444 else if (block != NULL_TREE)
/* With checking enabled, verify that any other block is nested
   somewhere inside ORIG_BLOCK.  */
6446 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6447 block = BLOCK_SUPERCONTEXT (block);
6448 gcc_assert (block == p->orig_block);
6450 #endif
6452 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6454 if (TREE_CODE (t) == SSA_NAME)
6455 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6456 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels that were replaced while duplicating EH regions are looked
   up in NEW_LABEL_MAP; all labels then migrate to TO_CONTEXT.  */
6458 if (p->new_label_map)
6460 struct tree_map in, *out;
6461 in.base.from = t;
6462 out = (struct tree_map *)
6463 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6464 if (out)
6465 *tp = t = out->to;
6468 DECL_CONTEXT (t) = p->to_context;
6470 else if (p->remap_decls_p)
6472 /* Replace T with its duplicate. T should no longer appear in the
6473 parent function, so this looks wasteful; however, it may appear
6474 in referenced_vars, and more importantly, as virtual operands of
6475 statements, and in alias lists of other variables. It would be
6476 quite difficult to expunge it from all those places. ??? It might
6477 suffice to do this for addressable variables. */
6478 if ((TREE_CODE (t) == VAR_DECL
6479 && !is_global_var (t))
6480 || TREE_CODE (t) == CONST_DECL)
6481 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Decls and SSA names have no interesting subtrees to walk.  */
6483 *walk_subtrees = 0;
6485 else if (TYPE_P (t))
6486 *walk_subtrees = 0;
6488 return NULL_TREE;
6491 /* Helper for move_stmt_r. Given an EH region number for the source
6492 function, map that to the duplicate EH regio number in the dest. */
6494 static int
6495 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6497 eh_region old_r, new_r;
6499 old_r = get_eh_region_from_number (old_nr);
6500 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6502 return new_r->index;
6505 /* Similar, but operate on INTEGER_CSTs. */
6507 static tree
6508 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6510 int old_nr, new_nr;
6512 old_nr = tree_to_shwi (old_t_nr);
6513 new_nr = move_stmt_eh_region_nr (old_nr, p);
6515 return build_int_cst (integer_type_node, new_nr);
6518 /* Like move_stmt_op, but for gimple statements.
6520 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6521 contained in the current statement in *GSI_P and change the
6522 DECL_CONTEXT of every local variable referenced in the current
6523 statement. */
6525 static tree
6526 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6527 struct walk_stmt_info *wi)
6529 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6530 gimple stmt = gsi_stmt (*gsi_p);
6531 tree block = gimple_block (stmt);
6533 if (block == p->orig_block
6534 || (p->orig_block == NULL_TREE
6535 && block != NULL_TREE))
6536 gimple_set_block (stmt, p->new_block);
/* Statements that refer to EH regions by number must be remapped to
   the duplicated regions of the destination function.  */
6538 switch (gimple_code (stmt))
6540 case GIMPLE_CALL:
6541 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6543 tree r, fndecl = gimple_call_fndecl (stmt);
6544 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6545 switch (DECL_FUNCTION_CODE (fndecl))
6547 case BUILT_IN_EH_COPY_VALUES:
/* Argument 1 is also a region number; then fall through to remap
   argument 0 like the other EH builtins.  */
6548 r = gimple_call_arg (stmt, 1);
6549 r = move_stmt_eh_region_tree_nr (r, p);
6550 gimple_call_set_arg (stmt, 1, r);
6551 /* FALLTHRU */
6553 case BUILT_IN_EH_POINTER:
6554 case BUILT_IN_EH_FILTER:
6555 r = gimple_call_arg (stmt, 0);
6556 r = move_stmt_eh_region_tree_nr (r, p);
6557 gimple_call_set_arg (stmt, 0, r);
6558 break;
6560 default:
6561 break;
6564 break;
6566 case GIMPLE_RESX:
6568 gresx *resx_stmt = as_a <gresx *> (stmt);
6569 int r = gimple_resx_region (resx_stmt);
6570 r = move_stmt_eh_region_nr (r, p);
6571 gimple_resx_set_region (resx_stmt, r);
6573 break;
6575 case GIMPLE_EH_DISPATCH:
6577 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6578 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6579 r = move_stmt_eh_region_nr (r, p);
6580 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6582 break;
6584 case GIMPLE_OMP_RETURN:
6585 case GIMPLE_OMP_CONTINUE:
6586 break;
6587 default:
6588 if (is_gimple_omp (stmt))
6590 /* Do not remap variables inside OMP directives. Variables
6591 referenced in clauses and directive header belong to the
6592 parent function and should not be moved into the child
6593 function. */
6594 bool save_remap_decls_p = p->remap_decls_p;
6595 p->remap_decls_p = false;
6596 *handled_ops_p = true;
6598 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6599 move_stmt_op, wi);
6601 p->remap_decls_p = save_remap_decls_p;
6603 break;
6606 return NULL_TREE;
6609 /* Move basic block BB from function CFUN to function DEST_FN. The
6610 block is moved out of the original linked list and placed after
6611 block AFTER in the new list. Also, the block is removed from the
6612 original array of blocks and placed in DEST_FN's array of blocks.
6613 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6614 updated to reflect the moved edges.
6616 The local variables are remapped to new instances, VARS_MAP is used
6617 to record the mapping. */
6619 static void
6620 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6621 basic_block after, bool update_edge_count_p,
6622 struct move_stmt_d *d)
6624 struct control_flow_graph *cfg;
6625 edge_iterator ei;
6626 edge e;
6627 gimple_stmt_iterator si;
6628 unsigned old_len, new_len;
6630 /* Remove BB from dominance structures. */
6631 delete_from_dominance_info (CDI_DOMINATORS, bb);
6633 /* Move BB from its current loop to the copy in the new function. */
6634 if (current_loops)
/* loop_father->aux was set up by the caller (move_sese_region_to_fn)
   to point at the destination loop.  */
6636 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6637 if (new_loop)
6638 bb->loop_father = new_loop;
6641 /* Link BB to the new linked list. */
6642 move_block_after (bb, after);
6644 /* Update the edge count in the corresponding flowgraphs. */
6645 if (update_edge_count_p)
6646 FOR_EACH_EDGE (e, ei, bb->succs)
6648 cfun->cfg->x_n_edges--;
6649 dest_cfun->cfg->x_n_edges++;
6652 /* Remove BB from the original basic block array. */
6653 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6654 cfun->cfg->x_n_basic_blocks--;
6656 /* Grow DEST_CFUN's basic block array if needed. */
6657 cfg = dest_cfun->cfg;
6658 cfg->x_n_basic_blocks++;
6659 if (bb->index >= cfg->x_last_basic_block)
6660 cfg->x_last_basic_block = bb->index + 1;
6662 old_len = vec_safe_length (cfg->x_basic_block_info);
6663 if ((unsigned) cfg->x_last_basic_block >= old_len)
6665 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6666 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6669 (*cfg->x_basic_block_info)[bb->index] = bb;
6671 /* Remap the variables in phi nodes. */
6672 for (gphi_iterator psi = gsi_start_phis (bb);
6673 !gsi_end_p (psi); )
6675 gphi *phi = psi.phi ();
6676 use_operand_p use;
6677 tree op = PHI_RESULT (phi);
6678 ssa_op_iter oi;
6679 unsigned i;
6681 if (virtual_operand_p (op))
6683 /* Remove the phi nodes for virtual operands (alias analysis will be
6684 run for the new function, anyway). */
6685 remove_phi_node (&psi, true);
6686 continue;
6689 SET_PHI_RESULT (phi,
6690 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6691 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6693 op = USE_FROM_PTR (use);
6694 if (TREE_CODE (op) == SSA_NAME)
6695 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Re-parent PHI argument locations whose block matches ORIG_BLOCK.  */
6698 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6700 location_t locus = gimple_phi_arg_location (phi, i);
6701 tree block = LOCATION_BLOCK (locus);
6703 if (locus == UNKNOWN_LOCATION)
6704 continue;
6705 if (d->orig_block == NULL_TREE || block == d->orig_block)
6707 if (d->new_block == NULL_TREE)
6708 locus = LOCATION_LOCUS (locus);
6709 else
6710 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6711 gimple_phi_arg_set_location (phi, i, locus);
6715 gsi_next (&psi);
/* Remap operands, labels, EH info and histograms in every statement.  */
6718 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6720 gimple stmt = gsi_stmt (si);
6721 struct walk_stmt_info wi;
6723 memset (&wi, 0, sizeof (wi));
6724 wi.info = d;
6725 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6727 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
/* Transfer the label from CFUN's label_to_block map to DEST_CFUN's,
   growing the latter as needed.  */
6729 tree label = gimple_label_label (label_stmt);
6730 int uid = LABEL_DECL_UID (label);
6732 gcc_assert (uid > -1);
6734 old_len = vec_safe_length (cfg->x_label_to_block_map);
6735 if (old_len <= (unsigned) uid)
6737 new_len = 3 * uid / 2 + 1;
6738 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6741 (*cfg->x_label_to_block_map)[uid] = bb;
6742 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6744 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6746 if (uid >= dest_cfun->cfg->last_label_uid)
6747 dest_cfun->cfg->last_label_uid = uid + 1;
6750 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6751 remove_stmt_from_eh_lp_fn (cfun, stmt);
6753 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6754 gimple_remove_stmt_histograms (cfun, stmt);
6756 /* We cannot leave any operands allocated from the operand caches of
6757 the current function. */
6758 free_stmt_operands (cfun, stmt);
6759 push_cfun (dest_cfun);
6760 update_stmt (stmt);
6761 pop_cfun ();
/* Re-parent goto_locus blocks on outgoing edges as well.  */
6764 FOR_EACH_EDGE (e, ei, bb->succs)
6765 if (e->goto_locus != UNKNOWN_LOCATION)
6767 tree block = LOCATION_BLOCK (e->goto_locus);
6768 if (d->orig_block == NULL_TREE
6769 || block == d->orig_block)
6770 e->goto_locus = d->new_block ?
6771 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6772 LOCATION_LOCUS (e->goto_locus);
6776 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6777 the outermost EH region. Use REGION as the incoming base EH region. */
6779 static eh_region
6780 find_outermost_region_in_block (struct function *src_cfun,
6781 basic_block bb, eh_region region)
6783 gimple_stmt_iterator si;
6785 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6787 gimple stmt = gsi_stmt (si);
6788 eh_region stmt_region;
6789 int lp_nr;
6791 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6792 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6793 if (stmt_region)
6795 if (region == NULL)
6796 region = stmt_region;
6797 else if (stmt_region != region)
6799 region = eh_region_outermost (src_cfun, stmt_region, region);
6800 gcc_assert (region != NULL);
6805 return region;
6808 static tree
6809 new_label_mapper (tree decl, void *data)
6811 htab_t hash = (htab_t) data;
6812 struct tree_map *m;
6813 void **slot;
6815 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6817 m = XNEW (struct tree_map);
6818 m->hash = DECL_UID (decl);
6819 m->base.from = decl;
6820 m->to = create_artificial_label (UNKNOWN_LOCATION);
6821 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6822 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6823 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6825 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6826 gcc_assert (*slot == NULL);
6828 *slot = m;
6830 return m->to;
6833 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6834 subblocks. */
6836 static void
6837 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6838 tree to_context)
6840 tree *tp, t;
/* Walk the variable chain through TP so the duplicate can be spliced
   in place of the original.  */
6842 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6844 t = *tp;
/* Only variables and constants are duplicated.  */
6845 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6846 continue;
6847 replace_by_duplicate_decl (&t, vars_map, to_context);
6848 if (t != *tp)
/* A duplicate was made: carry over any DECL_VALUE_EXPR, then splice
   it into the chain where the original was.  */
6850 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6852 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6853 DECL_HAS_VALUE_EXPR_P (t) = 1;
6855 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6856 *tp = t;
6860 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6861 replace_block_vars_by_duplicates (block, vars_map, to_context);
6864 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6865 from FN1 to FN2. */
6867 static void
6868 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6869 struct loop *loop)
6871 /* Discard it from the old loop array. */
6872 (*get_loops (fn1))[loop->num] = NULL;
6874 /* Place it in the new loop array, assigning it a new number. */
6875 loop->num = number_of_loops (fn2);
6876 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6878 /* Recurse to children. */
6879 for (loop = loop->inner; loop; loop = loop->next)
6880 fixup_loop_arrays_after_move (fn1, fn2, loop);
6883 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6884 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6885 single basic block in the original CFG and the new basic block is
6886 returned. DEST_CFUN must not have a CFG yet.
6888 Note that the region need not be a pure SESE region. Blocks inside
6889 the region may contain calls to abort/exit. The only restriction
6890 is that ENTRY_BB should be the only entry point and it must
6891 dominate EXIT_BB.
6893 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6894 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6895 to the new function.
6897 All local variables referenced in the region are assumed to be in
6898 the corresponding BLOCK_VARS and unexpanded variable lists
6899 associated with DEST_CFUN. */
6901 basic_block
6902 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6903 basic_block exit_bb, tree orig_block)
6905 vec<basic_block> bbs, dom_bbs;
6906 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6907 basic_block after, bb, *entry_pred, *exit_succ, abb;
6908 struct function *saved_cfun = cfun;
6909 int *entry_flag, *exit_flag;
6910 unsigned *entry_prob, *exit_prob;
6911 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6912 edge e;
6913 edge_iterator ei;
6914 htab_t new_label_map;
6915 hash_map<void *, void *> *eh_map;
6916 struct loop *loop = entry_bb->loop_father;
6917 struct loop *loop0 = get_loop (saved_cfun, 0);
6918 struct move_stmt_d d;
6920 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6921 region. */
6922 gcc_assert (entry_bb != exit_bb
6923 && (!exit_bb
6924 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6926 /* Collect all the blocks in the region. Manually add ENTRY_BB
6927 because it won't be added by dfs_enumerate_from. */
6928 bbs.create (0);
6929 bbs.safe_push (entry_bb);
6930 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6932 /* The blocks that used to be dominated by something in BBS will now be
6933 dominated by the new block. */
6934 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6935 bbs.address (),
6936 bbs.length ());
6938 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6939 the predecessor edges to ENTRY_BB and the successor edges to
6940 EXIT_BB so that we can re-attach them to the new basic block that
6941 will replace the region. */
6942 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6943 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6944 entry_flag = XNEWVEC (int, num_entry_edges);
6945 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6946 i = 0;
6947 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6949 entry_prob[i] = e->probability;
6950 entry_flag[i] = e->flags;
6951 entry_pred[i++] = e->src;
6952 remove_edge (e);
6955 if (exit_bb)
6957 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6958 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6959 exit_flag = XNEWVEC (int, num_exit_edges);
6960 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6961 i = 0;
6962 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6964 exit_prob[i] = e->probability;
6965 exit_flag[i] = e->flags;
6966 exit_succ[i++] = e->dest;
6967 remove_edge (e);
6970 else
6972 num_exit_edges = 0;
6973 exit_succ = NULL;
6974 exit_flag = NULL;
6975 exit_prob = NULL;
6978 /* Switch context to the child function to initialize DEST_FN's CFG. */
6979 gcc_assert (dest_cfun->cfg == NULL);
6980 push_cfun (dest_cfun);
6982 init_empty_tree_cfg ();
6984 /* Initialize EH information for the new function. */
6985 eh_map = NULL;
6986 new_label_map = NULL;
6987 if (saved_cfun->eh)
/* Find the one outermost EH region spanning the region's statements
   and duplicate it (with its labels) into the new function.  */
6989 eh_region region = NULL;
6991 FOR_EACH_VEC_ELT (bbs, i, bb)
6992 region = find_outermost_region_in_block (saved_cfun, bb, region);
6994 init_eh_for_function ();
6995 if (region != NULL)
6997 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6998 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6999 new_label_mapper, new_label_map);
7003 /* Initialize an empty loop tree. */
7004 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7005 init_loops_structure (dest_cfun, loops, 1);
7006 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7007 set_loops_for_fn (dest_cfun, loops);
7009 /* Move the outlined loop tree part. */
7010 num_nodes = bbs.length ();
7011 FOR_EACH_VEC_ELT (bbs, i, bb)
7013 if (bb->loop_father->header == bb)
7015 struct loop *this_loop = bb->loop_father;
7016 struct loop *outer = loop_outer (this_loop);
7017 if (outer == loop
7018 /* If the SESE region contains some bbs ending with
7019 a noreturn call, those are considered to belong
7020 to the outermost loop in saved_cfun, rather than
7021 the entry_bb's loop_father. */
7022 || outer == loop0)
7024 if (outer != loop)
7025 num_nodes -= this_loop->num_nodes;
7026 flow_loop_tree_node_remove (bb->loop_father);
7027 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7028 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7031 else if (bb->loop_father == loop0 && loop0 != loop)
7032 num_nodes--;
7034 /* Remove loop exits from the outlined region. */
7035 if (loops_for_fn (saved_cfun)->exits)
7036 FOR_EACH_EDGE (e, ei, bb->succs)
7038 struct loops *l = loops_for_fn (saved_cfun);
7039 loop_exit **slot
7040 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7041 NO_INSERT);
7042 if (slot)
7043 l->exits->clear_slot (slot);
7048 /* Adjust the number of blocks in the tree root of the outlined part. */
7049 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7051 /* Setup a mapping to be used by move_block_to_fn. */
7052 loop->aux = current_loops->tree_root;
7053 loop0->aux = current_loops->tree_root;
7055 pop_cfun ();
7057 /* Move blocks from BBS into DEST_CFUN. */
7058 gcc_assert (bbs.length () >= 2);
7059 after = dest_cfun->cfg->x_entry_block_ptr;
7060 hash_map<tree, tree> vars_map;
7062 memset (&d, 0, sizeof (d));
7063 d.orig_block = orig_block;
7064 d.new_block = DECL_INITIAL (dest_cfun->decl);
7065 d.from_context = cfun->decl;
7066 d.to_context = dest_cfun->decl;
7067 d.vars_map = &vars_map;
7068 d.new_label_map = new_label_map;
7069 d.eh_map = eh_map;
7070 d.remap_decls_p = true;
7072 FOR_EACH_VEC_ELT (bbs, i, bb)
7074 /* No need to update edge counts on the last block. It has
7075 already been updated earlier when we detached the region from
7076 the original CFG. */
7077 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7078 after = bb;
7081 loop->aux = NULL;
7082 loop0->aux = NULL;
7083 /* Loop sizes are no longer correct, fix them up. */
7084 loop->num_nodes -= num_nodes;
7085 for (struct loop *outer = loop_outer (loop);
7086 outer; outer = loop_outer (outer))
7087 outer->num_nodes -= num_nodes;
7088 loop0->num_nodes -= bbs.length () - num_nodes;
/* Duplicate simduid decls referenced by the moved loops and propagate
   the simduid/force_vectorize flags to the destination function.  */
7090 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7092 struct loop *aloop;
7093 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7094 if (aloop != NULL)
7096 if (aloop->simduid)
7098 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7099 d.to_context);
7100 dest_cfun->has_simduid_loops = true;
7102 if (aloop->force_vectorize)
7103 dest_cfun->has_force_vectorize_loops = true;
7107 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7108 if (orig_block)
7110 tree block;
7111 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7112 == NULL_TREE);
7113 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7114 = BLOCK_SUBBLOCKS (orig_block);
7115 for (block = BLOCK_SUBBLOCKS (orig_block);
7116 block; block = BLOCK_CHAIN (block))
7117 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7118 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7121 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7122 &vars_map, dest_cfun->decl);
7124 if (new_label_map)
7125 htab_delete (new_label_map);
7126 if (eh_map)
7127 delete eh_map;
7129 /* Rewire the entry and exit blocks. The successor to the entry
7130 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7131 the child function. Similarly, the predecessor of DEST_FN's
7132 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7133 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7134 various CFG manipulation function get to the right CFG.
7136 FIXME, this is silly. The CFG ought to become a parameter to
7137 these helpers. */
7138 push_cfun (dest_cfun);
7139 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7140 if (exit_bb)
7141 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7142 pop_cfun ();
7144 /* Back in the original function, the SESE region has disappeared,
7145 create a new basic block in its place. */
7146 bb = create_empty_bb (entry_pred[0]);
7147 if (current_loops)
7148 add_bb_to_loop (bb, loop);
7149 for (i = 0; i < num_entry_edges; i++)
7151 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7152 e->probability = entry_prob[i];
7155 for (i = 0; i < num_exit_edges; i++)
7157 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7158 e->probability = exit_prob[i];
7161 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7162 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7163 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7164 dom_bbs.release ();
7166 if (exit_bb)
7168 free (exit_prob);
7169 free (exit_flag);
7170 free (exit_succ);
7172 free (entry_prob);
7173 free (entry_flag);
7174 free (entry_pred);
7175 bbs.release ();
7177 return bb;
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  The dump adapts to how far the function has been
   lowered: tree form, flat GIMPLE sequence, or full CFG.  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Printing routines below consult current_function_decl; restore it
     before every return path.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list: "type name" pairs separated by commas.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump with no GIMPLE body falls back to dumping the decl tree
     itself and is done.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Anonymous SSA names (those without an underlying VAR_DECL) are
	 also invisible in BIND_EXPRs; list their declarations here.
	 Index 0 is never a valid SSA name, hence starting at 1.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* If the body is exactly one GIMPLE_BIND, print it directly;
	 the bind supplies its own braces.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
7348 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7350 static void
7351 print_pred_bbs (FILE *file, basic_block bb)
7353 edge e;
7354 edge_iterator ei;
7356 FOR_EACH_EDGE (e, ei, bb->preds)
7357 fprintf (file, "bb_%d ", e->src->index);
7361 /* Print on FILE the indexes for the successors of basic_block BB. */
7363 static void
7364 print_succ_bbs (FILE *file, basic_block bb)
7366 edge e;
7367 edge_iterator ei;
7369 FOR_EACH_EDGE (e, ei, bb->succs)
7370 fprintf (file, "bb_%d ", e->dest->index);
7373 /* Print to FILE the basic block BB following the VERBOSITY level. */
7375 void
7376 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7378 char *s_indent = (char *) alloca ((size_t) indent + 1);
7379 memset ((void *) s_indent, ' ', (size_t) indent);
7380 s_indent[indent] = '\0';
7382 /* Print basic_block's header. */
7383 if (verbosity >= 2)
7385 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7386 print_pred_bbs (file, bb);
7387 fprintf (file, "}, succs = {");
7388 print_succ_bbs (file, bb);
7389 fprintf (file, "})\n");
7392 /* Print basic_block's body. */
7393 if (verbosity >= 3)
7395 fprintf (file, "%s {\n", s_indent);
7396 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7397 fprintf (file, "%s }\n", s_indent);
7401 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  A loop without a header block has been
     deleted; nothing more to show for it.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body: the blocks belonging directly to LOOP, then the
     nested loops, recursively.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7462 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7463 spaces. Following VERBOSITY level this outputs the contents of the
7464 loop, or just its structure. */
7466 static void
7467 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7468 int verbosity)
7470 if (loop == NULL)
7471 return;
7473 print_loop (file, loop, indent, verbosity);
7474 print_loop_and_siblings (file, loop->next, indent, verbosity);
7477 /* Follow a CFG edge from the entry point of the program, and on entry
7478 of a loop, pretty print the loop structure on FILE. */
7480 void
7481 print_loops (FILE *file, int verbosity)
7483 basic_block bb;
7485 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7486 if (bb && bb->loop_father)
7487 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump a loop to stderr, structure only (verbosity 0).  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
/* Pointer variant of the above; prints "<nil>" for a NULL loop.  */

DEBUG_FUNCTION void
debug (struct loop *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump a loop verbosely to stderr (verbosity 3: structure, blocks and
   statements).  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7515 DEBUG_FUNCTION void
7516 debug_verbose (struct loop *ptr)
7518 if (ptr)
7519 debug (*ptr);
7520 else
7521 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up in the current function (cfun).  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
7550 /* Return true if BB ends with a call, possibly followed by some
7551 instructions that must stay with the call. Return false,
7552 otherwise. */
7554 static bool
7555 gimple_block_ends_with_call_p (basic_block bb)
7557 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7558 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7562 /* Return true if BB ends with a conditional branch. Return false,
7563 otherwise. */
7565 static bool
7566 gimple_block_ends_with_condjump_p (const_basic_block bb)
7568 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7569 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A nothrow builtin that cannot return twice needs no fake edge --
     except for fork, see below.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any call that might return normally needs the fake edge.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call still needs one unless it already has a
	 non-fake outgoing edge (EH or abnormal).  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile or input-consuming asm may not fall through.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* Committing the nop on the fallthru edge splits it,
		 giving the call its own block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements backwards, so statements after a split
	 point have already been handled when the split renumbers
	 blocks.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects the successors of the removed region that survive;
	 removed blocks themselves are cleared out below.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7878 /* Purge dead EH edges from basic block BB. */
7880 bool
7881 gimple_purge_dead_eh_edges (basic_block bb)
7883 bool changed = false;
7884 edge e;
7885 edge_iterator ei;
7886 gimple stmt = last_stmt (bb);
7888 if (stmt && stmt_can_throw_internal (stmt))
7889 return false;
7891 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7893 if (e->flags & EDGE_EH)
7895 remove_edge_and_dominated_blocks (e);
7896 changed = true;
7898 else
7899 ei_next (&ei);
7902 return changed;
7905 /* Purge dead EH edges from basic block listed in BLOCKS. */
7907 bool
7908 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7910 bool changed = false;
7911 unsigned i;
7912 bitmap_iterator bi;
7914 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7916 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7918 /* Earlier gimple_purge_dead_eh_edges could have removed
7919 this basic block already. */
7920 gcc_assert (bb || changed);
7921 if (bb != NULL)
7922 changed |= gimple_purge_dead_eh_edges (bb);
7925 return changed;
/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  /* Abnormal edges only exist for nonlocal labels / setjmp.  */
  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  /* If the last statement can still make an abnormal goto, every
     abnormal edge is live.  */
  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* A fallthru edge only loses its abnormal flag; others are
	     removed outright.  Note the iterator is not advanced in
	     either case: after clearing the flag the same edge fails
	     the test on the next iteration and is then skipped, and
	     after removal the vector has been re-packed.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
7962 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7964 bool
7965 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7967 bool changed = false;
7968 unsigned i;
7969 bitmap_iterator bi;
7971 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7973 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7975 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7976 this basic block already. */
7977 gcc_assert (bb || changed);
7978 if (bb != NULL)
7979 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7982 return changed;
7985 /* This function is called whenever a new edge is created or
7986 redirected. */
7988 static void
7989 gimple_execute_on_growing_pred (edge e)
7991 basic_block bb = e->dest;
7993 if (!gimple_seq_empty_p (phi_nodes (bb)))
7994 reserve_phi_args_for_new_edge (bb);
7997 /* This function is called immediately before edge E is removed from
7998 the edge vector E->dest->preds. */
8000 static void
8001 gimple_execute_on_shrinking_pred (edge e)
8003 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8004 remove_phi_args (e);
8007 /*---------------------------------------------------------------------------
8008 Helper functions for Loop versioning
8009 ---------------------------------------------------------------------------*/
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two iterators advance in lockstep, relying on FIRST being a
     copy of SECOND so their PHI chains correspond position by
     position.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8048 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8049 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8050 the destination of the ELSE part. */
8052 static void
8053 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8054 basic_block second_head ATTRIBUTE_UNUSED,
8055 basic_block cond_bb, void *cond_e)
8057 gimple_stmt_iterator gsi;
8058 gimple new_cond_expr;
8059 tree cond_expr = (tree) cond_e;
8060 edge e0;
8062 /* Build new conditional expr */
8063 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8064 NULL_TREE, NULL_TREE);
8066 /* Add new cond in cond_bb. */
8067 gsi = gsi_last_bb (cond_bb);
8068 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8070 /* Adjust edges appropriately to connect new head with first head
8071 as well as second head. */
8072 e0 = single_succ_edge (cond_bb);
8073 e0->flags &= ~EDGE_FALLTHRU;
8074 e0->flags |= EDGE_FALSE_VALUE;
8078 /* Do book-keeping of basic block BB for the profile consistency checker.
8079 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8080 then do post-pass accounting. Store the counting in RECORD. */
8081 static void
8082 gimple_account_profile_record (basic_block bb, int after_pass,
8083 struct profile_record *record)
8085 gimple_stmt_iterator i;
8086 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8088 record->size[after_pass]
8089 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8090 if (profile_status_for_fn (cfun) == PROFILE_READ)
8091 record->time[after_pass]
8092 += estimate_num_insns (gsi_stmt (i),
8093 &eni_time_weights) * bb->count;
8094 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8095 record->time[after_pass]
8096 += estimate_num_insns (gsi_stmt (i),
8097 &eni_time_weights) * bb->frequency;
/* Table of CFG hooks used while the IL is in GIMPLE form; see
   cfghooks.h for the meaning of each slot.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
/* Split all critical edges.  Returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge splitting pass ("crited").
   Requires a CFG and provides PROP_no_crit_edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory for the pass manager.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement at the end of BB (after the split point).  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block: taken when COND is true,
     falling back into the code after the split otherwise.  */
  new_bb = create_empty_bb (bb);
  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: it is now the FALSE leg of the condition.  */
  fall->flags = EDGE_FALSE_VALUE;

  /* Update dominance info: both NEW_BB and the join block are
     immediately dominated by BB.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
8264 /* Build a ternary operation and gimplify it. Emit code before GSI.
8265 Return the gimple_val holding the result. */
8267 tree
8268 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8269 tree type, tree a, tree b, tree c)
8271 tree ret;
8272 location_t loc = gimple_location (gsi_stmt (*gsi));
8274 ret = fold_build3_loc (loc, code, type, a, b, c);
8275 STRIP_NOPS (ret);
8277 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8278 GSI_SAME_STMT);
8281 /* Build a binary operation and gimplify it. Emit code before GSI.
8282 Return the gimple_val holding the result. */
8284 tree
8285 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8286 tree type, tree a, tree b)
8288 tree ret;
8290 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8291 STRIP_NOPS (ret);
8293 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8294 GSI_SAME_STMT);
8297 /* Build a unary operation and gimplify it. Emit code before GSI.
8298 Return the gimple_val holding the result. */
8300 tree
8301 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8302 tree a)
8304 tree ret;
8306 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8307 STRIP_NOPS (ret);
8309 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8310 GSI_SAME_STMT);
8315 /* Given a basic block B which ends with a conditional and has
8316 precisely two successors, determine which of the edges is taken if
8317 the conditional is true and which is taken if the conditional is
8318 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8320 void
8321 extract_true_false_edges_from_block (basic_block b,
8322 edge *true_edge,
8323 edge *false_edge)
8325 edge e = EDGE_SUCC (b, 0);
8327 if (e->flags & EDGE_TRUE_VALUE)
8329 *true_edge = e;
8330 *false_edge = EDGE_SUCC (b, 1);
8332 else
8334 *false_edge = e;
8335 *true_edge = EDGE_SUCC (b, 1);
8339 /* Emit return warnings. */
8341 namespace {
8343 const pass_data pass_data_warn_function_return =
8345 GIMPLE_PASS, /* type */
8346 "*warn_function_return", /* name */
8347 OPTGROUP_NONE, /* optinfo_flags */
8348 TV_NONE, /* tv_id */
8349 PROP_cfg, /* properties_required */
8350 0, /* properties_provided */
8351 0, /* properties_destroyed */
8352 0, /* todo_flags_start */
8353 0, /* todo_flags_finish */
8356 class pass_warn_function_return : public gimple_opt_pass
8358 public:
8359 pass_warn_function_return (gcc::context *ctxt)
8360 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8363 /* opt_pass methods: */
8364 virtual unsigned int execute (function *);
8366 }; // class pass_warn_function_return
8368 unsigned int
8369 pass_warn_function_return::execute (function *fun)
8371 source_location location;
8372 gimple last;
8373 edge e;
8374 edge_iterator ei;
8376 if (!targetm.warn_func_return (fun->decl))
8377 return 0;
8379 /* If we have a path to EXIT, then we do return. */
8380 if (TREE_THIS_VOLATILE (fun->decl)
8381 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8383 location = UNKNOWN_LOCATION;
8384 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8386 last = last_stmt (e->src);
8387 if ((gimple_code (last) == GIMPLE_RETURN
8388 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8389 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8390 break;
8392 if (location == UNKNOWN_LOCATION)
8393 location = cfun->function_end_locus;
8394 warning_at (location, 0, "%<noreturn%> function does return");
8397 /* If we see "return;" in some basic block, then we do reach the end
8398 without returning a value. */
8399 else if (warn_return_type
8400 && !TREE_NO_WARNING (fun->decl)
8401 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8402 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8404 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8406 gimple last = last_stmt (e->src);
8407 greturn *return_stmt = dyn_cast <greturn *> (last);
8408 if (return_stmt
8409 && gimple_return_retval (return_stmt) == NULL
8410 && !gimple_no_warning_p (last))
8412 location = gimple_location (last);
8413 if (location == UNKNOWN_LOCATION)
8414 location = fun->function_end_locus;
8415 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8416 TREE_NO_WARNING (fun->decl) = 1;
8417 break;
8421 return 0;
8424 } // anon namespace
8426 gimple_opt_pass *
8427 make_pass_warn_function_return (gcc::context *ctxt)
8429 return new pass_warn_function_return (ctxt);
8432 /* Walk a gimplified function and warn for functions whose return value is
8433 ignored and attribute((warn_unused_result)) is set. This is done before
8434 inlining, so we don't have to worry about that. */
8436 static void
8437 do_warn_unused_result (gimple_seq seq)
8439 tree fdecl, ftype;
8440 gimple_stmt_iterator i;
8442 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8444 gimple g = gsi_stmt (i);
8446 switch (gimple_code (g))
8448 case GIMPLE_BIND:
8449 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8450 break;
8451 case GIMPLE_TRY:
8452 do_warn_unused_result (gimple_try_eval (g));
8453 do_warn_unused_result (gimple_try_cleanup (g));
8454 break;
8455 case GIMPLE_CATCH:
8456 do_warn_unused_result (gimple_catch_handler (
8457 as_a <gcatch *> (g)));
8458 break;
8459 case GIMPLE_EH_FILTER:
8460 do_warn_unused_result (gimple_eh_filter_failure (g));
8461 break;
8463 case GIMPLE_CALL:
8464 if (gimple_call_lhs (g))
8465 break;
8466 if (gimple_call_internal_p (g))
8467 break;
8469 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8470 LHS. All calls whose value is ignored should be
8471 represented like this. Look for the attribute. */
8472 fdecl = gimple_call_fndecl (g);
8473 ftype = gimple_call_fntype (g);
8475 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8477 location_t loc = gimple_location (g);
8479 if (fdecl)
8480 warning_at (loc, OPT_Wunused_result,
8481 "ignoring return value of %qD, "
8482 "declared with attribute warn_unused_result",
8483 fdecl);
8484 else
8485 warning_at (loc, OPT_Wunused_result,
8486 "ignoring return value of function "
8487 "declared with attribute warn_unused_result");
8489 break;
8491 default:
8492 /* Not a container, not a call, or a call whose value is used. */
8493 break;
8498 namespace {
8500 const pass_data pass_data_warn_unused_result =
8502 GIMPLE_PASS, /* type */
8503 "*warn_unused_result", /* name */
8504 OPTGROUP_NONE, /* optinfo_flags */
8505 TV_NONE, /* tv_id */
8506 PROP_gimple_any, /* properties_required */
8507 0, /* properties_provided */
8508 0, /* properties_destroyed */
8509 0, /* todo_flags_start */
8510 0, /* todo_flags_finish */
8513 class pass_warn_unused_result : public gimple_opt_pass
8515 public:
8516 pass_warn_unused_result (gcc::context *ctxt)
8517 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8520 /* opt_pass methods: */
8521 virtual bool gate (function *) { return flag_warn_unused_result; }
8522 virtual unsigned int execute (function *)
8524 do_warn_unused_result (gimple_body (current_function_decl));
8525 return 0;
8528 }; // class pass_warn_unused_result
8530 } // anon namespace
8532 gimple_opt_pass *
8533 make_pass_warn_unused_result (gcc::context *ctxt)
8535 return new pass_warn_unused_result (ctxt);
8538 /* IPA passes, compilation of earlier functions or inlining
8539 might have changed some properties, such as marked functions nothrow,
8540 pure, const or noreturn.
8541 Remove redundant edges and basic blocks, and create new ones if necessary.
8543 This pass can't be executed as stand alone pass from pass manager, because
8544 in between inlining and this fixup the verify_flow_info would fail. */
8546 unsigned int
8547 execute_fixup_cfg (void)
8549 basic_block bb;
8550 gimple_stmt_iterator gsi;
8551 int todo = 0;
8552 gcov_type count_scale;
8553 edge e;
8554 edge_iterator ei;
8556 count_scale
8557 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8558 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8560 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8561 cgraph_node::get (current_function_decl)->count;
8562 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8563 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8564 count_scale);
8566 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8567 e->count = apply_scale (e->count, count_scale);
8569 FOR_EACH_BB_FN (bb, cfun)
8571 bb->count = apply_scale (bb->count, count_scale);
8572 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8574 gimple stmt = gsi_stmt (gsi);
8575 tree decl = is_gimple_call (stmt)
8576 ? gimple_call_fndecl (stmt)
8577 : NULL;
8578 if (decl)
8580 int flags = gimple_call_flags (stmt);
8581 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8583 if (gimple_purge_dead_abnormal_call_edges (bb))
8584 todo |= TODO_cleanup_cfg;
8586 if (gimple_in_ssa_p (cfun))
8588 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8589 update_stmt (stmt);
8593 if (flags & ECF_NORETURN
8594 && fixup_noreturn_call (stmt))
8595 todo |= TODO_cleanup_cfg;
8598 /* Remove stores to variables we marked write-only.
8599 Keep access when store has side effect, i.e. in case when source
8600 is volatile. */
8601 if (gimple_store_p (stmt)
8602 && !gimple_has_side_effects (stmt))
8604 tree lhs = get_base_address (gimple_get_lhs (stmt));
8606 if (TREE_CODE (lhs) == VAR_DECL
8607 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8608 && varpool_node::get (lhs)->writeonly)
8610 unlink_stmt_vdef (stmt);
8611 gsi_remove (&gsi, true);
8612 release_defs (stmt);
8613 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8614 continue;
8617 /* For calls we can simply remove LHS when it is known
8618 to be write-only. */
8619 if (is_gimple_call (stmt)
8620 && gimple_get_lhs (stmt))
8622 tree lhs = get_base_address (gimple_get_lhs (stmt));
8624 if (TREE_CODE (lhs) == VAR_DECL
8625 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8626 && varpool_node::get (lhs)->writeonly)
8628 gimple_call_set_lhs (stmt, NULL);
8629 update_stmt (stmt);
8630 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8634 if (maybe_clean_eh_stmt (stmt)
8635 && gimple_purge_dead_eh_edges (bb))
8636 todo |= TODO_cleanup_cfg;
8637 gsi_next (&gsi);
8640 FOR_EACH_EDGE (e, ei, bb->succs)
8641 e->count = apply_scale (e->count, count_scale);
8643 /* If we have a basic block with no successors that does not
8644 end with a control statement or a noreturn call end it with
8645 a call to __builtin_unreachable. This situation can occur
8646 when inlining a noreturn call that does in fact return. */
8647 if (EDGE_COUNT (bb->succs) == 0)
8649 gimple stmt = last_stmt (bb);
8650 if (!stmt
8651 || (!is_ctrl_stmt (stmt)
8652 && (!is_gimple_call (stmt)
8653 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8655 if (stmt && is_gimple_call (stmt))
8656 gimple_call_set_ctrl_altering (stmt, false);
8657 stmt = gimple_build_call
8658 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8659 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8660 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8664 if (count_scale != REG_BR_PROB_BASE)
8665 compute_function_frequency ();
8667 /* Dump a textual representation of the flowgraph. */
8668 if (dump_file)
8669 gimple_dump_cfg (dump_file, dump_flags);
8671 if (current_loops
8672 && (todo & TODO_cleanup_cfg))
8673 loops_state_set (LOOPS_NEED_FIXUP);
8675 return todo;
8678 namespace {
8680 const pass_data pass_data_fixup_cfg =
8682 GIMPLE_PASS, /* type */
8683 "*free_cfg_annotations", /* name */
8684 OPTGROUP_NONE, /* optinfo_flags */
8685 TV_NONE, /* tv_id */
8686 PROP_cfg, /* properties_required */
8687 0, /* properties_provided */
8688 0, /* properties_destroyed */
8689 0, /* todo_flags_start */
8690 0, /* todo_flags_finish */
8693 class pass_fixup_cfg : public gimple_opt_pass
8695 public:
8696 pass_fixup_cfg (gcc::context *ctxt)
8697 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8700 /* opt_pass methods: */
8701 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8702 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8704 }; // class pass_fixup_cfg
8706 } // anon namespace
8708 gimple_opt_pass *
8709 make_pass_fixup_cfg (gcc::context *ctxt)
8711 return new pass_fixup_cfg (ctxt);
8714 /* Garbage collection support for edge_def. */
8716 extern void gt_ggc_mx (tree&);
8717 extern void gt_ggc_mx (gimple&);
8718 extern void gt_ggc_mx (rtx&);
8719 extern void gt_ggc_mx (basic_block&);
8721 static void
8722 gt_ggc_mx (rtx_insn *& x)
8724 if (x)
8725 gt_ggc_mx_rtx_def ((void *) x);
8728 void
8729 gt_ggc_mx (edge_def *e)
8731 tree block = LOCATION_BLOCK (e->goto_locus);
8732 gt_ggc_mx (e->src);
8733 gt_ggc_mx (e->dest);
8734 if (current_ir_type () == IR_GIMPLE)
8735 gt_ggc_mx (e->insns.g);
8736 else
8737 gt_ggc_mx (e->insns.r);
8738 gt_ggc_mx (block);
8741 /* PCH support for edge_def. */
8743 extern void gt_pch_nx (tree&);
8744 extern void gt_pch_nx (gimple&);
8745 extern void gt_pch_nx (rtx&);
8746 extern void gt_pch_nx (basic_block&);
8748 static void
8749 gt_pch_nx (rtx_insn *& x)
8751 if (x)
8752 gt_pch_nx_rtx_def ((void *) x);
8755 void
8756 gt_pch_nx (edge_def *e)
8758 tree block = LOCATION_BLOCK (e->goto_locus);
8759 gt_pch_nx (e->src);
8760 gt_pch_nx (e->dest);
8761 if (current_ir_type () == IR_GIMPLE)
8762 gt_pch_nx (e->insns.g);
8763 else
8764 gt_pch_nx (e->insns.r);
8765 gt_pch_nx (block);
8768 void
8769 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8771 tree block = LOCATION_BLOCK (e->goto_locus);
8772 op (&(e->src), cookie);
8773 op (&(e->dest), cookie);
8774 if (current_ir_type () == IR_GIMPLE)
8775 op (&(e->insns.g), cookie);
8776 else
8777 op (&(e->insns.r), cookie);
8778 op (&(block), cookie);