Daily bump.
[official-gcc.git] / gcc / tree-cfg.c
blobfbbe9c8a9a5fa0a911aba62d5504b6688a0a555d
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
31 #include "tm_p.h"
32 #include "predict.h"
33 #include "vec.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "hard-reg-set.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "flags.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
49 #include "tree-eh.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "plugin-api.h"
58 #include "ipa-ref.h"
59 #include "cgraph.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-ssa-loop-manip.h"
66 #include "tree-ssa-loop-niter.h"
67 #include "tree-into-ssa.h"
68 #include "expr.h"
69 #include "tree-dfa.h"
70 #include "tree-ssa.h"
71 #include "tree-dump.h"
72 #include "tree-pass.h"
73 #include "diagnostic-core.h"
74 #include "except.h"
75 #include "cfgloop.h"
76 #include "tree-ssa-propagate.h"
77 #include "value-prof.h"
78 #include "tree-inline.h"
79 #include "target.h"
80 #include "tree-ssa-live.h"
81 #include "omp-low.h"
82 #include "tree-cfgcleanup.h"
83 #include "wide-int.h"
84 #include "wide-int-print.h"
86 /* This file contains functions for building the Control Flow Graph (CFG)
87 for a function tree. */
89 /* Local declarations. */
91 /* Initial capacity for the basic block array. */
92 static const int initial_cfg_capacity = 20;
94 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
95 which use a particular edge. The CASE_LABEL_EXPRs are chained together
96 via their CASE_CHAIN field, which we clear after we're done with the
97 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
99 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
100 update the case vector in response to edge redirections.
102 Right now this table is set up and torn down at key points in the
103 compilation process. It would be nice if we could make the table
104 more persistent. The key is getting notification of changes to
105 the CFG (particularly edge removal, creation and redirection). */
107 static hash_map<edge, tree> *edge_to_cases;
109 /* If we record edge_to_cases, this bitmap will hold indexes
110 of basic blocks that end in a GIMPLE_SWITCH which we touched
111 due to edge manipulations. */
113 static bitmap touched_switch_bbs;
115 /* CFG statistics. */
116 struct cfg_stats_d
118 long num_merged_labels;
121 static struct cfg_stats_d cfg_stats;
123 /* Hash table to store last discriminator assigned for each locus. */
124 struct locus_discrim_map
126 location_t locus;
127 int discriminator;
130 /* Hashtable helpers. */
132 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
134 typedef locus_discrim_map value_type;
135 typedef locus_discrim_map compare_type;
136 static inline hashval_t hash (const value_type *);
137 static inline bool equal (const value_type *, const compare_type *);
140 /* Trivial hash function for a location_t. ITEM is a pointer to
141 a hash table entry that maps a location_t to a discriminator. */
143 inline hashval_t
144 locus_discrim_hasher::hash (const value_type *item)
146 return LOCATION_LINE (item->locus);
149 /* Equality function for the locus-to-discriminator map. A and B
150 point to the two hash table entries to compare. */
152 inline bool
153 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
155 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
158 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
160 /* Basic blocks and flowgraphs. */
161 static void make_blocks (gimple_seq);
163 /* Edges. */
164 static void make_edges (void);
165 static void assign_discriminators (void);
166 static void make_cond_expr_edges (basic_block);
167 static void make_gimple_switch_edges (gswitch *, basic_block);
168 static bool make_goto_expr_edges (basic_block);
169 static void make_gimple_asm_edges (basic_block);
170 static edge gimple_redirect_edge_and_branch (edge, basic_block);
171 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
173 /* Various helpers. */
174 static inline bool stmt_starts_bb_p (gimple, gimple);
175 static int gimple_verify_flow_info (void);
176 static void gimple_make_forwarder_block (edge);
177 static gimple first_non_label_stmt (basic_block);
178 static bool verify_gimple_transaction (gtransaction *);
179 static bool call_can_make_abnormal_goto (gimple);
181 /* Flowgraph optimization and cleanup. */
182 static void gimple_merge_blocks (basic_block, basic_block);
183 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
184 static void remove_bb (basic_block);
185 static edge find_taken_edge_computed_goto (basic_block, tree);
186 static edge find_taken_edge_cond_expr (basic_block, tree);
187 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
188 static tree find_case_label_for_value (gswitch *, tree);
190 void
191 init_empty_tree_cfg_for_function (struct function *fn)
193 /* Initialize the basic block array. */
194 init_flow (fn);
195 profile_status_for_fn (fn) = PROFILE_ABSENT;
196 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
197 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
198 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
199 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
200 initial_cfg_capacity);
202 /* Build a mapping of labels to their associated blocks. */
203 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
204 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
205 initial_cfg_capacity);
207 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
208 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
210 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
211 = EXIT_BLOCK_PTR_FOR_FN (fn);
212 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
213 = ENTRY_BLOCK_PTR_FOR_FN (fn);
216 void
217 init_empty_tree_cfg (void)
219 init_empty_tree_cfg_for_function (cfun);
222 /*---------------------------------------------------------------------------
223 Create basic blocks
224 ---------------------------------------------------------------------------*/
226 /* Entry point to the CFG builder for trees. SEQ is the sequence of
227 statements to be added to the flowgraph. */
229 static void
230 build_gimple_cfg (gimple_seq seq)
232 /* Register specific gimple functions. */
233 gimple_register_cfg_hooks ();
235 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
237 init_empty_tree_cfg ();
239 make_blocks (seq);
241 /* Make sure there is always at least one block, even if it's empty. */
242 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
243 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
245 /* Adjust the size of the array. */
246 if (basic_block_info_for_fn (cfun)->length ()
247 < (size_t) n_basic_blocks_for_fn (cfun))
248 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
249 n_basic_blocks_for_fn (cfun));
251 /* To speed up statement iterator walks, we first purge dead labels. */
252 cleanup_dead_labels ();
254 /* Group case nodes to reduce the number of edges.
255 We do this after cleaning up dead labels because otherwise we miss
256 a lot of obvious case merging opportunities. */
257 group_case_labels ();
259 /* Create the edges of the flowgraph. */
260 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
261 make_edges ();
262 assign_discriminators ();
263 cleanup_dead_labels ();
264 delete discriminator_per_locus;
265 discriminator_per_locus = NULL;
268 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
269 them and propagate the information to LOOP. We assume that the annotations
270 come immediately before the condition in BB, if any. */
272 static void
273 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
275 gimple_stmt_iterator gsi = gsi_last_bb (bb);
276 gimple stmt = gsi_stmt (gsi);
278 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
279 return;
281 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
283 stmt = gsi_stmt (gsi);
284 if (gimple_code (stmt) != GIMPLE_CALL)
285 break;
286 if (!gimple_call_internal_p (stmt)
287 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
288 break;
290 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
292 case annot_expr_ivdep_kind:
293 loop->safelen = INT_MAX;
294 break;
295 case annot_expr_no_vector_kind:
296 loop->dont_vectorize = true;
297 break;
298 case annot_expr_vector_kind:
299 loop->force_vectorize = true;
300 cfun->has_force_vectorize_loops = true;
301 break;
302 default:
303 gcc_unreachable ();
306 stmt = gimple_build_assign (gimple_call_lhs (stmt),
307 gimple_call_arg (stmt, 0));
308 gsi_replace (&gsi, stmt, true);
312 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
313 them and propagate the information to the loop. We assume that the
314 annotations come immediately before the condition of the loop. */
316 static void
317 replace_loop_annotate (void)
319 struct loop *loop;
320 basic_block bb;
321 gimple_stmt_iterator gsi;
322 gimple stmt;
324 FOR_EACH_LOOP (loop, 0)
326 /* First look into the header. */
327 replace_loop_annotate_in_block (loop->header, loop);
329 /* Then look into the latch, if any. */
330 if (loop->latch)
331 replace_loop_annotate_in_block (loop->latch, loop);
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_no_vector_kind:
350 case annot_expr_vector_kind:
351 break;
352 default:
353 gcc_unreachable ();
356 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
357 stmt = gimple_build_assign (gimple_call_lhs (stmt),
358 gimple_call_arg (stmt, 0));
359 gsi_replace (&gsi, stmt, true);
365 static unsigned int
366 execute_build_cfg (void)
368 gimple_seq body = gimple_body (current_function_decl);
370 build_gimple_cfg (body);
371 gimple_set_body (current_function_decl, NULL);
372 if (dump_file && (dump_flags & TDF_DETAILS))
374 fprintf (dump_file, "Scope blocks:\n");
375 dump_scope_blocks (dump_file, dump_flags);
377 cleanup_tree_cfg ();
378 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
379 replace_loop_annotate ();
380 return 0;
383 namespace {
385 const pass_data pass_data_build_cfg =
387 GIMPLE_PASS, /* type */
388 "cfg", /* name */
389 OPTGROUP_NONE, /* optinfo_flags */
390 TV_TREE_CFG, /* tv_id */
391 PROP_gimple_leh, /* properties_required */
392 ( PROP_cfg | PROP_loops ), /* properties_provided */
393 0, /* properties_destroyed */
394 0, /* todo_flags_start */
395 0, /* todo_flags_finish */
398 class pass_build_cfg : public gimple_opt_pass
400 public:
401 pass_build_cfg (gcc::context *ctxt)
402 : gimple_opt_pass (pass_data_build_cfg, ctxt)
405 /* opt_pass methods: */
406 virtual unsigned int execute (function *) { return execute_build_cfg (); }
408 }; // class pass_build_cfg
410 } // anon namespace
412 gimple_opt_pass *
413 make_pass_build_cfg (gcc::context *ctxt)
415 return new pass_build_cfg (ctxt);
419 /* Return true if T is a computed goto. */
421 bool
422 computed_goto_p (gimple t)
424 return (gimple_code (t) == GIMPLE_GOTO
425 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
428 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
429 the other edge points to a bb with just __builtin_unreachable ().
430 I.e. return true for C->M edge in:
431 <bb C>:
433 if (something)
434 goto <bb N>;
435 else
436 goto <bb M>;
437 <bb N>:
438 __builtin_unreachable ();
439 <bb M>: */
441 bool
442 assert_unreachable_fallthru_edge_p (edge e)
444 basic_block pred_bb = e->src;
445 gimple last = last_stmt (pred_bb);
446 if (last && gimple_code (last) == GIMPLE_COND)
448 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
449 if (other_bb == e->dest)
450 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
451 if (EDGE_COUNT (other_bb->succs) == 0)
453 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
454 gimple stmt;
456 if (gsi_end_p (gsi))
457 return false;
458 stmt = gsi_stmt (gsi);
459 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
461 gsi_next (&gsi);
462 if (gsi_end_p (gsi))
463 return false;
464 stmt = gsi_stmt (gsi);
466 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
469 return false;
473 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
474 could alter control flow except via eh. We initialize the flag at
475 CFG build time and only ever clear it later. */
477 static void
478 gimple_call_initialize_ctrl_altering (gimple stmt)
480 int flags = gimple_call_flags (stmt);
482 /* A call alters control flow if it can make an abnormal goto. */
483 if (call_can_make_abnormal_goto (stmt)
484 /* A call also alters control flow if it does not return. */
485 || flags & ECF_NORETURN
486 /* TM ending statements have backedges out of the transaction.
487 Return true so we split the basic block containing them.
488 Note that the TM_BUILTIN test is merely an optimization. */
489 || ((flags & ECF_TM_BUILTIN)
490 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
491 /* BUILT_IN_RETURN call is same as return statement. */
492 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
493 gimple_call_set_ctrl_altering (stmt, true);
494 else
495 gimple_call_set_ctrl_altering (stmt, false);
499 /* Build a flowgraph for the sequence of stmts SEQ. */
501 static void
502 make_blocks (gimple_seq seq)
504 gimple_stmt_iterator i = gsi_start (seq);
505 gimple stmt = NULL;
506 bool start_new_block = true;
507 bool first_stmt_of_seq = true;
508 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
510 while (!gsi_end_p (i))
512 gimple prev_stmt;
514 prev_stmt = stmt;
515 stmt = gsi_stmt (i);
517 if (stmt && is_gimple_call (stmt))
518 gimple_call_initialize_ctrl_altering (stmt);
520 /* If the statement starts a new basic block or if we have determined
521 in a previous pass that we need to create a new block for STMT, do
522 so now. */
523 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
525 if (!first_stmt_of_seq)
526 gsi_split_seq_before (&i, &seq);
527 bb = create_basic_block (seq, NULL, bb);
528 start_new_block = false;
531 /* Now add STMT to BB and create the subgraphs for special statement
532 codes. */
533 gimple_set_bb (stmt, bb);
535 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
536 next iteration. */
537 if (stmt_ends_bb_p (stmt))
539 /* If the stmt can make abnormal goto use a new temporary
540 for the assignment to the LHS. This makes sure the old value
541 of the LHS is available on the abnormal edge. Otherwise
542 we will end up with overlapping life-ranges for abnormal
543 SSA names. */
544 if (gimple_has_lhs (stmt)
545 && stmt_can_make_abnormal_goto (stmt)
546 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
548 tree lhs = gimple_get_lhs (stmt);
549 tree tmp = create_tmp_var (TREE_TYPE (lhs));
550 gimple s = gimple_build_assign (lhs, tmp);
551 gimple_set_location (s, gimple_location (stmt));
552 gimple_set_block (s, gimple_block (stmt));
553 gimple_set_lhs (stmt, tmp);
554 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
555 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
556 DECL_GIMPLE_REG_P (tmp) = 1;
557 gsi_insert_after (&i, s, GSI_SAME_STMT);
559 start_new_block = true;
562 gsi_next (&i);
563 first_stmt_of_seq = false;
568 /* Create and return a new empty basic block after bb AFTER. */
570 static basic_block
571 create_bb (void *h, void *e, basic_block after)
573 basic_block bb;
575 gcc_assert (!e);
577 /* Create and initialize a new basic block. Since alloc_block uses
578 GC allocation that clears memory to allocate a basic block, we do
579 not have to clear the newly allocated basic block here. */
580 bb = alloc_block ();
582 bb->index = last_basic_block_for_fn (cfun);
583 bb->flags = BB_NEW;
584 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
586 /* Add the new block to the linked list of blocks. */
587 link_block (bb, after);
589 /* Grow the basic block array if needed. */
590 if ((size_t) last_basic_block_for_fn (cfun)
591 == basic_block_info_for_fn (cfun)->length ())
593 size_t new_size =
594 (last_basic_block_for_fn (cfun)
595 + (last_basic_block_for_fn (cfun) + 3) / 4);
596 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
599 /* Add the newly created block to the array. */
600 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
602 n_basic_blocks_for_fn (cfun)++;
603 last_basic_block_for_fn (cfun)++;
605 return bb;
609 /*---------------------------------------------------------------------------
610 Edge creation
611 ---------------------------------------------------------------------------*/
613 /* Fold COND_EXPR_COND of each COND_EXPR. */
615 void
616 fold_cond_expr_cond (void)
618 basic_block bb;
620 FOR_EACH_BB_FN (bb, cfun)
622 gimple stmt = last_stmt (bb);
624 if (stmt && gimple_code (stmt) == GIMPLE_COND)
626 gcond *cond_stmt = as_a <gcond *> (stmt);
627 location_t loc = gimple_location (stmt);
628 tree cond;
629 bool zerop, onep;
631 fold_defer_overflow_warnings ();
632 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
633 boolean_type_node,
634 gimple_cond_lhs (cond_stmt),
635 gimple_cond_rhs (cond_stmt));
636 if (cond)
638 zerop = integer_zerop (cond);
639 onep = integer_onep (cond);
641 else
642 zerop = onep = false;
644 fold_undefer_overflow_warnings (zerop || onep,
645 stmt,
646 WARN_STRICT_OVERFLOW_CONDITIONAL);
647 if (zerop)
648 gimple_cond_make_false (cond_stmt);
649 else if (onep)
650 gimple_cond_make_true (cond_stmt);
655 /* If basic block BB has an abnormal edge to a basic block
656 containing IFN_ABNORMAL_DISPATCHER internal call, return
657 that the dispatcher's basic block, otherwise return NULL. */
659 basic_block
660 get_abnormal_succ_dispatcher (basic_block bb)
662 edge e;
663 edge_iterator ei;
665 FOR_EACH_EDGE (e, ei, bb->succs)
666 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
668 gimple_stmt_iterator gsi
669 = gsi_start_nondebug_after_labels_bb (e->dest);
670 gimple g = gsi_stmt (gsi);
671 if (g
672 && is_gimple_call (g)
673 && gimple_call_internal_p (g)
674 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
675 return e->dest;
677 return NULL;
680 /* Helper function for make_edges. Create a basic block with
681 with ABNORMAL_DISPATCHER internal call in it if needed, and
682 create abnormal edges from BBS to it and from it to FOR_BB
683 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
685 static void
686 handle_abnormal_edges (basic_block *dispatcher_bbs,
687 basic_block for_bb, int *bb_to_omp_idx,
688 auto_vec<basic_block> *bbs, bool computed_goto)
690 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
691 unsigned int idx = 0;
692 basic_block bb;
693 bool inner = false;
695 if (bb_to_omp_idx)
697 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
698 if (bb_to_omp_idx[for_bb->index] != 0)
699 inner = true;
702 /* If the dispatcher has been created already, then there are basic
703 blocks with abnormal edges to it, so just make a new edge to
704 for_bb. */
705 if (*dispatcher == NULL)
707 /* Check if there are any basic blocks that need to have
708 abnormal edges to this dispatcher. If there are none, return
709 early. */
710 if (bb_to_omp_idx == NULL)
712 if (bbs->is_empty ())
713 return;
715 else
717 FOR_EACH_VEC_ELT (*bbs, idx, bb)
718 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
719 break;
720 if (bb == NULL)
721 return;
724 /* Create the dispatcher bb. */
725 *dispatcher = create_basic_block (NULL, NULL, for_bb);
726 if (computed_goto)
728 /* Factor computed gotos into a common computed goto site. Also
729 record the location of that site so that we can un-factor the
730 gotos after we have converted back to normal form. */
731 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
733 /* Create the destination of the factored goto. Each original
734 computed goto will put its desired destination into this
735 variable and jump to the label we create immediately below. */
736 tree var = create_tmp_var (ptr_type_node, "gotovar");
738 /* Build a label for the new block which will contain the
739 factored computed goto. */
740 tree factored_label_decl
741 = create_artificial_label (UNKNOWN_LOCATION);
742 gimple factored_computed_goto_label
743 = gimple_build_label (factored_label_decl);
744 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
746 /* Build our new computed goto. */
747 gimple factored_computed_goto = gimple_build_goto (var);
748 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
750 FOR_EACH_VEC_ELT (*bbs, idx, bb)
752 if (bb_to_omp_idx
753 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
754 continue;
756 gsi = gsi_last_bb (bb);
757 gimple last = gsi_stmt (gsi);
759 gcc_assert (computed_goto_p (last));
761 /* Copy the original computed goto's destination into VAR. */
762 gimple assignment
763 = gimple_build_assign (var, gimple_goto_dest (last));
764 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
766 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
767 e->goto_locus = gimple_location (last);
768 gsi_remove (&gsi, true);
771 else
773 tree arg = inner ? boolean_true_node : boolean_false_node;
774 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
775 1, arg);
776 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
777 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
779 /* Create predecessor edges of the dispatcher. */
780 FOR_EACH_VEC_ELT (*bbs, idx, bb)
782 if (bb_to_omp_idx
783 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
784 continue;
785 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
790 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
793 /* Join all the blocks in the flowgraph. */
795 static void
796 make_edges (void)
798 basic_block bb;
799 struct omp_region *cur_region = NULL;
800 auto_vec<basic_block> ab_edge_goto;
801 auto_vec<basic_block> ab_edge_call;
802 int *bb_to_omp_idx = NULL;
803 int cur_omp_region_idx = 0;
805 /* Create an edge from entry to the first block with executable
806 statements in it. */
807 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
808 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
809 EDGE_FALLTHRU);
811 /* Traverse the basic block array placing edges. */
812 FOR_EACH_BB_FN (bb, cfun)
814 gimple last = last_stmt (bb);
815 bool fallthru;
817 if (bb_to_omp_idx)
818 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
820 if (last)
822 enum gimple_code code = gimple_code (last);
823 switch (code)
825 case GIMPLE_GOTO:
826 if (make_goto_expr_edges (bb))
827 ab_edge_goto.safe_push (bb);
828 fallthru = false;
829 break;
830 case GIMPLE_RETURN:
832 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
833 e->goto_locus = gimple_location (last);
834 fallthru = false;
836 break;
837 case GIMPLE_COND:
838 make_cond_expr_edges (bb);
839 fallthru = false;
840 break;
841 case GIMPLE_SWITCH:
842 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
843 fallthru = false;
844 break;
845 case GIMPLE_RESX:
846 make_eh_edges (last);
847 fallthru = false;
848 break;
849 case GIMPLE_EH_DISPATCH:
850 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
851 break;
853 case GIMPLE_CALL:
854 /* If this function receives a nonlocal goto, then we need to
855 make edges from this call site to all the nonlocal goto
856 handlers. */
857 if (stmt_can_make_abnormal_goto (last))
858 ab_edge_call.safe_push (bb);
860 /* If this statement has reachable exception handlers, then
861 create abnormal edges to them. */
862 make_eh_edges (last);
864 /* BUILTIN_RETURN is really a return statement. */
865 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
867 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
868 fallthru = false;
870 /* Some calls are known not to return. */
871 else
872 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
873 break;
875 case GIMPLE_ASSIGN:
876 /* A GIMPLE_ASSIGN may throw internally and thus be considered
877 control-altering. */
878 if (is_ctrl_altering_stmt (last))
879 make_eh_edges (last);
880 fallthru = true;
881 break;
883 case GIMPLE_ASM:
884 make_gimple_asm_edges (bb);
885 fallthru = true;
886 break;
888 CASE_GIMPLE_OMP:
889 fallthru = make_gimple_omp_edges (bb, &cur_region,
890 &cur_omp_region_idx);
891 if (cur_region && bb_to_omp_idx == NULL)
892 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
893 break;
895 case GIMPLE_TRANSACTION:
897 tree abort_label
898 = gimple_transaction_label (as_a <gtransaction *> (last));
899 if (abort_label)
900 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
901 fallthru = true;
903 break;
905 default:
906 gcc_assert (!stmt_ends_bb_p (last));
907 fallthru = true;
910 else
911 fallthru = true;
913 if (fallthru)
914 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
917 /* Computed gotos are hell to deal with, especially if there are
918 lots of them with a large number of destinations. So we factor
919 them to a common computed goto location before we build the
920 edge list. After we convert back to normal form, we will un-factor
921 the computed gotos since factoring introduces an unwanted jump.
922 For non-local gotos and abnormal edges from calls to calls that return
923 twice or forced labels, factor the abnormal edges too, by having all
924 abnormal edges from the calls go to a common artificial basic block
925 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
926 basic block to all forced labels and calls returning twice.
927 We do this per-OpenMP structured block, because those regions
928 are guaranteed to be single entry single exit by the standard,
929 so it is not allowed to enter or exit such regions abnormally this way,
930 thus all computed gotos, non-local gotos and setjmp/longjmp calls
931 must not transfer control across SESE region boundaries. */
932 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
934 gimple_stmt_iterator gsi;
935 basic_block dispatcher_bb_array[2] = { NULL, NULL };
936 basic_block *dispatcher_bbs = dispatcher_bb_array;
937 int count = n_basic_blocks_for_fn (cfun);
939 if (bb_to_omp_idx)
940 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
942 FOR_EACH_BB_FN (bb, cfun)
944 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
946 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
947 tree target;
949 if (!label_stmt)
950 break;
952 target = gimple_label_label (label_stmt);
954 /* Make an edge to every label block that has been marked as a
955 potential target for a computed goto or a non-local goto. */
956 if (FORCED_LABEL (target))
957 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
958 &ab_edge_goto, true);
959 if (DECL_NONLOCAL (target))
961 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
962 &ab_edge_call, false);
963 break;
967 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
968 gsi_next_nondebug (&gsi);
969 if (!gsi_end_p (gsi))
971 /* Make an edge to every setjmp-like call. */
972 gimple call_stmt = gsi_stmt (gsi);
973 if (is_gimple_call (call_stmt)
974 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
975 || gimple_call_builtin_p (call_stmt,
976 BUILT_IN_SETJMP_RECEIVER)))
977 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
978 &ab_edge_call, false);
982 if (bb_to_omp_idx)
983 XDELETE (dispatcher_bbs);
986 XDELETE (bb_to_omp_idx);
988 free_omp_regions ();
990 /* Fold COND_EXPR_COND of each COND_EXPR. */
991 fold_cond_expr_cond ();
994 /* Find the next available discriminator value for LOCUS. The
995 discriminator distinguishes among several basic blocks that
996 share a common locus, allowing for more accurate sample-based
997 profiling. */
999 static int
1000 next_discriminator_for_locus (location_t locus)
1002 struct locus_discrim_map item;
1003 struct locus_discrim_map **slot;
1005 item.locus = locus;
1006 item.discriminator = 0;
1007 slot = discriminator_per_locus->find_slot_with_hash (
1008 &item, LOCATION_LINE (locus), INSERT);
1009 gcc_assert (slot);
1010 if (*slot == HTAB_EMPTY_ENTRY)
1012 *slot = XNEW (struct locus_discrim_map);
1013 gcc_assert (*slot);
1014 (*slot)->locus = locus;
1015 (*slot)->discriminator = 0;
1017 (*slot)->discriminator++;
1018 return (*slot)->discriminator;
1021 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1023 static bool
1024 same_line_p (location_t locus1, location_t locus2)
1026 expanded_location from, to;
1028 if (locus1 == locus2)
1029 return true;
1031 from = expand_location (locus1);
1032 to = expand_location (locus2);
1034 if (from.line != to.line)
1035 return false;
1036 if (from.file == to.file)
1037 return true;
1038 return (from.file != NULL
1039 && to.file != NULL
1040 && filename_cmp (from.file, to.file) == 0);
1043 /* Assign discriminators to each basic block. */
1045 static void
1046 assign_discriminators (void)
1048 basic_block bb;
1050 FOR_EACH_BB_FN (bb, cfun)
1052 edge e;
1053 edge_iterator ei;
1054 gimple last = last_stmt (bb);
1055 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1057 if (locus == UNKNOWN_LOCATION)
1058 continue;
1060 FOR_EACH_EDGE (e, ei, bb->succs)
1062 gimple first = first_non_label_stmt (e->dest);
1063 gimple last = last_stmt (e->dest);
1064 if ((first && same_line_p (locus, gimple_location (first)))
1065 || (last && same_line_p (locus, gimple_location (last))))
1067 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1068 bb->discriminator = next_discriminator_for_locus (locus);
1069 else
1070 e->dest->discriminator = next_discriminator_for_locus (locus);
1076 /* Create the edges for a GIMPLE_COND starting at block BB. */
1078 static void
1079 make_cond_expr_edges (basic_block bb)
1081 gcond *entry = as_a <gcond *> (last_stmt (bb));
1082 gimple then_stmt, else_stmt;
1083 basic_block then_bb, else_bb;
1084 tree then_label, else_label;
1085 edge e;
1087 gcc_assert (entry);
1088 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1090 /* Entry basic blocks for each component. */
1091 then_label = gimple_cond_true_label (entry);
1092 else_label = gimple_cond_false_label (entry);
1093 then_bb = label_to_block (then_label);
1094 else_bb = label_to_block (else_label);
1095 then_stmt = first_stmt (then_bb);
1096 else_stmt = first_stmt (else_bb);
1098 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1099 e->goto_locus = gimple_location (then_stmt);
1100 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1101 if (e)
1102 e->goto_locus = gimple_location (else_stmt);
1104 /* We do not need the labels anymore. */
1105 gimple_cond_set_true_label (entry, NULL_TREE);
1106 gimple_cond_set_false_label (entry, NULL_TREE);
1110 /* Called for each element in the hash table (P) as we delete the
1111 edge to cases hash table.
1113 Clear all the TREE_CHAINs to prevent problems with copying of
1114 SWITCH_EXPRs and structure sharing rules, then free the hash table
1115 element. */
1117 bool
1118 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1120 tree t, next;
1122 for (t = value; t; t = next)
1124 next = CASE_CHAIN (t);
1125 CASE_CHAIN (t) = NULL;
1128 return true;
1131 /* Start recording information mapping edges to case labels. */
1133 void
1134 start_recording_case_labels (void)
1136 gcc_assert (edge_to_cases == NULL);
1137 edge_to_cases = new hash_map<edge, tree>;
1138 touched_switch_bbs = BITMAP_ALLOC (NULL);
1141 /* Return nonzero if we are recording information for case labels. */
1143 static bool
1144 recording_case_labels_p (void)
1146 return (edge_to_cases != NULL);
1149 /* Stop recording information mapping edges to case labels and
1150 remove any information we have recorded. */
1151 void
1152 end_recording_case_labels (void)
1154 bitmap_iterator bi;
1155 unsigned i;
1156 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1157 delete edge_to_cases;
1158 edge_to_cases = NULL;
1159 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1161 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1162 if (bb)
1164 gimple stmt = last_stmt (bb);
1165 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1166 group_case_labels_stmt (as_a <gswitch *> (stmt));
1169 BITMAP_FREE (touched_switch_bbs);
1172 /* If we are inside a {start,end}_recording_cases block, then return
1173 a chain of CASE_LABEL_EXPRs from T which reference E.
1175 Otherwise return NULL. */
1177 static tree
1178 get_cases_for_edge (edge e, gswitch *t)
1180 tree *slot;
1181 size_t i, n;
1183 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1184 chains available. Return NULL so the caller can detect this case. */
1185 if (!recording_case_labels_p ())
1186 return NULL;
1188 slot = edge_to_cases->get (e);
1189 if (slot)
1190 return *slot;
1192 /* If we did not find E in the hash table, then this must be the first
1193 time we have been queried for information about E & T. Add all the
1194 elements from T to the hash table then perform the query again. */
1196 n = gimple_switch_num_labels (t);
1197 for (i = 0; i < n; i++)
1199 tree elt = gimple_switch_label (t, i);
1200 tree lab = CASE_LABEL (elt);
1201 basic_block label_bb = label_to_block (lab);
1202 edge this_edge = find_edge (e->src, label_bb);
1204 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1205 a new chain. */
1206 tree &s = edge_to_cases->get_or_insert (this_edge);
1207 CASE_CHAIN (elt) = s;
1208 s = elt;
1211 return *edge_to_cases->get (e);
1214 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1216 static void
1217 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1219 size_t i, n;
1221 n = gimple_switch_num_labels (entry);
1223 for (i = 0; i < n; ++i)
1225 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1226 basic_block label_bb = label_to_block (lab);
1227 make_edge (bb, label_bb, 0);
1232 /* Return the basic block holding label DEST. */
1234 basic_block
1235 label_to_block_fn (struct function *ifun, tree dest)
1237 int uid = LABEL_DECL_UID (dest);
1239 /* We would die hard when faced by an undefined label. Emit a label to
1240 the very first basic block. This will hopefully make even the dataflow
1241 and undefined variable warnings quite right. */
1242 if (seen_error () && uid < 0)
1244 gimple_stmt_iterator gsi =
1245 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1246 gimple stmt;
1248 stmt = gimple_build_label (dest);
1249 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1250 uid = LABEL_DECL_UID (dest);
1252 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1253 return NULL;
1254 return (*ifun->cfg->x_label_to_block_map)[uid];
1257 /* Create edges for a goto statement at block BB. Returns true
1258 if abnormal edges should be created. */
1260 static bool
1261 make_goto_expr_edges (basic_block bb)
1263 gimple_stmt_iterator last = gsi_last_bb (bb);
1264 gimple goto_t = gsi_stmt (last);
1266 /* A simple GOTO creates normal edges. */
1267 if (simple_goto_p (goto_t))
1269 tree dest = gimple_goto_dest (goto_t);
1270 basic_block label_bb = label_to_block (dest);
1271 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1272 e->goto_locus = gimple_location (goto_t);
1273 gsi_remove (&last, true);
1274 return false;
1277 /* A computed GOTO creates abnormal edges. */
1278 return true;
1281 /* Create edges for an asm statement with labels at block BB. */
1283 static void
1284 make_gimple_asm_edges (basic_block bb)
1286 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1287 int i, n = gimple_asm_nlabels (stmt);
1289 for (i = 0; i < n; ++i)
1291 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1292 basic_block label_bb = label_to_block (label);
1293 make_edge (bb, label_bb, 0);
1297 /*---------------------------------------------------------------------------
1298 Flowgraph analysis
1299 ---------------------------------------------------------------------------*/
1301 /* Cleanup useless labels in basic blocks. This is something we wish
1302 to do early because it allows us to group case labels before creating
1303 the edges for the CFG, and it speeds up block statement iterators in
1304 all passes later on.
1305 We rerun this pass after CFG is created, to get rid of the labels that
1306 are no longer referenced. After then we do not run it any more, since
1307 (almost) no new labels should be created. */
1309 /* A map from basic block index to the leading label of that block. */
1310 static struct label_record
1312 /* The label. */
1313 tree label;
1315 /* True if the label is referenced from somewhere. */
1316 bool used;
1317 } *label_for_bb;
1319 /* Given LABEL return the first label in the same basic block. */
1321 static tree
1322 main_block_label (tree label)
1324 basic_block bb = label_to_block (label);
1325 tree main_label = label_for_bb[bb->index].label;
1327 /* label_to_block possibly inserted undefined label into the chain. */
1328 if (!main_label)
1330 label_for_bb[bb->index].label = label;
1331 main_label = label;
1334 label_for_bb[bb->index].used = true;
1335 return main_label;
1338 /* Clean up redundant labels within the exception tree. */
1340 static void
1341 cleanup_dead_labels_eh (void)
1343 eh_landing_pad lp;
1344 eh_region r;
1345 tree lab;
1346 int i;
1348 if (cfun->eh == NULL)
1349 return;
1351 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1352 if (lp && lp->post_landing_pad)
1354 lab = main_block_label (lp->post_landing_pad);
1355 if (lab != lp->post_landing_pad)
1357 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1358 EH_LANDING_PAD_NR (lab) = lp->index;
1362 FOR_ALL_EH_REGION (r)
1363 switch (r->type)
1365 case ERT_CLEANUP:
1366 case ERT_MUST_NOT_THROW:
1367 break;
1369 case ERT_TRY:
1371 eh_catch c;
1372 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1374 lab = c->label;
1375 if (lab)
1376 c->label = main_block_label (lab);
1379 break;
1381 case ERT_ALLOWED_EXCEPTIONS:
1382 lab = r->u.allowed.label;
1383 if (lab)
1384 r->u.allowed.label = main_block_label (lab);
1385 break;
1390 /* Cleanup redundant labels. This is a three-step process:
1391 1) Find the leading label for each block.
1392 2) Redirect all references to labels to the leading labels.
1393 3) Cleanup all useless labels. */
1395 void
1396 cleanup_dead_labels (void)
1398 basic_block bb;
1399 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1401 /* Find a suitable label for each block. We use the first user-defined
1402 label if there is one, or otherwise just the first label we see. */
1403 FOR_EACH_BB_FN (bb, cfun)
1405 gimple_stmt_iterator i;
1407 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1409 tree label;
1410 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1412 if (!label_stmt)
1413 break;
1415 label = gimple_label_label (label_stmt);
1417 /* If we have not yet seen a label for the current block,
1418 remember this one and see if there are more labels. */
1419 if (!label_for_bb[bb->index].label)
1421 label_for_bb[bb->index].label = label;
1422 continue;
1425 /* If we did see a label for the current block already, but it
1426 is an artificially created label, replace it if the current
1427 label is a user defined label. */
1428 if (!DECL_ARTIFICIAL (label)
1429 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1431 label_for_bb[bb->index].label = label;
1432 break;
1437 /* Now redirect all jumps/branches to the selected label.
1438 First do so for each block ending in a control statement. */
1439 FOR_EACH_BB_FN (bb, cfun)
1441 gimple stmt = last_stmt (bb);
1442 tree label, new_label;
1444 if (!stmt)
1445 continue;
1447 switch (gimple_code (stmt))
1449 case GIMPLE_COND:
1451 gcond *cond_stmt = as_a <gcond *> (stmt);
1452 label = gimple_cond_true_label (cond_stmt);
1453 if (label)
1455 new_label = main_block_label (label);
1456 if (new_label != label)
1457 gimple_cond_set_true_label (cond_stmt, new_label);
1460 label = gimple_cond_false_label (cond_stmt);
1461 if (label)
1463 new_label = main_block_label (label);
1464 if (new_label != label)
1465 gimple_cond_set_false_label (cond_stmt, new_label);
1468 break;
1470 case GIMPLE_SWITCH:
1472 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1473 size_t i, n = gimple_switch_num_labels (switch_stmt);
1475 /* Replace all destination labels. */
1476 for (i = 0; i < n; ++i)
1478 tree case_label = gimple_switch_label (switch_stmt, i);
1479 label = CASE_LABEL (case_label);
1480 new_label = main_block_label (label);
1481 if (new_label != label)
1482 CASE_LABEL (case_label) = new_label;
1484 break;
1487 case GIMPLE_ASM:
1489 gasm *asm_stmt = as_a <gasm *> (stmt);
1490 int i, n = gimple_asm_nlabels (asm_stmt);
1492 for (i = 0; i < n; ++i)
1494 tree cons = gimple_asm_label_op (asm_stmt, i);
1495 tree label = main_block_label (TREE_VALUE (cons));
1496 TREE_VALUE (cons) = label;
1498 break;
1501 /* We have to handle gotos until they're removed, and we don't
1502 remove them until after we've created the CFG edges. */
1503 case GIMPLE_GOTO:
1504 if (!computed_goto_p (stmt))
1506 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1507 label = gimple_goto_dest (goto_stmt);
1508 new_label = main_block_label (label);
1509 if (new_label != label)
1510 gimple_goto_set_dest (goto_stmt, new_label);
1512 break;
1514 case GIMPLE_TRANSACTION:
1516 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1517 tree label = gimple_transaction_label (trans_stmt);
1518 if (label)
1520 tree new_label = main_block_label (label);
1521 if (new_label != label)
1522 gimple_transaction_set_label (trans_stmt, new_label);
1525 break;
1527 default:
1528 break;
1532 /* Do the same for the exception region tree labels. */
1533 cleanup_dead_labels_eh ();
1535 /* Finally, purge dead labels. All user-defined labels and labels that
1536 can be the target of non-local gotos and labels which have their
1537 address taken are preserved. */
1538 FOR_EACH_BB_FN (bb, cfun)
1540 gimple_stmt_iterator i;
1541 tree label_for_this_bb = label_for_bb[bb->index].label;
1543 if (!label_for_this_bb)
1544 continue;
1546 /* If the main label of the block is unused, we may still remove it. */
1547 if (!label_for_bb[bb->index].used)
1548 label_for_this_bb = NULL;
1550 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1552 tree label;
1553 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1555 if (!label_stmt)
1556 break;
1558 label = gimple_label_label (label_stmt);
1560 if (label == label_for_this_bb
1561 || !DECL_ARTIFICIAL (label)
1562 || DECL_NONLOCAL (label)
1563 || FORCED_LABEL (label))
1564 gsi_next (&i);
1565 else
1566 gsi_remove (&i, true);
1570 free (label_for_bb);
1573 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1574 the ones jumping to the same label.
1575 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1577 void
1578 group_case_labels_stmt (gswitch *stmt)
1580 int old_size = gimple_switch_num_labels (stmt);
1581 int i, j, new_size = old_size;
1582 basic_block default_bb = NULL;
1584 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1586 /* Look for possible opportunities to merge cases. */
1587 i = 1;
1588 while (i < old_size)
1590 tree base_case, base_high;
1591 basic_block base_bb;
1593 base_case = gimple_switch_label (stmt, i);
1595 gcc_assert (base_case);
1596 base_bb = label_to_block (CASE_LABEL (base_case));
1598 /* Discard cases that have the same destination as the
1599 default case. */
1600 if (base_bb == default_bb)
1602 gimple_switch_set_label (stmt, i, NULL_TREE);
1603 i++;
1604 new_size--;
1605 continue;
1608 base_high = CASE_HIGH (base_case)
1609 ? CASE_HIGH (base_case)
1610 : CASE_LOW (base_case);
1611 i++;
1613 /* Try to merge case labels. Break out when we reach the end
1614 of the label vector or when we cannot merge the next case
1615 label with the current one. */
1616 while (i < old_size)
1618 tree merge_case = gimple_switch_label (stmt, i);
1619 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1620 wide_int bhp1 = wi::add (base_high, 1);
1622 /* Merge the cases if they jump to the same place,
1623 and their ranges are consecutive. */
1624 if (merge_bb == base_bb
1625 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1627 base_high = CASE_HIGH (merge_case) ?
1628 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1629 CASE_HIGH (base_case) = base_high;
1630 gimple_switch_set_label (stmt, i, NULL_TREE);
1631 new_size--;
1632 i++;
1634 else
1635 break;
1639 /* Compress the case labels in the label vector, and adjust the
1640 length of the vector. */
1641 for (i = 0, j = 0; i < new_size; i++)
1643 while (! gimple_switch_label (stmt, j))
1644 j++;
1645 gimple_switch_set_label (stmt, i,
1646 gimple_switch_label (stmt, j++));
1649 gcc_assert (new_size <= old_size);
1650 gimple_switch_set_num_labels (stmt, new_size);
1653 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1654 and scan the sorted vector of cases. Combine the ones jumping to the
1655 same label. */
1657 void
1658 group_case_labels (void)
1660 basic_block bb;
1662 FOR_EACH_BB_FN (bb, cfun)
1664 gimple stmt = last_stmt (bb);
1665 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1666 group_case_labels_stmt (as_a <gswitch *> (stmt));
1670 /* Checks whether we can merge block B into block A. */
1672 static bool
1673 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1675 gimple stmt;
1677 if (!single_succ_p (a))
1678 return false;
1680 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1681 return false;
1683 if (single_succ (a) != b)
1684 return false;
1686 if (!single_pred_p (b))
1687 return false;
1689 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1690 return false;
1692 /* If A ends by a statement causing exceptions or something similar, we
1693 cannot merge the blocks. */
1694 stmt = last_stmt (a);
1695 if (stmt && stmt_ends_bb_p (stmt))
1696 return false;
1698 /* Do not allow a block with only a non-local label to be merged. */
1699 if (stmt)
1700 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1701 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1702 return false;
1704 /* Examine the labels at the beginning of B. */
1705 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1706 gsi_next (&gsi))
1708 tree lab;
1709 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1710 if (!label_stmt)
1711 break;
1712 lab = gimple_label_label (label_stmt);
1714 /* Do not remove user forced labels or for -O0 any user labels. */
1715 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1716 return false;
1719 /* Protect simple loop latches. We only want to avoid merging
1720 the latch with the loop header in this case. */
1721 if (current_loops
1722 && b->loop_father->latch == b
1723 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1724 && b->loop_father->header == a)
1725 return false;
1727 /* It must be possible to eliminate all phi nodes in B. If ssa form
1728 is not up-to-date and a name-mapping is registered, we cannot eliminate
1729 any phis. Symbols marked for renaming are never a problem though. */
1730 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1731 gsi_next (&gsi))
1733 gphi *phi = gsi.phi ();
1734 /* Technically only new names matter. */
1735 if (name_registered_for_update_p (PHI_RESULT (phi)))
1736 return false;
1739 /* When not optimizing, don't merge if we'd lose goto_locus. */
1740 if (!optimize
1741 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1743 location_t goto_locus = single_succ_edge (a)->goto_locus;
1744 gimple_stmt_iterator prev, next;
1745 prev = gsi_last_nondebug_bb (a);
1746 next = gsi_after_labels (b);
1747 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1748 gsi_next_nondebug (&next);
1749 if ((gsi_end_p (prev)
1750 || gimple_location (gsi_stmt (prev)) != goto_locus)
1751 && (gsi_end_p (next)
1752 || gimple_location (gsi_stmt (next)) != goto_locus))
1753 return false;
1756 return true;
1759 /* Replaces all uses of NAME by VAL. */
1761 void
1762 replace_uses_by (tree name, tree val)
1764 imm_use_iterator imm_iter;
1765 use_operand_p use;
1766 gimple stmt;
1767 edge e;
1769 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1771 /* Mark the block if we change the last stmt in it. */
1772 if (cfgcleanup_altered_bbs
1773 && stmt_ends_bb_p (stmt))
1774 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1776 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1778 replace_exp (use, val);
1780 if (gimple_code (stmt) == GIMPLE_PHI)
1782 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1783 PHI_ARG_INDEX_FROM_USE (use));
1784 if (e->flags & EDGE_ABNORMAL)
1786 /* This can only occur for virtual operands, since
1787 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1788 would prevent replacement. */
1789 gcc_checking_assert (virtual_operand_p (name));
1790 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1795 if (gimple_code (stmt) != GIMPLE_PHI)
1797 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1798 gimple orig_stmt = stmt;
1799 size_t i;
1801 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1802 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1803 only change sth from non-invariant to invariant, and only
1804 when propagating constants. */
1805 if (is_gimple_min_invariant (val))
1806 for (i = 0; i < gimple_num_ops (stmt); i++)
1808 tree op = gimple_op (stmt, i);
1809 /* Operands may be empty here. For example, the labels
1810 of a GIMPLE_COND are nulled out following the creation
1811 of the corresponding CFG edges. */
1812 if (op && TREE_CODE (op) == ADDR_EXPR)
1813 recompute_tree_invariant_for_addr_expr (op);
1816 if (fold_stmt (&gsi))
1817 stmt = gsi_stmt (gsi);
1819 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1820 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1822 update_stmt (stmt);
1826 gcc_checking_assert (has_zero_uses (name));
1828 /* Also update the trees stored in loop structures. */
1829 if (current_loops)
1831 struct loop *loop;
1833 FOR_EACH_LOOP (loop, 0)
1835 substitute_in_loop_info (loop, name, val);
1840 /* Merge block B into block A. */
1842 static void
1843 gimple_merge_blocks (basic_block a, basic_block b)
1845 gimple_stmt_iterator last, gsi;
1846 gphi_iterator psi;
1848 if (dump_file)
1849 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1851 /* Remove all single-valued PHI nodes from block B of the form
1852 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1853 gsi = gsi_last_bb (a);
1854 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1856 gimple phi = gsi_stmt (psi);
1857 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1858 gimple copy;
1859 bool may_replace_uses = (virtual_operand_p (def)
1860 || may_propagate_copy (def, use));
1862 /* In case we maintain loop closed ssa form, do not propagate arguments
1863 of loop exit phi nodes. */
1864 if (current_loops
1865 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1866 && !virtual_operand_p (def)
1867 && TREE_CODE (use) == SSA_NAME
1868 && a->loop_father != b->loop_father)
1869 may_replace_uses = false;
1871 if (!may_replace_uses)
1873 gcc_assert (!virtual_operand_p (def));
1875 /* Note that just emitting the copies is fine -- there is no problem
1876 with ordering of phi nodes. This is because A is the single
1877 predecessor of B, therefore results of the phi nodes cannot
1878 appear as arguments of the phi nodes. */
1879 copy = gimple_build_assign (def, use);
1880 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1881 remove_phi_node (&psi, false);
1883 else
1885 /* If we deal with a PHI for virtual operands, we can simply
1886 propagate these without fussing with folding or updating
1887 the stmt. */
1888 if (virtual_operand_p (def))
1890 imm_use_iterator iter;
1891 use_operand_p use_p;
1892 gimple stmt;
1894 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1895 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1896 SET_USE (use_p, use);
1898 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1899 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1901 else
1902 replace_uses_by (def, use);
1904 remove_phi_node (&psi, true);
1908 /* Ensure that B follows A. */
1909 move_block_after (b, a);
1911 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1912 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1914 /* Remove labels from B and set gimple_bb to A for other statements. */
1915 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1917 gimple stmt = gsi_stmt (gsi);
1918 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1920 tree label = gimple_label_label (label_stmt);
1921 int lp_nr;
1923 gsi_remove (&gsi, false);
1925 /* Now that we can thread computed gotos, we might have
1926 a situation where we have a forced label in block B
1927 However, the label at the start of block B might still be
1928 used in other ways (think about the runtime checking for
1929 Fortran assigned gotos). So we can not just delete the
1930 label. Instead we move the label to the start of block A. */
1931 if (FORCED_LABEL (label))
1933 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1934 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1936 /* Other user labels keep around in a form of a debug stmt. */
1937 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1939 gimple dbg = gimple_build_debug_bind (label,
1940 integer_zero_node,
1941 stmt);
1942 gimple_debug_bind_reset_value (dbg);
1943 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1946 lp_nr = EH_LANDING_PAD_NR (label);
1947 if (lp_nr)
1949 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1950 lp->post_landing_pad = NULL;
1953 else
1955 gimple_set_bb (stmt, a);
1956 gsi_next (&gsi);
1960 /* When merging two BBs, if their counts are different, the larger count
1961 is selected as the new bb count. This is to handle inconsistent
1962 profiles. */
1963 if (a->loop_father == b->loop_father)
1965 a->count = MAX (a->count, b->count);
1966 a->frequency = MAX (a->frequency, b->frequency);
1969 /* Merge the sequences. */
1970 last = gsi_last_bb (a);
1971 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1972 set_bb_seq (b, NULL);
1974 if (cfgcleanup_altered_bbs)
1975 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1979 /* Return the one of two successors of BB that is not reachable by a
1980 complex edge, if there is one. Else, return BB. We use
1981 this in optimizations that use post-dominators for their heuristics,
1982 to catch the cases in C++ where function calls are involved. */
1984 basic_block
1985 single_noncomplex_succ (basic_block bb)
1987 edge e0, e1;
1988 if (EDGE_COUNT (bb->succs) != 2)
1989 return bb;
1991 e0 = EDGE_SUCC (bb, 0);
1992 e1 = EDGE_SUCC (bb, 1);
1993 if (e0->flags & EDGE_COMPLEX)
1994 return e1->dest;
1995 if (e1->flags & EDGE_COMPLEX)
1996 return e0->dest;
1998 return bb;
2001 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2003 void
2004 notice_special_calls (gcall *call)
2006 int flags = gimple_call_flags (call);
2008 if (flags & ECF_MAY_BE_ALLOCA)
2009 cfun->calls_alloca = true;
2010 if (flags & ECF_RETURNS_TWICE)
2011 cfun->calls_setjmp = true;
2015 /* Clear flags set by notice_special_calls. Used by dead code removal
2016 to update the flags. */
2018 void
2019 clear_special_calls (void)
2021 cfun->calls_alloca = false;
2022 cfun->calls_setjmp = false;
2025 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2027 static void
2028 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2030 /* Since this block is no longer reachable, we can just delete all
2031 of its PHI nodes. */
2032 remove_phi_nodes (bb);
2034 /* Remove edges to BB's successors. */
2035 while (EDGE_COUNT (bb->succs) > 0)
2036 remove_edge (EDGE_SUCC (bb, 0));
2040 /* Remove statements of basic block BB. */
2042 static void
2043 remove_bb (basic_block bb)
2045 gimple_stmt_iterator i;
2047 if (dump_file)
2049 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2050 if (dump_flags & TDF_DETAILS)
2052 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2053 fprintf (dump_file, "\n");
2057 if (current_loops)
2059 struct loop *loop = bb->loop_father;
2061 /* If a loop gets removed, clean up the information associated
2062 with it. */
2063 if (loop->latch == bb
2064 || loop->header == bb)
2065 free_numbers_of_iterations_estimates_loop (loop);
2068 /* Remove all the instructions in the block. */
2069 if (bb_seq (bb) != NULL)
2071 /* Walk backwards so as to get a chance to substitute all
2072 released DEFs into debug stmts. See
2073 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2074 details. */
2075 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2077 gimple stmt = gsi_stmt (i);
2078 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2079 if (label_stmt
2080 && (FORCED_LABEL (gimple_label_label (label_stmt))
2081 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2083 basic_block new_bb;
2084 gimple_stmt_iterator new_gsi;
2086 /* A non-reachable non-local label may still be referenced.
2087 But it no longer needs to carry the extra semantics of
2088 non-locality. */
2089 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2091 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2092 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2095 new_bb = bb->prev_bb;
2096 new_gsi = gsi_start_bb (new_bb);
2097 gsi_remove (&i, false);
2098 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2100 else
2102 /* Release SSA definitions if we are in SSA. Note that we
2103 may be called when not in SSA. For example,
2104 final_cleanup calls this function via
2105 cleanup_tree_cfg. */
2106 if (gimple_in_ssa_p (cfun))
2107 release_defs (stmt);
2109 gsi_remove (&i, true);
2112 if (gsi_end_p (i))
2113 i = gsi_last_bb (bb);
2114 else
2115 gsi_prev (&i);
2119 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2120 bb->il.gimple.seq = NULL;
2121 bb->il.gimple.phi_nodes = NULL;
2125 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2126 predicate VAL, return the edge that will be taken out of the block.
2127 If VAL does not match a unique edge, NULL is returned. */
2129 edge
2130 find_taken_edge (basic_block bb, tree val)
2132 gimple stmt;
2134 stmt = last_stmt (bb);
2136 gcc_assert (stmt);
2137 gcc_assert (is_ctrl_stmt (stmt));
2139 if (val == NULL)
2140 return NULL;
2142 if (!is_gimple_min_invariant (val))
2143 return NULL;
2145 if (gimple_code (stmt) == GIMPLE_COND)
2146 return find_taken_edge_cond_expr (bb, val);
2148 if (gimple_code (stmt) == GIMPLE_SWITCH)
2149 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2151 if (computed_goto_p (stmt))
2153 /* Only optimize if the argument is a label, if the argument is
2154 not a label then we can not construct a proper CFG.
2156 It may be the case that we only need to allow the LABEL_REF to
2157 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2158 appear inside a LABEL_EXPR just to be safe. */
2159 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2160 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2161 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2162 return NULL;
2165 gcc_unreachable ();
2168 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2169 statement, determine which of the outgoing edges will be taken out of the
2170 block. Return NULL if either edge may be taken. */
2172 static edge
2173 find_taken_edge_computed_goto (basic_block bb, tree val)
2175 basic_block dest;
2176 edge e = NULL;
2178 dest = label_to_block (val);
2179 if (dest)
2181 e = find_edge (bb, dest);
2182 gcc_assert (e != NULL);
2185 return e;
2188 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2189 statement, determine which of the two edges will be taken out of the
2190 block. Return NULL if either edge may be taken. */
2192 static edge
2193 find_taken_edge_cond_expr (basic_block bb, tree val)
2195 edge true_edge, false_edge;
2197 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2199 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2200 return (integer_zerop (val) ? false_edge : true_edge);
2203 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2204 statement, determine which edge will be taken out of the block. Return
2205 NULL if any edge may be taken. */
2207 static edge
2208 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2209 tree val)
2211 basic_block dest_bb;
2212 edge e;
2213 tree taken_case;
2215 taken_case = find_case_label_for_value (switch_stmt, val);
2216 dest_bb = label_to_block (CASE_LABEL (taken_case));
2218 e = find_edge (bb, dest_bb);
2219 gcc_assert (e);
2220 return e;
2224 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2225 We can make optimal use here of the fact that the case labels are
2226 sorted: We can do a binary search for a case matching VAL. */
2228 static tree
2229 find_case_label_for_value (gswitch *switch_stmt, tree val)
2231 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2232 tree default_case = gimple_switch_default_label (switch_stmt);
2234 for (low = 0, high = n; high - low > 1; )
2236 size_t i = (high + low) / 2;
2237 tree t = gimple_switch_label (switch_stmt, i);
2238 int cmp;
2240 /* Cache the result of comparing CASE_LOW and val. */
2241 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2243 if (cmp > 0)
2244 high = i;
2245 else
2246 low = i;
2248 if (CASE_HIGH (t) == NULL)
2250 /* A singe-valued case label. */
2251 if (cmp == 0)
2252 return t;
2254 else
2256 /* A case range. We can only handle integer ranges. */
2257 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2258 return t;
2262 return default_case;
2266 /* Dump a basic block on stderr. */
2268 void
2269 gimple_debug_bb (basic_block bb)
2271 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2275 /* Dump basic block with index N on stderr. */
2277 basic_block
2278 gimple_debug_bb_n (int n)
2280 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2281 return BASIC_BLOCK_FOR_FN (cfun, n);
2285 /* Dump the CFG on stderr.
2287 FLAGS are the same used by the tree dumping functions
2288 (see TDF_* in dumpfile.h). */
2290 void
2291 gimple_debug_cfg (int flags)
2293 gimple_dump_cfg (stderr, flags);
2297 /* Dump the program showing basic block boundaries on the given FILE.
2299 FLAGS are the same used by the tree dumping functions (see TDF_* in
2300 tree.h). */
2302 void
2303 gimple_dump_cfg (FILE *file, int flags)
2305 if (flags & TDF_DETAILS)
2307 dump_function_header (file, current_function_decl, flags);
2308 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2309 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2310 last_basic_block_for_fn (cfun));
2312 brief_dump_cfg (file, flags | TDF_COMMENT);
2313 fprintf (file, "\n");
2316 if (flags & TDF_STATS)
2317 dump_cfg_stats (file);
2319 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2323 /* Dump CFG statistics on FILE. */
2325 void
2326 dump_cfg_stats (FILE *file)
2328 static long max_num_merged_labels = 0;
2329 unsigned long size, total = 0;
2330 long num_edges;
2331 basic_block bb;
2332 const char * const fmt_str = "%-30s%-13s%12s\n";
2333 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2334 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2335 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2336 const char *funcname = current_function_name ();
2338 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2340 fprintf (file, "---------------------------------------------------------\n");
2341 fprintf (file, fmt_str, "", " Number of ", "Memory");
2342 fprintf (file, fmt_str, "", " instances ", "used ");
2343 fprintf (file, "---------------------------------------------------------\n");
2345 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2346 total += size;
2347 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2348 SCALE (size), LABEL (size));
2350 num_edges = 0;
2351 FOR_EACH_BB_FN (bb, cfun)
2352 num_edges += EDGE_COUNT (bb->succs);
2353 size = num_edges * sizeof (struct edge_def);
2354 total += size;
2355 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2357 fprintf (file, "---------------------------------------------------------\n");
2358 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2359 LABEL (total));
2360 fprintf (file, "---------------------------------------------------------\n");
2361 fprintf (file, "\n");
2363 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2364 max_num_merged_labels = cfg_stats.num_merged_labels;
2366 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2367 cfg_stats.num_merged_labels, max_num_merged_labels);
2369 fprintf (file, "\n");
2373 /* Dump CFG statistics on stderr. Keep extern so that it's always
2374 linked in the final executable. */
2376 DEBUG_FUNCTION void
2377 debug_cfg_stats (void)
2379 dump_cfg_stats (stderr);
2382 /*---------------------------------------------------------------------------
2383 Miscellaneous helpers
2384 ---------------------------------------------------------------------------*/
2386 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2387 flow. Transfers of control flow associated with EH are excluded. */
2389 static bool
2390 call_can_make_abnormal_goto (gimple t)
2392 /* If the function has no non-local labels, then a call cannot make an
2393 abnormal transfer of control. */
2394 if (!cfun->has_nonlocal_label
2395 && !cfun->calls_setjmp)
2396 return false;
2398 /* Likewise if the call has no side effects. */
2399 if (!gimple_has_side_effects (t))
2400 return false;
2402 /* Likewise if the called function is leaf. */
2403 if (gimple_call_flags (t) & ECF_LEAF)
2404 return false;
2406 return true;
2410 /* Return true if T can make an abnormal transfer of control flow.
2411 Transfers of control flow associated with EH are excluded. */
2413 bool
2414 stmt_can_make_abnormal_goto (gimple t)
2416 if (computed_goto_p (t))
2417 return true;
2418 if (is_gimple_call (t))
2419 return call_can_make_abnormal_goto (t);
2420 return false;
2424 /* Return true if T represents a stmt that always transfers control. */
2426 bool
2427 is_ctrl_stmt (gimple t)
2429 switch (gimple_code (t))
2431 case GIMPLE_COND:
2432 case GIMPLE_SWITCH:
2433 case GIMPLE_GOTO:
2434 case GIMPLE_RETURN:
2435 case GIMPLE_RESX:
2436 return true;
2437 default:
2438 return false;
2443 /* Return true if T is a statement that may alter the flow of control
2444 (e.g., a call to a non-returning function). */
2446 bool
2447 is_ctrl_altering_stmt (gimple t)
2449 gcc_assert (t);
2451 switch (gimple_code (t))
2453 case GIMPLE_CALL:
2454 /* Per stmt call flag indicates whether the call could alter
2455 controlflow. */
2456 if (gimple_call_ctrl_altering_p (t))
2457 return true;
2458 break;
2460 case GIMPLE_EH_DISPATCH:
2461 /* EH_DISPATCH branches to the individual catch handlers at
2462 this level of a try or allowed-exceptions region. It can
2463 fallthru to the next statement as well. */
2464 return true;
2466 case GIMPLE_ASM:
2467 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2468 return true;
2469 break;
2471 CASE_GIMPLE_OMP:
2472 /* OpenMP directives alter control flow. */
2473 return true;
2475 case GIMPLE_TRANSACTION:
2476 /* A transaction start alters control flow. */
2477 return true;
2479 default:
2480 break;
2483 /* If a statement can throw, it alters control flow. */
2484 return stmt_can_throw_internal (t);
2488 /* Return true if T is a simple local goto. */
2490 bool
2491 simple_goto_p (gimple t)
2493 return (gimple_code (t) == GIMPLE_GOTO
2494 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2498 /* Return true if STMT should start a new basic block. PREV_STMT is
2499 the statement preceding STMT. It is used when STMT is a label or a
2500 case label. Labels should only start a new basic block if their
2501 previous statement wasn't a label. Otherwise, sequence of labels
2502 would generate unnecessary basic blocks that only contain a single
2503 label. */
2505 static inline bool
2506 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2508 if (stmt == NULL)
2509 return false;
2511 /* Labels start a new basic block only if the preceding statement
2512 wasn't a label of the same type. This prevents the creation of
2513 consecutive blocks that have nothing but a single label. */
2514 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2516 /* Nonlocal and computed GOTO targets always start a new block. */
2517 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2518 || FORCED_LABEL (gimple_label_label (label_stmt)))
2519 return true;
2521 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2523 if (DECL_NONLOCAL (gimple_label_label (
2524 as_a <glabel *> (prev_stmt))))
2525 return true;
2527 cfg_stats.num_merged_labels++;
2528 return false;
2530 else
2531 return true;
2533 else if (gimple_code (stmt) == GIMPLE_CALL
2534 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2535 /* setjmp acts similar to a nonlocal GOTO target and thus should
2536 start a new block. */
2537 return true;
2539 return false;
2543 /* Return true if T should end a basic block. */
2545 bool
2546 stmt_ends_bb_p (gimple t)
2548 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2551 /* Remove block annotations and other data structures. */
2553 void
2554 delete_tree_cfg_annotations (void)
2556 vec_free (label_to_block_map_for_fn (cfun));
2560 /* Return the first statement in basic block BB. */
2562 gimple
2563 first_stmt (basic_block bb)
2565 gimple_stmt_iterator i = gsi_start_bb (bb);
2566 gimple stmt = NULL;
2568 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2570 gsi_next (&i);
2571 stmt = NULL;
2573 return stmt;
2576 /* Return the first non-label statement in basic block BB. */
2578 static gimple
2579 first_non_label_stmt (basic_block bb)
2581 gimple_stmt_iterator i = gsi_start_bb (bb);
2582 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2583 gsi_next (&i);
2584 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2587 /* Return the last statement in basic block BB. */
2589 gimple
2590 last_stmt (basic_block bb)
2592 gimple_stmt_iterator i = gsi_last_bb (bb);
2593 gimple stmt = NULL;
2595 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2597 gsi_prev (&i);
2598 stmt = NULL;
2600 return stmt;
2603 /* Return the last statement of an otherwise empty block. Return NULL
2604 if the block is totally empty, or if it contains more than one
2605 statement. */
2607 gimple
2608 last_and_only_stmt (basic_block bb)
2610 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2611 gimple last, prev;
2613 if (gsi_end_p (i))
2614 return NULL;
2616 last = gsi_stmt (i);
2617 gsi_prev_nondebug (&i);
2618 if (gsi_end_p (i))
2619 return last;
2621 /* Empty statements should no longer appear in the instruction stream.
2622 Everything that might have appeared before should be deleted by
2623 remove_useless_stmts, and the optimizers should just gsi_remove
2624 instead of smashing with build_empty_stmt.
2626 Thus the only thing that should appear here in a block containing
2627 one executable statement is a label. */
2628 prev = gsi_stmt (i);
2629 if (gimple_code (prev) == GIMPLE_LABEL)
2630 return last;
2631 else
2632 return NULL;
2635 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2637 static void
2638 reinstall_phi_args (edge new_edge, edge old_edge)
2640 edge_var_map *vm;
2641 int i;
2642 gphi_iterator phis;
2644 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2645 if (!v)
2646 return;
2648 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2649 v->iterate (i, &vm) && !gsi_end_p (phis);
2650 i++, gsi_next (&phis))
2652 gphi *phi = phis.phi ();
2653 tree result = redirect_edge_var_map_result (vm);
2654 tree arg = redirect_edge_var_map_def (vm);
2656 gcc_assert (result == gimple_phi_result (phi));
2658 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2661 redirect_edge_var_map_clear (old_edge);
2664 /* Returns the basic block after which the new basic block created
2665 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2666 near its "logical" location. This is of most help to humans looking
2667 at debugging dumps. */
2669 static basic_block
2670 split_edge_bb_loc (edge edge_in)
2672 basic_block dest = edge_in->dest;
2673 basic_block dest_prev = dest->prev_bb;
2675 if (dest_prev)
2677 edge e = find_edge (dest_prev, dest);
2678 if (e && !(e->flags & EDGE_COMPLEX))
2679 return edge_in->src;
2681 return dest_prev;
2684 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2685 Abort on abnormal edges. */
2687 static basic_block
2688 gimple_split_edge (edge edge_in)
2690 basic_block new_bb, after_bb, dest;
2691 edge new_edge, e;
2693 /* Abnormal edges cannot be split. */
2694 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2696 dest = edge_in->dest;
2698 after_bb = split_edge_bb_loc (edge_in);
2700 new_bb = create_empty_bb (after_bb);
2701 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2702 new_bb->count = edge_in->count;
2703 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2704 new_edge->probability = REG_BR_PROB_BASE;
2705 new_edge->count = edge_in->count;
2707 e = redirect_edge_and_branch (edge_in, new_bb);
2708 gcc_assert (e == edge_in);
2709 reinstall_phi_args (new_edge, e);
2711 return new_bb;
2715 /* Verify properties of the address expression T with base object BASE. */
2717 static tree
2718 verify_address (tree t, tree base)
2720 bool old_constant;
2721 bool old_side_effects;
2722 bool new_constant;
2723 bool new_side_effects;
2725 old_constant = TREE_CONSTANT (t);
2726 old_side_effects = TREE_SIDE_EFFECTS (t);
2728 recompute_tree_invariant_for_addr_expr (t);
2729 new_side_effects = TREE_SIDE_EFFECTS (t);
2730 new_constant = TREE_CONSTANT (t);
2732 if (old_constant != new_constant)
2734 error ("constant not recomputed when ADDR_EXPR changed");
2735 return t;
2737 if (old_side_effects != new_side_effects)
2739 error ("side effects not recomputed when ADDR_EXPR changed");
2740 return t;
2743 if (!(TREE_CODE (base) == VAR_DECL
2744 || TREE_CODE (base) == PARM_DECL
2745 || TREE_CODE (base) == RESULT_DECL))
2746 return NULL_TREE;
2748 if (DECL_GIMPLE_REG_P (base))
2750 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2751 return base;
2754 return NULL_TREE;
2757 /* Callback for walk_tree, check that all elements with address taken are
2758 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2759 inside a PHI node. */
2761 static tree
2762 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2764 tree t = *tp, x;
2766 if (TYPE_P (t))
2767 *walk_subtrees = 0;
2769 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2770 #define CHECK_OP(N, MSG) \
2771 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2772 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2774 switch (TREE_CODE (t))
2776 case SSA_NAME:
2777 if (SSA_NAME_IN_FREE_LIST (t))
2779 error ("SSA name in freelist but still referenced");
2780 return *tp;
2782 break;
2784 case INDIRECT_REF:
2785 error ("INDIRECT_REF in gimple IL");
2786 return t;
2788 case MEM_REF:
2789 x = TREE_OPERAND (t, 0);
2790 if (!POINTER_TYPE_P (TREE_TYPE (x))
2791 || !is_gimple_mem_ref_addr (x))
2793 error ("invalid first operand of MEM_REF");
2794 return x;
2796 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2797 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2799 error ("invalid offset operand of MEM_REF");
2800 return TREE_OPERAND (t, 1);
2802 if (TREE_CODE (x) == ADDR_EXPR
2803 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2804 return x;
2805 *walk_subtrees = 0;
2806 break;
2808 case ASSERT_EXPR:
2809 x = fold (ASSERT_EXPR_COND (t));
2810 if (x == boolean_false_node)
2812 error ("ASSERT_EXPR with an always-false condition");
2813 return *tp;
2815 break;
2817 case MODIFY_EXPR:
2818 error ("MODIFY_EXPR not expected while having tuples");
2819 return *tp;
2821 case ADDR_EXPR:
2823 tree tem;
2825 gcc_assert (is_gimple_address (t));
2827 /* Skip any references (they will be checked when we recurse down the
2828 tree) and ensure that any variable used as a prefix is marked
2829 addressable. */
2830 for (x = TREE_OPERAND (t, 0);
2831 handled_component_p (x);
2832 x = TREE_OPERAND (x, 0))
2835 if ((tem = verify_address (t, x)))
2836 return tem;
2838 if (!(TREE_CODE (x) == VAR_DECL
2839 || TREE_CODE (x) == PARM_DECL
2840 || TREE_CODE (x) == RESULT_DECL))
2841 return NULL;
2843 if (!TREE_ADDRESSABLE (x))
2845 error ("address taken, but ADDRESSABLE bit not set");
2846 return x;
2849 break;
2852 case COND_EXPR:
2853 x = COND_EXPR_COND (t);
2854 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2856 error ("non-integral used in condition");
2857 return x;
2859 if (!is_gimple_condexpr (x))
2861 error ("invalid conditional operand");
2862 return x;
2864 break;
2866 case NON_LVALUE_EXPR:
2867 case TRUTH_NOT_EXPR:
2868 gcc_unreachable ();
2870 CASE_CONVERT:
2871 case FIX_TRUNC_EXPR:
2872 case FLOAT_EXPR:
2873 case NEGATE_EXPR:
2874 case ABS_EXPR:
2875 case BIT_NOT_EXPR:
2876 CHECK_OP (0, "invalid operand to unary operator");
2877 break;
2879 case REALPART_EXPR:
2880 case IMAGPART_EXPR:
2881 case BIT_FIELD_REF:
2882 if (!is_gimple_reg_type (TREE_TYPE (t)))
2884 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2885 return t;
2888 if (TREE_CODE (t) == BIT_FIELD_REF)
2890 tree t0 = TREE_OPERAND (t, 0);
2891 tree t1 = TREE_OPERAND (t, 1);
2892 tree t2 = TREE_OPERAND (t, 2);
2893 if (!tree_fits_uhwi_p (t1)
2894 || !tree_fits_uhwi_p (t2))
2896 error ("invalid position or size operand to BIT_FIELD_REF");
2897 return t;
2899 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2900 && (TYPE_PRECISION (TREE_TYPE (t))
2901 != tree_to_uhwi (t1)))
2903 error ("integral result type precision does not match "
2904 "field size of BIT_FIELD_REF");
2905 return t;
2907 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2908 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2909 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2910 != tree_to_uhwi (t1)))
2912 error ("mode precision of non-integral result does not "
2913 "match field size of BIT_FIELD_REF");
2914 return t;
2916 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2917 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2918 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2920 error ("position plus size exceeds size of referenced object in "
2921 "BIT_FIELD_REF");
2922 return t;
2925 t = TREE_OPERAND (t, 0);
2927 /* Fall-through. */
2928 case COMPONENT_REF:
2929 case ARRAY_REF:
2930 case ARRAY_RANGE_REF:
2931 case VIEW_CONVERT_EXPR:
2932 /* We have a nest of references. Verify that each of the operands
2933 that determine where to reference is either a constant or a variable,
2934 verify that the base is valid, and then show we've already checked
2935 the subtrees. */
2936 while (handled_component_p (t))
2938 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2939 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2940 else if (TREE_CODE (t) == ARRAY_REF
2941 || TREE_CODE (t) == ARRAY_RANGE_REF)
2943 CHECK_OP (1, "invalid array index");
2944 if (TREE_OPERAND (t, 2))
2945 CHECK_OP (2, "invalid array lower bound");
2946 if (TREE_OPERAND (t, 3))
2947 CHECK_OP (3, "invalid array stride");
2949 else if (TREE_CODE (t) == BIT_FIELD_REF
2950 || TREE_CODE (t) == REALPART_EXPR
2951 || TREE_CODE (t) == IMAGPART_EXPR)
2953 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2954 "REALPART_EXPR");
2955 return t;
2958 t = TREE_OPERAND (t, 0);
2961 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2963 error ("invalid reference prefix");
2964 return t;
2966 *walk_subtrees = 0;
2967 break;
2968 case PLUS_EXPR:
2969 case MINUS_EXPR:
2970 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2971 POINTER_PLUS_EXPR. */
2972 if (POINTER_TYPE_P (TREE_TYPE (t)))
2974 error ("invalid operand to plus/minus, type is a pointer");
2975 return t;
2977 CHECK_OP (0, "invalid operand to binary operator");
2978 CHECK_OP (1, "invalid operand to binary operator");
2979 break;
2981 case POINTER_PLUS_EXPR:
2982 /* Check to make sure the first operand is a pointer or reference type. */
2983 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2985 error ("invalid operand to pointer plus, first operand is not a pointer");
2986 return t;
2988 /* Check to make sure the second operand is a ptrofftype. */
2989 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2991 error ("invalid operand to pointer plus, second operand is not an "
2992 "integer type of appropriate width");
2993 return t;
2995 /* FALLTHROUGH */
2996 case LT_EXPR:
2997 case LE_EXPR:
2998 case GT_EXPR:
2999 case GE_EXPR:
3000 case EQ_EXPR:
3001 case NE_EXPR:
3002 case UNORDERED_EXPR:
3003 case ORDERED_EXPR:
3004 case UNLT_EXPR:
3005 case UNLE_EXPR:
3006 case UNGT_EXPR:
3007 case UNGE_EXPR:
3008 case UNEQ_EXPR:
3009 case LTGT_EXPR:
3010 case MULT_EXPR:
3011 case TRUNC_DIV_EXPR:
3012 case CEIL_DIV_EXPR:
3013 case FLOOR_DIV_EXPR:
3014 case ROUND_DIV_EXPR:
3015 case TRUNC_MOD_EXPR:
3016 case CEIL_MOD_EXPR:
3017 case FLOOR_MOD_EXPR:
3018 case ROUND_MOD_EXPR:
3019 case RDIV_EXPR:
3020 case EXACT_DIV_EXPR:
3021 case MIN_EXPR:
3022 case MAX_EXPR:
3023 case LSHIFT_EXPR:
3024 case RSHIFT_EXPR:
3025 case LROTATE_EXPR:
3026 case RROTATE_EXPR:
3027 case BIT_IOR_EXPR:
3028 case BIT_XOR_EXPR:
3029 case BIT_AND_EXPR:
3030 CHECK_OP (0, "invalid operand to binary operator");
3031 CHECK_OP (1, "invalid operand to binary operator");
3032 break;
3034 case CONSTRUCTOR:
3035 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3036 *walk_subtrees = 0;
3037 break;
3039 case CASE_LABEL_EXPR:
3040 if (CASE_CHAIN (t))
3042 error ("invalid CASE_CHAIN");
3043 return t;
3045 break;
3047 default:
3048 break;
3050 return NULL;
3052 #undef CHECK_OP
3056 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3057 Returns true if there is an error, otherwise false. */
3059 static bool
3060 verify_types_in_gimple_min_lval (tree expr)
3062 tree op;
3064 if (is_gimple_id (expr))
3065 return false;
3067 if (TREE_CODE (expr) != TARGET_MEM_REF
3068 && TREE_CODE (expr) != MEM_REF)
3070 error ("invalid expression for min lvalue");
3071 return true;
3074 /* TARGET_MEM_REFs are strange beasts. */
3075 if (TREE_CODE (expr) == TARGET_MEM_REF)
3076 return false;
3078 op = TREE_OPERAND (expr, 0);
3079 if (!is_gimple_val (op))
3081 error ("invalid operand in indirect reference");
3082 debug_generic_stmt (op);
3083 return true;
3085 /* Memory references now generally can involve a value conversion. */
3087 return false;
3090 /* Verify if EXPR is a valid GIMPLE reference expression. If
3091 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3092 if there is an error, otherwise false. */
3094 static bool
3095 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3097 while (handled_component_p (expr))
3099 tree op = TREE_OPERAND (expr, 0);
3101 if (TREE_CODE (expr) == ARRAY_REF
3102 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3104 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3105 || (TREE_OPERAND (expr, 2)
3106 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3107 || (TREE_OPERAND (expr, 3)
3108 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3110 error ("invalid operands to array reference");
3111 debug_generic_stmt (expr);
3112 return true;
3116 /* Verify if the reference array element types are compatible. */
3117 if (TREE_CODE (expr) == ARRAY_REF
3118 && !useless_type_conversion_p (TREE_TYPE (expr),
3119 TREE_TYPE (TREE_TYPE (op))))
3121 error ("type mismatch in array reference");
3122 debug_generic_stmt (TREE_TYPE (expr));
3123 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3124 return true;
3126 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3127 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3128 TREE_TYPE (TREE_TYPE (op))))
3130 error ("type mismatch in array range reference");
3131 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3132 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3133 return true;
3136 if ((TREE_CODE (expr) == REALPART_EXPR
3137 || TREE_CODE (expr) == IMAGPART_EXPR)
3138 && !useless_type_conversion_p (TREE_TYPE (expr),
3139 TREE_TYPE (TREE_TYPE (op))))
3141 error ("type mismatch in real/imagpart reference");
3142 debug_generic_stmt (TREE_TYPE (expr));
3143 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3144 return true;
3147 if (TREE_CODE (expr) == COMPONENT_REF
3148 && !useless_type_conversion_p (TREE_TYPE (expr),
3149 TREE_TYPE (TREE_OPERAND (expr, 1))))
3151 error ("type mismatch in component reference");
3152 debug_generic_stmt (TREE_TYPE (expr));
3153 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3154 return true;
3157 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3159 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3160 that their operand is not an SSA name or an invariant when
3161 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3162 bug). Otherwise there is nothing to verify, gross mismatches at
3163 most invoke undefined behavior. */
3164 if (require_lvalue
3165 && (TREE_CODE (op) == SSA_NAME
3166 || is_gimple_min_invariant (op)))
3168 error ("conversion of an SSA_NAME on the left hand side");
3169 debug_generic_stmt (expr);
3170 return true;
3172 else if (TREE_CODE (op) == SSA_NAME
3173 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3175 error ("conversion of register to a different size");
3176 debug_generic_stmt (expr);
3177 return true;
3179 else if (!handled_component_p (op))
3180 return false;
3183 expr = op;
3186 if (TREE_CODE (expr) == MEM_REF)
3188 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3190 error ("invalid address operand in MEM_REF");
3191 debug_generic_stmt (expr);
3192 return true;
3194 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3195 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3197 error ("invalid offset operand in MEM_REF");
3198 debug_generic_stmt (expr);
3199 return true;
3202 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3204 if (!TMR_BASE (expr)
3205 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3207 error ("invalid address operand in TARGET_MEM_REF");
3208 return true;
3210 if (!TMR_OFFSET (expr)
3211 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3212 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3214 error ("invalid offset operand in TARGET_MEM_REF");
3215 debug_generic_stmt (expr);
3216 return true;
3220 return ((require_lvalue || !is_gimple_min_invariant (expr))
3221 && verify_types_in_gimple_min_lval (expr));
3224 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3225 list of pointer-to types that is trivially convertible to DEST. */
3227 static bool
3228 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3230 tree src;
3232 if (!TYPE_POINTER_TO (src_obj))
3233 return true;
3235 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3236 if (useless_type_conversion_p (dest, src))
3237 return true;
3239 return false;
3242 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3243 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3245 static bool
3246 valid_fixed_convert_types_p (tree type1, tree type2)
3248 return (FIXED_POINT_TYPE_P (type1)
3249 && (INTEGRAL_TYPE_P (type2)
3250 || SCALAR_FLOAT_TYPE_P (type2)
3251 || FIXED_POINT_TYPE_P (type2)));
3254 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3255 is a problem, otherwise false. */
3257 static bool
3258 verify_gimple_call (gcall *stmt)
3260 tree fn = gimple_call_fn (stmt);
3261 tree fntype, fndecl;
3262 unsigned i;
3264 if (gimple_call_internal_p (stmt))
3266 if (fn)
3268 error ("gimple call has two targets");
3269 debug_generic_stmt (fn);
3270 return true;
3273 else
3275 if (!fn)
3277 error ("gimple call has no target");
3278 return true;
3282 if (fn && !is_gimple_call_addr (fn))
3284 error ("invalid function in gimple call");
3285 debug_generic_stmt (fn);
3286 return true;
3289 if (fn
3290 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3291 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3292 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3294 error ("non-function in gimple call");
3295 return true;
3298 fndecl = gimple_call_fndecl (stmt);
3299 if (fndecl
3300 && TREE_CODE (fndecl) == FUNCTION_DECL
3301 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3302 && !DECL_PURE_P (fndecl)
3303 && !TREE_READONLY (fndecl))
3305 error ("invalid pure const state for function");
3306 return true;
3309 if (gimple_call_lhs (stmt)
3310 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3311 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3313 error ("invalid LHS in gimple call");
3314 return true;
3317 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3319 error ("LHS in noreturn call");
3320 return true;
3323 fntype = gimple_call_fntype (stmt);
3324 if (fntype
3325 && gimple_call_lhs (stmt)
3326 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3327 TREE_TYPE (fntype))
3328 /* ??? At least C++ misses conversions at assignments from
3329 void * call results.
3330 ??? Java is completely off. Especially with functions
3331 returning java.lang.Object.
3332 For now simply allow arbitrary pointer type conversions. */
3333 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3334 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3336 error ("invalid conversion in gimple call");
3337 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3338 debug_generic_stmt (TREE_TYPE (fntype));
3339 return true;
3342 if (gimple_call_chain (stmt)
3343 && !is_gimple_val (gimple_call_chain (stmt)))
3345 error ("invalid static chain in gimple call");
3346 debug_generic_stmt (gimple_call_chain (stmt));
3347 return true;
3350 /* If there is a static chain argument, the call should either be
3351 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3352 if (gimple_call_chain (stmt)
3353 && fndecl
3354 && !DECL_STATIC_CHAIN (fndecl))
3356 error ("static chain with function that doesn%'t use one");
3357 return true;
3360 /* ??? The C frontend passes unpromoted arguments in case it
3361 didn't see a function declaration before the call. So for now
3362 leave the call arguments mostly unverified. Once we gimplify
3363 unit-at-a-time we have a chance to fix this. */
3365 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3367 tree arg = gimple_call_arg (stmt, i);
3368 if ((is_gimple_reg_type (TREE_TYPE (arg))
3369 && !is_gimple_val (arg))
3370 || (!is_gimple_reg_type (TREE_TYPE (arg))
3371 && !is_gimple_lvalue (arg)))
3373 error ("invalid argument to gimple call");
3374 debug_generic_expr (arg);
3375 return true;
3379 return false;
3382 /* Verifies the gimple comparison with the result type TYPE and
3383 the operands OP0 and OP1. */
3385 static bool
3386 verify_gimple_comparison (tree type, tree op0, tree op1)
3388 tree op0_type = TREE_TYPE (op0);
3389 tree op1_type = TREE_TYPE (op1);
3391 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3393 error ("invalid operands in gimple comparison");
3394 return true;
3397 /* For comparisons we do not have the operations type as the
3398 effective type the comparison is carried out in. Instead
3399 we require that either the first operand is trivially
3400 convertible into the second, or the other way around.
3401 Because we special-case pointers to void we allow
3402 comparisons of pointers with the same mode as well. */
3403 if (!useless_type_conversion_p (op0_type, op1_type)
3404 && !useless_type_conversion_p (op1_type, op0_type)
3405 && (!POINTER_TYPE_P (op0_type)
3406 || !POINTER_TYPE_P (op1_type)
3407 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3409 error ("mismatching comparison operand types");
3410 debug_generic_expr (op0_type);
3411 debug_generic_expr (op1_type);
3412 return true;
3415 /* The resulting type of a comparison may be an effective boolean type. */
3416 if (INTEGRAL_TYPE_P (type)
3417 && (TREE_CODE (type) == BOOLEAN_TYPE
3418 || TYPE_PRECISION (type) == 1))
3420 if (TREE_CODE (op0_type) == VECTOR_TYPE
3421 || TREE_CODE (op1_type) == VECTOR_TYPE)
3423 error ("vector comparison returning a boolean");
3424 debug_generic_expr (op0_type);
3425 debug_generic_expr (op1_type);
3426 return true;
3429 /* Or an integer vector type with the same size and element count
3430 as the comparison operand types. */
3431 else if (TREE_CODE (type) == VECTOR_TYPE
3432 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3434 if (TREE_CODE (op0_type) != VECTOR_TYPE
3435 || TREE_CODE (op1_type) != VECTOR_TYPE)
3437 error ("non-vector operands in vector comparison");
3438 debug_generic_expr (op0_type);
3439 debug_generic_expr (op1_type);
3440 return true;
3443 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3444 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3445 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3446 /* The result of a vector comparison is of signed
3447 integral type. */
3448 || TYPE_UNSIGNED (TREE_TYPE (type)))
3450 error ("invalid vector comparison resulting type");
3451 debug_generic_expr (type);
3452 return true;
3455 else
3457 error ("bogus comparison result type");
3458 debug_generic_expr (type);
3459 return true;
3462 return false;
3465 /* Verify a gimple assignment statement STMT with an unary rhs.
3466 Returns true if anything is wrong. */
3468 static bool
3469 verify_gimple_assign_unary (gassign *stmt)
3471 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3472 tree lhs = gimple_assign_lhs (stmt);
3473 tree lhs_type = TREE_TYPE (lhs);
3474 tree rhs1 = gimple_assign_rhs1 (stmt);
3475 tree rhs1_type = TREE_TYPE (rhs1);
3477 if (!is_gimple_reg (lhs))
3479 error ("non-register as LHS of unary operation");
3480 return true;
3483 if (!is_gimple_val (rhs1))
3485 error ("invalid operand in unary operation");
3486 return true;
3489 /* First handle conversions. */
3490 switch (rhs_code)
3492 CASE_CONVERT:
3494 /* Allow conversions from pointer type to integral type only if
3495 there is no sign or zero extension involved.
3496 For targets were the precision of ptrofftype doesn't match that
3497 of pointers we need to allow arbitrary conversions to ptrofftype. */
3498 if ((POINTER_TYPE_P (lhs_type)
3499 && INTEGRAL_TYPE_P (rhs1_type))
3500 || (POINTER_TYPE_P (rhs1_type)
3501 && INTEGRAL_TYPE_P (lhs_type)
3502 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3503 || ptrofftype_p (sizetype))))
3504 return false;
3506 /* Allow conversion from integral to offset type and vice versa. */
3507 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3508 && INTEGRAL_TYPE_P (rhs1_type))
3509 || (INTEGRAL_TYPE_P (lhs_type)
3510 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3511 return false;
3513 /* Otherwise assert we are converting between types of the
3514 same kind. */
3515 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3517 error ("invalid types in nop conversion");
3518 debug_generic_expr (lhs_type);
3519 debug_generic_expr (rhs1_type);
3520 return true;
3523 return false;
3526 case ADDR_SPACE_CONVERT_EXPR:
3528 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3529 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3530 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3532 error ("invalid types in address space conversion");
3533 debug_generic_expr (lhs_type);
3534 debug_generic_expr (rhs1_type);
3535 return true;
3538 return false;
3541 case FIXED_CONVERT_EXPR:
3543 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3544 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3546 error ("invalid types in fixed-point conversion");
3547 debug_generic_expr (lhs_type);
3548 debug_generic_expr (rhs1_type);
3549 return true;
3552 return false;
3555 case FLOAT_EXPR:
3557 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3558 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3559 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3561 error ("invalid types in conversion to floating point");
3562 debug_generic_expr (lhs_type);
3563 debug_generic_expr (rhs1_type);
3564 return true;
3567 return false;
3570 case FIX_TRUNC_EXPR:
3572 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3573 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3574 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3576 error ("invalid types in conversion to integer");
3577 debug_generic_expr (lhs_type);
3578 debug_generic_expr (rhs1_type);
3579 return true;
3582 return false;
3584 case REDUC_MAX_EXPR:
3585 case REDUC_MIN_EXPR:
3586 case REDUC_PLUS_EXPR:
3587 if (!VECTOR_TYPE_P (rhs1_type)
3588 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3590 error ("reduction should convert from vector to element type");
3591 debug_generic_expr (lhs_type);
3592 debug_generic_expr (rhs1_type);
3593 return true;
3595 return false;
3597 case VEC_UNPACK_HI_EXPR:
3598 case VEC_UNPACK_LO_EXPR:
3599 case VEC_UNPACK_FLOAT_HI_EXPR:
3600 case VEC_UNPACK_FLOAT_LO_EXPR:
3601 /* FIXME. */
3602 return false;
3604 case NEGATE_EXPR:
3605 case ABS_EXPR:
3606 case BIT_NOT_EXPR:
3607 case PAREN_EXPR:
3608 case CONJ_EXPR:
3609 break;
3611 default:
3612 gcc_unreachable ();
3615 /* For the remaining codes assert there is no conversion involved. */
3616 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3618 error ("non-trivial conversion in unary operation");
3619 debug_generic_expr (lhs_type);
3620 debug_generic_expr (rhs1_type);
3621 return true;
3624 return false;
3627 /* Verify a gimple assignment statement STMT with a binary rhs.
3628 Returns true if anything is wrong. */
3630 static bool
3631 verify_gimple_assign_binary (gassign *stmt)
3633 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3634 tree lhs = gimple_assign_lhs (stmt);
3635 tree lhs_type = TREE_TYPE (lhs);
3636 tree rhs1 = gimple_assign_rhs1 (stmt);
3637 tree rhs1_type = TREE_TYPE (rhs1);
3638 tree rhs2 = gimple_assign_rhs2 (stmt);
3639 tree rhs2_type = TREE_TYPE (rhs2);
3641 if (!is_gimple_reg (lhs))
3643 error ("non-register as LHS of binary operation");
3644 return true;
3647 if (!is_gimple_val (rhs1)
3648 || !is_gimple_val (rhs2))
3650 error ("invalid operands in binary operation");
3651 return true;
3654 /* First handle operations that involve different types. */
3655 switch (rhs_code)
3657 case COMPLEX_EXPR:
3659 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3660 || !(INTEGRAL_TYPE_P (rhs1_type)
3661 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3662 || !(INTEGRAL_TYPE_P (rhs2_type)
3663 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3665 error ("type mismatch in complex expression");
3666 debug_generic_expr (lhs_type);
3667 debug_generic_expr (rhs1_type);
3668 debug_generic_expr (rhs2_type);
3669 return true;
3672 return false;
3675 case LSHIFT_EXPR:
3676 case RSHIFT_EXPR:
3677 case LROTATE_EXPR:
3678 case RROTATE_EXPR:
3680 /* Shifts and rotates are ok on integral types, fixed point
3681 types and integer vector types. */
3682 if ((!INTEGRAL_TYPE_P (rhs1_type)
3683 && !FIXED_POINT_TYPE_P (rhs1_type)
3684 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3685 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3686 || (!INTEGRAL_TYPE_P (rhs2_type)
3687 /* Vector shifts of vectors are also ok. */
3688 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3689 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3690 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3691 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3692 || !useless_type_conversion_p (lhs_type, rhs1_type))
3694 error ("type mismatch in shift expression");
3695 debug_generic_expr (lhs_type);
3696 debug_generic_expr (rhs1_type);
3697 debug_generic_expr (rhs2_type);
3698 return true;
3701 return false;
3704 case WIDEN_LSHIFT_EXPR:
3706 if (!INTEGRAL_TYPE_P (lhs_type)
3707 || !INTEGRAL_TYPE_P (rhs1_type)
3708 || TREE_CODE (rhs2) != INTEGER_CST
3709 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3711 error ("type mismatch in widening vector shift expression");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 debug_generic_expr (rhs2_type);
3715 return true;
3718 return false;
3721 case VEC_WIDEN_LSHIFT_HI_EXPR:
3722 case VEC_WIDEN_LSHIFT_LO_EXPR:
3724 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3725 || TREE_CODE (lhs_type) != VECTOR_TYPE
3726 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3727 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3728 || TREE_CODE (rhs2) != INTEGER_CST
3729 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3730 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3732 error ("type mismatch in widening vector shift expression");
3733 debug_generic_expr (lhs_type);
3734 debug_generic_expr (rhs1_type);
3735 debug_generic_expr (rhs2_type);
3736 return true;
3739 return false;
3742 case PLUS_EXPR:
3743 case MINUS_EXPR:
3745 tree lhs_etype = lhs_type;
3746 tree rhs1_etype = rhs1_type;
3747 tree rhs2_etype = rhs2_type;
3748 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3750 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3751 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3753 error ("invalid non-vector operands to vector valued plus");
3754 return true;
3756 lhs_etype = TREE_TYPE (lhs_type);
3757 rhs1_etype = TREE_TYPE (rhs1_type);
3758 rhs2_etype = TREE_TYPE (rhs2_type);
3760 if (POINTER_TYPE_P (lhs_etype)
3761 || POINTER_TYPE_P (rhs1_etype)
3762 || POINTER_TYPE_P (rhs2_etype))
3764 error ("invalid (pointer) operands to plus/minus");
3765 return true;
3768 /* Continue with generic binary expression handling. */
3769 break;
3772 case POINTER_PLUS_EXPR:
3774 if (!POINTER_TYPE_P (rhs1_type)
3775 || !useless_type_conversion_p (lhs_type, rhs1_type)
3776 || !ptrofftype_p (rhs2_type))
3778 error ("type mismatch in pointer plus expression");
3779 debug_generic_stmt (lhs_type);
3780 debug_generic_stmt (rhs1_type);
3781 debug_generic_stmt (rhs2_type);
3782 return true;
3785 return false;
3788 case TRUTH_ANDIF_EXPR:
3789 case TRUTH_ORIF_EXPR:
3790 case TRUTH_AND_EXPR:
3791 case TRUTH_OR_EXPR:
3792 case TRUTH_XOR_EXPR:
3794 gcc_unreachable ();
3796 case LT_EXPR:
3797 case LE_EXPR:
3798 case GT_EXPR:
3799 case GE_EXPR:
3800 case EQ_EXPR:
3801 case NE_EXPR:
3802 case UNORDERED_EXPR:
3803 case ORDERED_EXPR:
3804 case UNLT_EXPR:
3805 case UNLE_EXPR:
3806 case UNGT_EXPR:
3807 case UNGE_EXPR:
3808 case UNEQ_EXPR:
3809 case LTGT_EXPR:
3810 /* Comparisons are also binary, but the result type is not
3811 connected to the operand types. */
3812 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3814 case WIDEN_MULT_EXPR:
3815 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3816 return true;
3817 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3818 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3820 case WIDEN_SUM_EXPR:
3821 case VEC_WIDEN_MULT_HI_EXPR:
3822 case VEC_WIDEN_MULT_LO_EXPR:
3823 case VEC_WIDEN_MULT_EVEN_EXPR:
3824 case VEC_WIDEN_MULT_ODD_EXPR:
3825 case VEC_PACK_TRUNC_EXPR:
3826 case VEC_PACK_SAT_EXPR:
3827 case VEC_PACK_FIX_TRUNC_EXPR:
3828 /* FIXME. */
3829 return false;
3831 case MULT_EXPR:
3832 case MULT_HIGHPART_EXPR:
3833 case TRUNC_DIV_EXPR:
3834 case CEIL_DIV_EXPR:
3835 case FLOOR_DIV_EXPR:
3836 case ROUND_DIV_EXPR:
3837 case TRUNC_MOD_EXPR:
3838 case CEIL_MOD_EXPR:
3839 case FLOOR_MOD_EXPR:
3840 case ROUND_MOD_EXPR:
3841 case RDIV_EXPR:
3842 case EXACT_DIV_EXPR:
3843 case MIN_EXPR:
3844 case MAX_EXPR:
3845 case BIT_IOR_EXPR:
3846 case BIT_XOR_EXPR:
3847 case BIT_AND_EXPR:
3848 /* Continue with generic binary expression handling. */
3849 break;
3851 default:
3852 gcc_unreachable ();
3855 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3856 || !useless_type_conversion_p (lhs_type, rhs2_type))
3858 error ("type mismatch in binary expression");
3859 debug_generic_stmt (lhs_type);
3860 debug_generic_stmt (rhs1_type);
3861 debug_generic_stmt (rhs2_type);
3862 return true;
3865 return false;
3868 /* Verify a gimple assignment statement STMT with a ternary rhs.
3869 Returns true if anything is wrong. */
3871 static bool
3872 verify_gimple_assign_ternary (gassign *stmt)
3874 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3875 tree lhs = gimple_assign_lhs (stmt);
3876 tree lhs_type = TREE_TYPE (lhs);
3877 tree rhs1 = gimple_assign_rhs1 (stmt);
3878 tree rhs1_type = TREE_TYPE (rhs1);
3879 tree rhs2 = gimple_assign_rhs2 (stmt);
3880 tree rhs2_type = TREE_TYPE (rhs2);
3881 tree rhs3 = gimple_assign_rhs3 (stmt);
3882 tree rhs3_type = TREE_TYPE (rhs3);
3884 if (!is_gimple_reg (lhs))
3886 error ("non-register as LHS of ternary operation");
3887 return true;
3890 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3891 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3892 || !is_gimple_val (rhs2)
3893 || !is_gimple_val (rhs3))
3895 error ("invalid operands in ternary operation");
3896 return true;
3899 /* First handle operations that involve different types. */
3900 switch (rhs_code)
3902 case WIDEN_MULT_PLUS_EXPR:
3903 case WIDEN_MULT_MINUS_EXPR:
3904 if ((!INTEGRAL_TYPE_P (rhs1_type)
3905 && !FIXED_POINT_TYPE_P (rhs1_type))
3906 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3907 || !useless_type_conversion_p (lhs_type, rhs3_type)
3908 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3909 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3911 error ("type mismatch in widening multiply-accumulate expression");
3912 debug_generic_expr (lhs_type);
3913 debug_generic_expr (rhs1_type);
3914 debug_generic_expr (rhs2_type);
3915 debug_generic_expr (rhs3_type);
3916 return true;
3918 break;
3920 case FMA_EXPR:
3921 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3922 || !useless_type_conversion_p (lhs_type, rhs2_type)
3923 || !useless_type_conversion_p (lhs_type, rhs3_type))
3925 error ("type mismatch in fused multiply-add expression");
3926 debug_generic_expr (lhs_type);
3927 debug_generic_expr (rhs1_type);
3928 debug_generic_expr (rhs2_type);
3929 debug_generic_expr (rhs3_type);
3930 return true;
3932 break;
3934 case COND_EXPR:
3935 case VEC_COND_EXPR:
3936 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3937 || !useless_type_conversion_p (lhs_type, rhs3_type))
3939 error ("type mismatch in conditional expression");
3940 debug_generic_expr (lhs_type);
3941 debug_generic_expr (rhs2_type);
3942 debug_generic_expr (rhs3_type);
3943 return true;
3945 break;
3947 case VEC_PERM_EXPR:
3948 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3949 || !useless_type_conversion_p (lhs_type, rhs2_type))
3951 error ("type mismatch in vector permute expression");
3952 debug_generic_expr (lhs_type);
3953 debug_generic_expr (rhs1_type);
3954 debug_generic_expr (rhs2_type);
3955 debug_generic_expr (rhs3_type);
3956 return true;
3959 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3960 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3961 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3963 error ("vector types expected in vector permute expression");
3964 debug_generic_expr (lhs_type);
3965 debug_generic_expr (rhs1_type);
3966 debug_generic_expr (rhs2_type);
3967 debug_generic_expr (rhs3_type);
3968 return true;
3971 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3972 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3973 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3974 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3975 != TYPE_VECTOR_SUBPARTS (lhs_type))
3977 error ("vectors with different element number found "
3978 "in vector permute expression");
3979 debug_generic_expr (lhs_type);
3980 debug_generic_expr (rhs1_type);
3981 debug_generic_expr (rhs2_type);
3982 debug_generic_expr (rhs3_type);
3983 return true;
3986 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3987 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3988 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3990 error ("invalid mask type in vector permute expression");
3991 debug_generic_expr (lhs_type);
3992 debug_generic_expr (rhs1_type);
3993 debug_generic_expr (rhs2_type);
3994 debug_generic_expr (rhs3_type);
3995 return true;
3998 return false;
4000 case SAD_EXPR:
4001 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4002 || !useless_type_conversion_p (lhs_type, rhs3_type)
4003 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4004 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4005 > GET_MODE_BITSIZE (GET_MODE_INNER
4006 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4008 error ("type mismatch in sad expression");
4009 debug_generic_expr (lhs_type);
4010 debug_generic_expr (rhs1_type);
4011 debug_generic_expr (rhs2_type);
4012 debug_generic_expr (rhs3_type);
4013 return true;
4016 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4017 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4018 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4020 error ("vector types expected in sad expression");
4021 debug_generic_expr (lhs_type);
4022 debug_generic_expr (rhs1_type);
4023 debug_generic_expr (rhs2_type);
4024 debug_generic_expr (rhs3_type);
4025 return true;
4028 return false;
4030 case DOT_PROD_EXPR:
4031 case REALIGN_LOAD_EXPR:
4032 /* FIXME. */
4033 return false;
4035 default:
4036 gcc_unreachable ();
4038 return false;
4041 /* Verify a gimple assignment statement STMT with a single rhs.
4042 Returns true if anything is wrong. */
4044 static bool
4045 verify_gimple_assign_single (gassign *stmt)
4047 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4048 tree lhs = gimple_assign_lhs (stmt);
4049 tree lhs_type = TREE_TYPE (lhs);
4050 tree rhs1 = gimple_assign_rhs1 (stmt);
4051 tree rhs1_type = TREE_TYPE (rhs1);
4052 bool res = false;
4054 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4056 error ("non-trivial conversion at assignment");
4057 debug_generic_expr (lhs_type);
4058 debug_generic_expr (rhs1_type);
4059 return true;
4062 if (gimple_clobber_p (stmt)
4063 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4065 error ("non-decl/MEM_REF LHS in clobber statement");
4066 debug_generic_expr (lhs);
4067 return true;
4070 if (handled_component_p (lhs)
4071 || TREE_CODE (lhs) == MEM_REF
4072 || TREE_CODE (lhs) == TARGET_MEM_REF)
4073 res |= verify_types_in_gimple_reference (lhs, true);
4075 /* Special codes we cannot handle via their class. */
4076 switch (rhs_code)
4078 case ADDR_EXPR:
4080 tree op = TREE_OPERAND (rhs1, 0);
4081 if (!is_gimple_addressable (op))
4083 error ("invalid operand in unary expression");
4084 return true;
4087 /* Technically there is no longer a need for matching types, but
4088 gimple hygiene asks for this check. In LTO we can end up
4089 combining incompatible units and thus end up with addresses
4090 of globals that change their type to a common one. */
4091 if (!in_lto_p
4092 && !types_compatible_p (TREE_TYPE (op),
4093 TREE_TYPE (TREE_TYPE (rhs1)))
4094 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4095 TREE_TYPE (op)))
4097 error ("type mismatch in address expression");
4098 debug_generic_stmt (TREE_TYPE (rhs1));
4099 debug_generic_stmt (TREE_TYPE (op));
4100 return true;
4103 return verify_types_in_gimple_reference (op, true);
4106 /* tcc_reference */
4107 case INDIRECT_REF:
4108 error ("INDIRECT_REF in gimple IL");
4109 return true;
4111 case COMPONENT_REF:
4112 case BIT_FIELD_REF:
4113 case ARRAY_REF:
4114 case ARRAY_RANGE_REF:
4115 case VIEW_CONVERT_EXPR:
4116 case REALPART_EXPR:
4117 case IMAGPART_EXPR:
4118 case TARGET_MEM_REF:
4119 case MEM_REF:
4120 if (!is_gimple_reg (lhs)
4121 && is_gimple_reg_type (TREE_TYPE (lhs)))
4123 error ("invalid rhs for gimple memory store");
4124 debug_generic_stmt (lhs);
4125 debug_generic_stmt (rhs1);
4126 return true;
4128 return res || verify_types_in_gimple_reference (rhs1, false);
4130 /* tcc_constant */
4131 case SSA_NAME:
4132 case INTEGER_CST:
4133 case REAL_CST:
4134 case FIXED_CST:
4135 case COMPLEX_CST:
4136 case VECTOR_CST:
4137 case STRING_CST:
4138 return res;
4140 /* tcc_declaration */
4141 case CONST_DECL:
4142 return res;
4143 case VAR_DECL:
4144 case PARM_DECL:
4145 if (!is_gimple_reg (lhs)
4146 && !is_gimple_reg (rhs1)
4147 && is_gimple_reg_type (TREE_TYPE (lhs)))
4149 error ("invalid rhs for gimple memory store");
4150 debug_generic_stmt (lhs);
4151 debug_generic_stmt (rhs1);
4152 return true;
4154 return res;
4156 case CONSTRUCTOR:
4157 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4159 unsigned int i;
4160 tree elt_i, elt_v, elt_t = NULL_TREE;
4162 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4163 return res;
4164 /* For vector CONSTRUCTORs we require that either it is empty
4165 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4166 (then the element count must be correct to cover the whole
4167 outer vector and index must be NULL on all elements, or it is
4168 a CONSTRUCTOR of scalar elements, where we as an exception allow
4169 smaller number of elements (assuming zero filling) and
4170 consecutive indexes as compared to NULL indexes (such
4171 CONSTRUCTORs can appear in the IL from FEs). */
4172 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4174 if (elt_t == NULL_TREE)
4176 elt_t = TREE_TYPE (elt_v);
4177 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4179 tree elt_t = TREE_TYPE (elt_v);
4180 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4181 TREE_TYPE (elt_t)))
4183 error ("incorrect type of vector CONSTRUCTOR"
4184 " elements");
4185 debug_generic_stmt (rhs1);
4186 return true;
4188 else if (CONSTRUCTOR_NELTS (rhs1)
4189 * TYPE_VECTOR_SUBPARTS (elt_t)
4190 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4192 error ("incorrect number of vector CONSTRUCTOR"
4193 " elements");
4194 debug_generic_stmt (rhs1);
4195 return true;
4198 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4199 elt_t))
4201 error ("incorrect type of vector CONSTRUCTOR elements");
4202 debug_generic_stmt (rhs1);
4203 return true;
4205 else if (CONSTRUCTOR_NELTS (rhs1)
4206 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4208 error ("incorrect number of vector CONSTRUCTOR elements");
4209 debug_generic_stmt (rhs1);
4210 return true;
4213 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4215 error ("incorrect type of vector CONSTRUCTOR elements");
4216 debug_generic_stmt (rhs1);
4217 return true;
4219 if (elt_i != NULL_TREE
4220 && (TREE_CODE (elt_t) == VECTOR_TYPE
4221 || TREE_CODE (elt_i) != INTEGER_CST
4222 || compare_tree_int (elt_i, i) != 0))
4224 error ("vector CONSTRUCTOR with non-NULL element index");
4225 debug_generic_stmt (rhs1);
4226 return true;
4228 if (!is_gimple_val (elt_v))
4230 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4231 debug_generic_stmt (rhs1);
4232 return true;
4236 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4238 error ("non-vector CONSTRUCTOR with elements");
4239 debug_generic_stmt (rhs1);
4240 return true;
4242 return res;
4243 case OBJ_TYPE_REF:
4244 case ASSERT_EXPR:
4245 case WITH_SIZE_EXPR:
4246 /* FIXME. */
4247 return res;
4249 default:;
4252 return res;
4255 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4256 is a problem, otherwise false. */
4258 static bool
4259 verify_gimple_assign (gassign *stmt)
4261 switch (gimple_assign_rhs_class (stmt))
4263 case GIMPLE_SINGLE_RHS:
4264 return verify_gimple_assign_single (stmt);
4266 case GIMPLE_UNARY_RHS:
4267 return verify_gimple_assign_unary (stmt);
4269 case GIMPLE_BINARY_RHS:
4270 return verify_gimple_assign_binary (stmt);
4272 case GIMPLE_TERNARY_RHS:
4273 return verify_gimple_assign_ternary (stmt);
4275 default:
4276 gcc_unreachable ();
4280 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4281 is a problem, otherwise false. */
4283 static bool
4284 verify_gimple_return (greturn *stmt)
4286 tree op = gimple_return_retval (stmt);
4287 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4289 /* We cannot test for present return values as we do not fix up missing
4290 return values from the original source. */
4291 if (op == NULL)
4292 return false;
4294 if (!is_gimple_val (op)
4295 && TREE_CODE (op) != RESULT_DECL)
4297 error ("invalid operand in return statement");
4298 debug_generic_stmt (op);
4299 return true;
4302 if ((TREE_CODE (op) == RESULT_DECL
4303 && DECL_BY_REFERENCE (op))
4304 || (TREE_CODE (op) == SSA_NAME
4305 && SSA_NAME_VAR (op)
4306 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4307 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4308 op = TREE_TYPE (op);
4310 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4312 error ("invalid conversion in return statement");
4313 debug_generic_stmt (restype);
4314 debug_generic_stmt (TREE_TYPE (op));
4315 return true;
4318 return false;
4322 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4323 is a problem, otherwise false. */
4325 static bool
4326 verify_gimple_goto (ggoto *stmt)
4328 tree dest = gimple_goto_dest (stmt);
4330 /* ??? We have two canonical forms of direct goto destinations, a
4331 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4332 if (TREE_CODE (dest) != LABEL_DECL
4333 && (!is_gimple_val (dest)
4334 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4336 error ("goto destination is neither a label nor a pointer");
4337 return true;
4340 return false;
4343 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4344 is a problem, otherwise false. */
4346 static bool
4347 verify_gimple_switch (gswitch *stmt)
4349 unsigned int i, n;
4350 tree elt, prev_upper_bound = NULL_TREE;
4351 tree index_type, elt_type = NULL_TREE;
4353 if (!is_gimple_val (gimple_switch_index (stmt)))
4355 error ("invalid operand to switch statement");
4356 debug_generic_stmt (gimple_switch_index (stmt));
4357 return true;
4360 index_type = TREE_TYPE (gimple_switch_index (stmt));
4361 if (! INTEGRAL_TYPE_P (index_type))
4363 error ("non-integral type switch statement");
4364 debug_generic_expr (index_type);
4365 return true;
4368 elt = gimple_switch_label (stmt, 0);
4369 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4371 error ("invalid default case label in switch statement");
4372 debug_generic_expr (elt);
4373 return true;
4376 n = gimple_switch_num_labels (stmt);
4377 for (i = 1; i < n; i++)
4379 elt = gimple_switch_label (stmt, i);
4381 if (! CASE_LOW (elt))
4383 error ("invalid case label in switch statement");
4384 debug_generic_expr (elt);
4385 return true;
4387 if (CASE_HIGH (elt)
4388 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4390 error ("invalid case range in switch statement");
4391 debug_generic_expr (elt);
4392 return true;
4395 if (elt_type)
4397 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4398 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4400 error ("type mismatch for case label in switch statement");
4401 debug_generic_expr (elt);
4402 return true;
4405 else
4407 elt_type = TREE_TYPE (CASE_LOW (elt));
4408 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4410 error ("type precision mismatch in switch statement");
4411 return true;
4415 if (prev_upper_bound)
4417 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4419 error ("case labels not sorted in switch statement");
4420 return true;
4424 prev_upper_bound = CASE_HIGH (elt);
4425 if (! prev_upper_bound)
4426 prev_upper_bound = CASE_LOW (elt);
4429 return false;
4432 /* Verify a gimple debug statement STMT.
4433 Returns true if anything is wrong. */
4435 static bool
4436 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4438 /* There isn't much that could be wrong in a gimple debug stmt. A
4439 gimple debug bind stmt, for example, maps a tree, that's usually
4440 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4441 component or member of an aggregate type, to another tree, that
4442 can be an arbitrary expression. These stmts expand into debug
4443 insns, and are converted to debug notes by var-tracking.c. */
4444 return false;
4447 /* Verify a gimple label statement STMT.
4448 Returns true if anything is wrong. */
4450 static bool
4451 verify_gimple_label (glabel *stmt)
4453 tree decl = gimple_label_label (stmt);
4454 int uid;
4455 bool err = false;
4457 if (TREE_CODE (decl) != LABEL_DECL)
4458 return true;
4459 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4460 && DECL_CONTEXT (decl) != current_function_decl)
4462 error ("label's context is not the current function decl");
4463 err |= true;
4466 uid = LABEL_DECL_UID (decl);
4467 if (cfun->cfg
4468 && (uid == -1
4469 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4471 error ("incorrect entry in label_to_block_map");
4472 err |= true;
4475 uid = EH_LANDING_PAD_NR (decl);
4476 if (uid)
4478 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4479 if (decl != lp->post_landing_pad)
4481 error ("incorrect setting of landing pad number");
4482 err |= true;
4486 return err;
4489 /* Verify a gimple cond statement STMT.
4490 Returns true if anything is wrong. */
4492 static bool
4493 verify_gimple_cond (gcond *stmt)
4495 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4497 error ("invalid comparison code in gimple cond");
4498 return true;
4500 if (!(!gimple_cond_true_label (stmt)
4501 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4502 || !(!gimple_cond_false_label (stmt)
4503 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4505 error ("invalid labels in gimple cond");
4506 return true;
4509 return verify_gimple_comparison (boolean_type_node,
4510 gimple_cond_lhs (stmt),
4511 gimple_cond_rhs (stmt));
4514 /* Verify the GIMPLE statement STMT. Returns true if there is an
4515 error, otherwise false. */
4517 static bool
4518 verify_gimple_stmt (gimple stmt)
4520 switch (gimple_code (stmt))
4522 case GIMPLE_ASSIGN:
4523 return verify_gimple_assign (as_a <gassign *> (stmt));
4525 case GIMPLE_LABEL:
4526 return verify_gimple_label (as_a <glabel *> (stmt));
4528 case GIMPLE_CALL:
4529 return verify_gimple_call (as_a <gcall *> (stmt));
4531 case GIMPLE_COND:
4532 return verify_gimple_cond (as_a <gcond *> (stmt));
4534 case GIMPLE_GOTO:
4535 return verify_gimple_goto (as_a <ggoto *> (stmt));
4537 case GIMPLE_SWITCH:
4538 return verify_gimple_switch (as_a <gswitch *> (stmt));
4540 case GIMPLE_RETURN:
4541 return verify_gimple_return (as_a <greturn *> (stmt));
4543 case GIMPLE_ASM:
4544 return false;
4546 case GIMPLE_TRANSACTION:
4547 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4549 /* Tuples that do not have tree operands. */
4550 case GIMPLE_NOP:
4551 case GIMPLE_PREDICT:
4552 case GIMPLE_RESX:
4553 case GIMPLE_EH_DISPATCH:
4554 case GIMPLE_EH_MUST_NOT_THROW:
4555 return false;
4557 CASE_GIMPLE_OMP:
4558 /* OpenMP directives are validated by the FE and never operated
4559 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4560 non-gimple expressions when the main index variable has had
4561 its address taken. This does not affect the loop itself
4562 because the header of an GIMPLE_OMP_FOR is merely used to determine
4563 how to setup the parallel iteration. */
4564 return false;
4566 case GIMPLE_DEBUG:
4567 return verify_gimple_debug (stmt);
4569 default:
4570 gcc_unreachable ();
4574 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4575 and false otherwise. */
4577 static bool
4578 verify_gimple_phi (gimple phi)
4580 bool err = false;
4581 unsigned i;
4582 tree phi_result = gimple_phi_result (phi);
4583 bool virtual_p;
4585 if (!phi_result)
4587 error ("invalid PHI result");
4588 return true;
4591 virtual_p = virtual_operand_p (phi_result);
4592 if (TREE_CODE (phi_result) != SSA_NAME
4593 || (virtual_p
4594 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4596 error ("invalid PHI result");
4597 err = true;
4600 for (i = 0; i < gimple_phi_num_args (phi); i++)
4602 tree t = gimple_phi_arg_def (phi, i);
4604 if (!t)
4606 error ("missing PHI def");
4607 err |= true;
4608 continue;
4610 /* Addressable variables do have SSA_NAMEs but they
4611 are not considered gimple values. */
4612 else if ((TREE_CODE (t) == SSA_NAME
4613 && virtual_p != virtual_operand_p (t))
4614 || (virtual_p
4615 && (TREE_CODE (t) != SSA_NAME
4616 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4617 || (!virtual_p
4618 && !is_gimple_val (t)))
4620 error ("invalid PHI argument");
4621 debug_generic_expr (t);
4622 err |= true;
4624 #ifdef ENABLE_TYPES_CHECKING
4625 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4627 error ("incompatible types in PHI argument %u", i);
4628 debug_generic_stmt (TREE_TYPE (phi_result));
4629 debug_generic_stmt (TREE_TYPE (t));
4630 err |= true;
4632 #endif
4635 return err;
4638 /* Verify the GIMPLE statements inside the sequence STMTS. */
4640 static bool
4641 verify_gimple_in_seq_2 (gimple_seq stmts)
4643 gimple_stmt_iterator ittr;
4644 bool err = false;
4646 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4648 gimple stmt = gsi_stmt (ittr);
4650 switch (gimple_code (stmt))
4652 case GIMPLE_BIND:
4653 err |= verify_gimple_in_seq_2 (
4654 gimple_bind_body (as_a <gbind *> (stmt)));
4655 break;
4657 case GIMPLE_TRY:
4658 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4659 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4660 break;
4662 case GIMPLE_EH_FILTER:
4663 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4664 break;
4666 case GIMPLE_EH_ELSE:
4668 geh_else *eh_else = as_a <geh_else *> (stmt);
4669 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4670 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4672 break;
4674 case GIMPLE_CATCH:
4675 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4676 as_a <gcatch *> (stmt)));
4677 break;
4679 case GIMPLE_TRANSACTION:
4680 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4681 break;
4683 default:
4685 bool err2 = verify_gimple_stmt (stmt);
4686 if (err2)
4687 debug_gimple_stmt (stmt);
4688 err |= err2;
4693 return err;
4696 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4697 is a problem, otherwise false. */
4699 static bool
4700 verify_gimple_transaction (gtransaction *stmt)
4702 tree lab = gimple_transaction_label (stmt);
4703 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4704 return true;
4705 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4709 /* Verify the GIMPLE statements inside the statement list STMTS. */
4711 DEBUG_FUNCTION void
4712 verify_gimple_in_seq (gimple_seq stmts)
4714 timevar_push (TV_TREE_STMT_VERIFY);
4715 if (verify_gimple_in_seq_2 (stmts))
4716 internal_error ("verify_gimple failed");
4717 timevar_pop (TV_TREE_STMT_VERIFY);
4720 /* Return true when the T can be shared. */
4722 static bool
4723 tree_node_can_be_shared (tree t)
4725 if (IS_TYPE_OR_DECL_P (t)
4726 || is_gimple_min_invariant (t)
4727 || TREE_CODE (t) == SSA_NAME
4728 || t == error_mark_node
4729 || TREE_CODE (t) == IDENTIFIER_NODE)
4730 return true;
4732 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4733 return true;
4735 if (DECL_P (t))
4736 return true;
4738 return false;
4741 /* Called via walk_tree. Verify tree sharing. */
4743 static tree
4744 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4746 hash_set<void *> *visited = (hash_set<void *> *) data;
4748 if (tree_node_can_be_shared (*tp))
4750 *walk_subtrees = false;
4751 return NULL;
4754 if (visited->add (*tp))
4755 return *tp;
4757 return NULL;
4760 /* Called via walk_gimple_stmt. Verify tree sharing. */
4762 static tree
4763 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4765 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4766 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4769 static bool eh_error_found;
4770 bool
4771 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4772 hash_set<gimple> *visited)
4774 if (!visited->contains (stmt))
4776 error ("dead STMT in EH table");
4777 debug_gimple_stmt (stmt);
4778 eh_error_found = true;
4780 return true;
4783 /* Verify if the location LOCs block is in BLOCKS. */
4785 static bool
4786 verify_location (hash_set<tree> *blocks, location_t loc)
4788 tree block = LOCATION_BLOCK (loc);
4789 if (block != NULL_TREE
4790 && !blocks->contains (block))
4792 error ("location references block not in block tree");
4793 return true;
4795 if (block != NULL_TREE)
4796 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4797 return false;
4800 /* Called via walk_tree. Verify that expressions have no blocks. */
4802 static tree
4803 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4805 if (!EXPR_P (*tp))
4807 *walk_subtrees = false;
4808 return NULL;
4811 location_t loc = EXPR_LOCATION (*tp);
4812 if (LOCATION_BLOCK (loc) != NULL)
4813 return *tp;
4815 return NULL;
4818 /* Called via walk_tree. Verify locations of expressions. */
4820 static tree
4821 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4823 hash_set<tree> *blocks = (hash_set<tree> *) data;
4825 if (TREE_CODE (*tp) == VAR_DECL
4826 && DECL_HAS_DEBUG_EXPR_P (*tp))
4828 tree t = DECL_DEBUG_EXPR (*tp);
4829 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4830 if (addr)
4831 return addr;
4833 if ((TREE_CODE (*tp) == VAR_DECL
4834 || TREE_CODE (*tp) == PARM_DECL
4835 || TREE_CODE (*tp) == RESULT_DECL)
4836 && DECL_HAS_VALUE_EXPR_P (*tp))
4838 tree t = DECL_VALUE_EXPR (*tp);
4839 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4840 if (addr)
4841 return addr;
4844 if (!EXPR_P (*tp))
4846 *walk_subtrees = false;
4847 return NULL;
4850 location_t loc = EXPR_LOCATION (*tp);
4851 if (verify_location (blocks, loc))
4852 return *tp;
4854 return NULL;
4857 /* Called via walk_gimple_op. Verify locations of expressions. */
4859 static tree
4860 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4862 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4863 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4866 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4868 static void
4869 collect_subblocks (hash_set<tree> *blocks, tree block)
4871 tree t;
4872 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4874 blocks->add (t);
4875 collect_subblocks (blocks, t);
4879 /* Verify the GIMPLE statements in the CFG of FN. */
4881 DEBUG_FUNCTION void
4882 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4884 basic_block bb;
4885 bool err = false;
4887 timevar_push (TV_TREE_STMT_VERIFY);
4888 hash_set<void *> visited;
4889 hash_set<gimple> visited_stmts;
4891 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4892 hash_set<tree> blocks;
4893 if (DECL_INITIAL (fn->decl))
4895 blocks.add (DECL_INITIAL (fn->decl));
4896 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4899 FOR_EACH_BB_FN (bb, fn)
4901 gimple_stmt_iterator gsi;
4903 for (gphi_iterator gpi = gsi_start_phis (bb);
4904 !gsi_end_p (gpi);
4905 gsi_next (&gpi))
4907 gphi *phi = gpi.phi ();
4908 bool err2 = false;
4909 unsigned i;
4911 visited_stmts.add (phi);
4913 if (gimple_bb (phi) != bb)
4915 error ("gimple_bb (phi) is set to a wrong basic block");
4916 err2 = true;
4919 err2 |= verify_gimple_phi (phi);
4921 /* Only PHI arguments have locations. */
4922 if (gimple_location (phi) != UNKNOWN_LOCATION)
4924 error ("PHI node with location");
4925 err2 = true;
4928 for (i = 0; i < gimple_phi_num_args (phi); i++)
4930 tree arg = gimple_phi_arg_def (phi, i);
4931 tree addr = walk_tree (&arg, verify_node_sharing_1,
4932 &visited, NULL);
4933 if (addr)
4935 error ("incorrect sharing of tree nodes");
4936 debug_generic_expr (addr);
4937 err2 |= true;
4939 location_t loc = gimple_phi_arg_location (phi, i);
4940 if (virtual_operand_p (gimple_phi_result (phi))
4941 && loc != UNKNOWN_LOCATION)
4943 error ("virtual PHI with argument locations");
4944 err2 = true;
4946 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4947 if (addr)
4949 debug_generic_expr (addr);
4950 err2 = true;
4952 err2 |= verify_location (&blocks, loc);
4955 if (err2)
4956 debug_gimple_stmt (phi);
4957 err |= err2;
4960 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4962 gimple stmt = gsi_stmt (gsi);
4963 bool err2 = false;
4964 struct walk_stmt_info wi;
4965 tree addr;
4966 int lp_nr;
4968 visited_stmts.add (stmt);
4970 if (gimple_bb (stmt) != bb)
4972 error ("gimple_bb (stmt) is set to a wrong basic block");
4973 err2 = true;
4976 err2 |= verify_gimple_stmt (stmt);
4977 err2 |= verify_location (&blocks, gimple_location (stmt));
4979 memset (&wi, 0, sizeof (wi));
4980 wi.info = (void *) &visited;
4981 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4982 if (addr)
4984 error ("incorrect sharing of tree nodes");
4985 debug_generic_expr (addr);
4986 err2 |= true;
4989 memset (&wi, 0, sizeof (wi));
4990 wi.info = (void *) &blocks;
4991 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4992 if (addr)
4994 debug_generic_expr (addr);
4995 err2 |= true;
4998 /* ??? Instead of not checking these stmts at all the walker
4999 should know its context via wi. */
5000 if (!is_gimple_debug (stmt)
5001 && !is_gimple_omp (stmt))
5003 memset (&wi, 0, sizeof (wi));
5004 addr = walk_gimple_op (stmt, verify_expr, &wi);
5005 if (addr)
5007 debug_generic_expr (addr);
5008 inform (gimple_location (stmt), "in statement");
5009 err2 |= true;
5013 /* If the statement is marked as part of an EH region, then it is
5014 expected that the statement could throw. Verify that when we
5015 have optimizations that simplify statements such that we prove
5016 that they cannot throw, that we update other data structures
5017 to match. */
5018 lp_nr = lookup_stmt_eh_lp (stmt);
5019 if (lp_nr > 0)
5021 if (!stmt_could_throw_p (stmt))
5023 if (verify_nothrow)
5025 error ("statement marked for throw, but doesn%'t");
5026 err2 |= true;
5029 else if (!gsi_one_before_end_p (gsi))
5031 error ("statement marked for throw in middle of block");
5032 err2 |= true;
5036 if (err2)
5037 debug_gimple_stmt (stmt);
5038 err |= err2;
5042 eh_error_found = false;
5043 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5044 if (eh_table)
5045 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5046 (&visited_stmts);
5048 if (err || eh_error_found)
5049 internal_error ("verify_gimple failed");
5051 verify_histograms ();
5052 timevar_pop (TV_TREE_STMT_VERIFY);
5056 /* Verifies that the flow information is OK. */
5058 static int
5059 gimple_verify_flow_info (void)
5061 int err = 0;
5062 basic_block bb;
5063 gimple_stmt_iterator gsi;
5064 gimple stmt;
5065 edge e;
5066 edge_iterator ei;
5068 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5069 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5071 error ("ENTRY_BLOCK has IL associated with it");
5072 err = 1;
5075 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5076 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5078 error ("EXIT_BLOCK has IL associated with it");
5079 err = 1;
5082 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5083 if (e->flags & EDGE_FALLTHRU)
5085 error ("fallthru to exit from bb %d", e->src->index);
5086 err = 1;
5089 FOR_EACH_BB_FN (bb, cfun)
5091 bool found_ctrl_stmt = false;
5093 stmt = NULL;
5095 /* Skip labels on the start of basic block. */
5096 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5098 tree label;
5099 gimple prev_stmt = stmt;
5101 stmt = gsi_stmt (gsi);
5103 if (gimple_code (stmt) != GIMPLE_LABEL)
5104 break;
5106 label = gimple_label_label (as_a <glabel *> (stmt));
5107 if (prev_stmt && DECL_NONLOCAL (label))
5109 error ("nonlocal label ");
5110 print_generic_expr (stderr, label, 0);
5111 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5112 bb->index);
5113 err = 1;
5116 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5118 error ("EH landing pad label ");
5119 print_generic_expr (stderr, label, 0);
5120 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5121 bb->index);
5122 err = 1;
5125 if (label_to_block (label) != bb)
5127 error ("label ");
5128 print_generic_expr (stderr, label, 0);
5129 fprintf (stderr, " to block does not match in bb %d",
5130 bb->index);
5131 err = 1;
5134 if (decl_function_context (label) != current_function_decl)
5136 error ("label ");
5137 print_generic_expr (stderr, label, 0);
5138 fprintf (stderr, " has incorrect context in bb %d",
5139 bb->index);
5140 err = 1;
5144 /* Verify that body of basic block BB is free of control flow. */
5145 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5147 gimple stmt = gsi_stmt (gsi);
5149 if (found_ctrl_stmt)
5151 error ("control flow in the middle of basic block %d",
5152 bb->index);
5153 err = 1;
5156 if (stmt_ends_bb_p (stmt))
5157 found_ctrl_stmt = true;
5159 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5161 error ("label ");
5162 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5163 fprintf (stderr, " in the middle of basic block %d", bb->index);
5164 err = 1;
5168 gsi = gsi_last_bb (bb);
5169 if (gsi_end_p (gsi))
5170 continue;
5172 stmt = gsi_stmt (gsi);
5174 if (gimple_code (stmt) == GIMPLE_LABEL)
5175 continue;
5177 err |= verify_eh_edges (stmt);
5179 if (is_ctrl_stmt (stmt))
5181 FOR_EACH_EDGE (e, ei, bb->succs)
5182 if (e->flags & EDGE_FALLTHRU)
5184 error ("fallthru edge after a control statement in bb %d",
5185 bb->index);
5186 err = 1;
5190 if (gimple_code (stmt) != GIMPLE_COND)
5192 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5193 after anything else but if statement. */
5194 FOR_EACH_EDGE (e, ei, bb->succs)
5195 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5197 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5198 bb->index);
5199 err = 1;
5203 switch (gimple_code (stmt))
5205 case GIMPLE_COND:
5207 edge true_edge;
5208 edge false_edge;
5210 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5212 if (!true_edge
5213 || !false_edge
5214 || !(true_edge->flags & EDGE_TRUE_VALUE)
5215 || !(false_edge->flags & EDGE_FALSE_VALUE)
5216 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5217 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5218 || EDGE_COUNT (bb->succs) >= 3)
5220 error ("wrong outgoing edge flags at end of bb %d",
5221 bb->index);
5222 err = 1;
5225 break;
5227 case GIMPLE_GOTO:
5228 if (simple_goto_p (stmt))
5230 error ("explicit goto at end of bb %d", bb->index);
5231 err = 1;
5233 else
5235 /* FIXME. We should double check that the labels in the
5236 destination blocks have their address taken. */
5237 FOR_EACH_EDGE (e, ei, bb->succs)
5238 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5239 | EDGE_FALSE_VALUE))
5240 || !(e->flags & EDGE_ABNORMAL))
5242 error ("wrong outgoing edge flags at end of bb %d",
5243 bb->index);
5244 err = 1;
5247 break;
5249 case GIMPLE_CALL:
5250 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5251 break;
5252 /* ... fallthru ... */
5253 case GIMPLE_RETURN:
5254 if (!single_succ_p (bb)
5255 || (single_succ_edge (bb)->flags
5256 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5257 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5259 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5260 err = 1;
5262 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5264 error ("return edge does not point to exit in bb %d",
5265 bb->index);
5266 err = 1;
5268 break;
5270 case GIMPLE_SWITCH:
5272 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5273 tree prev;
5274 edge e;
5275 size_t i, n;
5277 n = gimple_switch_num_labels (switch_stmt);
5279 /* Mark all the destination basic blocks. */
5280 for (i = 0; i < n; ++i)
5282 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5283 basic_block label_bb = label_to_block (lab);
5284 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5285 label_bb->aux = (void *)1;
5288 /* Verify that the case labels are sorted. */
5289 prev = gimple_switch_label (switch_stmt, 0);
5290 for (i = 1; i < n; ++i)
5292 tree c = gimple_switch_label (switch_stmt, i);
5293 if (!CASE_LOW (c))
5295 error ("found default case not at the start of "
5296 "case vector");
5297 err = 1;
5298 continue;
5300 if (CASE_LOW (prev)
5301 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5303 error ("case labels not sorted: ");
5304 print_generic_expr (stderr, prev, 0);
5305 fprintf (stderr," is greater than ");
5306 print_generic_expr (stderr, c, 0);
5307 fprintf (stderr," but comes before it.\n");
5308 err = 1;
5310 prev = c;
5312 /* VRP will remove the default case if it can prove it will
5313 never be executed. So do not verify there always exists
5314 a default case here. */
5316 FOR_EACH_EDGE (e, ei, bb->succs)
5318 if (!e->dest->aux)
5320 error ("extra outgoing edge %d->%d",
5321 bb->index, e->dest->index);
5322 err = 1;
5325 e->dest->aux = (void *)2;
5326 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5327 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5329 error ("wrong outgoing edge flags at end of bb %d",
5330 bb->index);
5331 err = 1;
5335 /* Check that we have all of them. */
5336 for (i = 0; i < n; ++i)
5338 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5339 basic_block label_bb = label_to_block (lab);
5341 if (label_bb->aux != (void *)2)
5343 error ("missing edge %i->%i", bb->index, label_bb->index);
5344 err = 1;
5348 FOR_EACH_EDGE (e, ei, bb->succs)
5349 e->dest->aux = (void *)0;
5351 break;
5353 case GIMPLE_EH_DISPATCH:
5354 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5355 break;
5357 default:
5358 break;
5362 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5363 verify_dominators (CDI_DOMINATORS);
5365 return err;
5369 /* Updates phi nodes after creating a forwarder block joined
5370 by edge FALLTHRU. */
5372 static void
5373 gimple_make_forwarder_block (edge fallthru)
5375 edge e;
5376 edge_iterator ei;
5377 basic_block dummy, bb;
5378 tree var;
5379 gphi_iterator gsi;
5381 dummy = fallthru->src;
5382 bb = fallthru->dest;
5384 if (single_pred_p (bb))
5385 return;
5387 /* If we redirected a branch we must create new PHI nodes at the
5388 start of BB. */
5389 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5391 gphi *phi, *new_phi;
5393 phi = gsi.phi ();
5394 var = gimple_phi_result (phi);
5395 new_phi = create_phi_node (var, bb);
5396 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5397 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5398 UNKNOWN_LOCATION);
5401 /* Add the arguments we have stored on edges. */
5402 FOR_EACH_EDGE (e, ei, bb->preds)
5404 if (e == fallthru)
5405 continue;
5407 flush_pending_stmts (e);
5412 /* Return a non-special label in the head of basic block BLOCK.
5413 Create one if it doesn't exist. */
5415 tree
5416 gimple_block_label (basic_block bb)
5418 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5419 bool first = true;
5420 tree label;
5421 glabel *stmt;
5423 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5425 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5426 if (!stmt)
5427 break;
5428 label = gimple_label_label (stmt);
5429 if (!DECL_NONLOCAL (label))
5431 if (!first)
5432 gsi_move_before (&i, &s);
5433 return label;
5437 label = create_artificial_label (UNKNOWN_LOCATION);
5438 stmt = gimple_build_label (label);
5439 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5440 return label;
5444 /* Attempt to perform edge redirection by replacing a possibly complex
5445 jump instruction by a goto or by removing the jump completely.
5446 This can apply only if all edges now point to the same block. The
5447 parameters and return values are equivalent to
5448 redirect_edge_and_branch. */
5450 static edge
5451 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5453 basic_block src = e->src;
5454 gimple_stmt_iterator i;
5455 gimple stmt;
5457 /* We can replace or remove a complex jump only when we have exactly
5458 two edges. */
5459 if (EDGE_COUNT (src->succs) != 2
5460 /* Verify that all targets will be TARGET. Specifically, the
5461 edge that is not E must also go to TARGET. */
5462 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5463 return NULL;
5465 i = gsi_last_bb (src);
5466 if (gsi_end_p (i))
5467 return NULL;
5469 stmt = gsi_stmt (i);
5471 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5473 gsi_remove (&i, true);
5474 e = ssa_redirect_edge (e, target);
5475 e->flags = EDGE_FALLTHRU;
5476 return e;
5479 return NULL;
5483 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5484 edge representing the redirected branch. */
5486 static edge
5487 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5489 basic_block bb = e->src;
5490 gimple_stmt_iterator gsi;
5491 edge ret;
5492 gimple stmt;
5494 if (e->flags & EDGE_ABNORMAL)
5495 return NULL;
5497 if (e->dest == dest)
5498 return NULL;
5500 if (e->flags & EDGE_EH)
5501 return redirect_eh_edge (e, dest);
5503 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5505 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5506 if (ret)
5507 return ret;
5510 gsi = gsi_last_bb (bb);
5511 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5513 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5515 case GIMPLE_COND:
5516 /* For COND_EXPR, we only need to redirect the edge. */
5517 break;
5519 case GIMPLE_GOTO:
5520 /* No non-abnormal edges should lead from a non-simple goto, and
5521 simple ones should be represented implicitly. */
5522 gcc_unreachable ();
5524 case GIMPLE_SWITCH:
5526 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5527 tree label = gimple_block_label (dest);
5528 tree cases = get_cases_for_edge (e, switch_stmt);
5530 /* If we have a list of cases associated with E, then use it
5531 as it's a lot faster than walking the entire case vector. */
5532 if (cases)
5534 edge e2 = find_edge (e->src, dest);
5535 tree last, first;
5537 first = cases;
5538 while (cases)
5540 last = cases;
5541 CASE_LABEL (cases) = label;
5542 cases = CASE_CHAIN (cases);
5545 /* If there was already an edge in the CFG, then we need
5546 to move all the cases associated with E to E2. */
5547 if (e2)
5549 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5551 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5552 CASE_CHAIN (cases2) = first;
5554 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5556 else
5558 size_t i, n = gimple_switch_num_labels (switch_stmt);
5560 for (i = 0; i < n; i++)
5562 tree elt = gimple_switch_label (switch_stmt, i);
5563 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5564 CASE_LABEL (elt) = label;
5568 break;
5570 case GIMPLE_ASM:
5572 gasm *asm_stmt = as_a <gasm *> (stmt);
5573 int i, n = gimple_asm_nlabels (asm_stmt);
5574 tree label = NULL;
5576 for (i = 0; i < n; ++i)
5578 tree cons = gimple_asm_label_op (asm_stmt, i);
5579 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5581 if (!label)
5582 label = gimple_block_label (dest);
5583 TREE_VALUE (cons) = label;
5587 /* If we didn't find any label matching the former edge in the
5588 asm labels, we must be redirecting the fallthrough
5589 edge. */
5590 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5592 break;
5594 case GIMPLE_RETURN:
5595 gsi_remove (&gsi, true);
5596 e->flags |= EDGE_FALLTHRU;
5597 break;
5599 case GIMPLE_OMP_RETURN:
5600 case GIMPLE_OMP_CONTINUE:
5601 case GIMPLE_OMP_SECTIONS_SWITCH:
5602 case GIMPLE_OMP_FOR:
5603 /* The edges from OMP constructs can be simply redirected. */
5604 break;
5606 case GIMPLE_EH_DISPATCH:
5607 if (!(e->flags & EDGE_FALLTHRU))
5608 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5609 break;
5611 case GIMPLE_TRANSACTION:
5612 /* The ABORT edge has a stored label associated with it, otherwise
5613 the edges are simply redirectable. */
5614 if (e->flags == 0)
5615 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5616 gimple_block_label (dest));
5617 break;
5619 default:
5620 /* Otherwise it must be a fallthru edge, and we don't need to
5621 do anything besides redirecting it. */
5622 gcc_assert (e->flags & EDGE_FALLTHRU);
5623 break;
5626 /* Update/insert PHI nodes as necessary. */
5628 /* Now update the edges in the CFG. */
5629 e = ssa_redirect_edge (e, dest);
5631 return e;
5634 /* Returns true if it is possible to remove edge E by redirecting
5635 it to the destination of the other edge from E->src. */
5637 static bool
5638 gimple_can_remove_branch_p (const_edge e)
5640 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5641 return false;
5643 return true;
5646 /* Simple wrapper, as we can always redirect fallthru edges. */
5648 static basic_block
5649 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5651 e = gimple_redirect_edge_and_branch (e, dest);
5652 gcc_assert (e);
5654 return NULL;
5658 /* Splits basic block BB after statement STMT (but at least after the
5659 labels). If STMT is NULL, BB is split just after the labels. */
5661 static basic_block
5662 gimple_split_block (basic_block bb, void *stmt)
5664 gimple_stmt_iterator gsi;
5665 gimple_stmt_iterator gsi_tgt;
5666 gimple act;
5667 gimple_seq list;
5668 basic_block new_bb;
5669 edge e;
5670 edge_iterator ei;
5672 new_bb = create_empty_bb (bb);
5674 /* Redirect the outgoing edges. */
5675 new_bb->succs = bb->succs;
5676 bb->succs = NULL;
5677 FOR_EACH_EDGE (e, ei, new_bb->succs)
5678 e->src = new_bb;
5680 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5681 stmt = NULL;
5683 /* Move everything from GSI to the new basic block. */
5684 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5686 act = gsi_stmt (gsi);
5687 if (gimple_code (act) == GIMPLE_LABEL)
5688 continue;
5690 if (!stmt)
5691 break;
5693 if (stmt == act)
5695 gsi_next (&gsi);
5696 break;
5700 if (gsi_end_p (gsi))
5701 return new_bb;
5703 /* Split the statement list - avoid re-creating new containers as this
5704 brings ugly quadratic memory consumption in the inliner.
5705 (We are still quadratic since we need to update stmt BB pointers,
5706 sadly.) */
5707 gsi_split_seq_before (&gsi, &list);
5708 set_bb_seq (new_bb, list);
5709 for (gsi_tgt = gsi_start (list);
5710 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5711 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5713 return new_bb;
5717 /* Moves basic block BB after block AFTER. */
5719 static bool
5720 gimple_move_block_after (basic_block bb, basic_block after)
5722 if (bb->prev_bb == after)
5723 return true;
5725 unlink_block (bb);
5726 link_block (bb, after);
5728 return true;
5732 /* Return TRUE if block BB has no executable statements, otherwise return
5733 FALSE. */
5735 static bool
5736 gimple_empty_block_p (basic_block bb)
5738 /* BB must have no executable statements. */
5739 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5740 if (phi_nodes (bb))
5741 return false;
5742 if (gsi_end_p (gsi))
5743 return true;
5744 if (is_gimple_debug (gsi_stmt (gsi)))
5745 gsi_next_nondebug (&gsi);
5746 return gsi_end_p (gsi);
5750 /* Split a basic block if it ends with a conditional branch and if the
5751 other part of the block is not empty. */
5753 static basic_block
5754 gimple_split_block_before_cond_jump (basic_block bb)
5756 gimple last, split_point;
5757 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5758 if (gsi_end_p (gsi))
5759 return NULL;
5760 last = gsi_stmt (gsi);
5761 if (gimple_code (last) != GIMPLE_COND
5762 && gimple_code (last) != GIMPLE_SWITCH)
5763 return NULL;
5764 gsi_prev_nondebug (&gsi);
5765 split_point = gsi_stmt (gsi);
5766 return split_block (bb, split_point)->dest;
5770 /* Return true if basic_block can be duplicated. */
5772 static bool
5773 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5775 return true;
5778 /* Create a duplicate of the basic block BB. NOTE: This does not
5779 preserve SSA form. */
5781 static basic_block
5782 gimple_duplicate_bb (basic_block bb)
5784 basic_block new_bb;
5785 gimple_stmt_iterator gsi_tgt;
5787 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5789 /* Copy the PHI nodes. We ignore PHI node arguments here because
5790 the incoming edges have not been setup yet. */
5791 for (gphi_iterator gpi = gsi_start_phis (bb);
5792 !gsi_end_p (gpi);
5793 gsi_next (&gpi))
5795 gphi *phi, *copy;
5796 phi = gpi.phi ();
5797 copy = create_phi_node (NULL_TREE, new_bb);
5798 create_new_def_for (gimple_phi_result (phi), copy,
5799 gimple_phi_result_ptr (copy));
5800 gimple_set_uid (copy, gimple_uid (phi));
5803 gsi_tgt = gsi_start_bb (new_bb);
5804 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5805 !gsi_end_p (gsi);
5806 gsi_next (&gsi))
5808 def_operand_p def_p;
5809 ssa_op_iter op_iter;
5810 tree lhs;
5811 gimple stmt, copy;
5813 stmt = gsi_stmt (gsi);
5814 if (gimple_code (stmt) == GIMPLE_LABEL)
5815 continue;
5817 /* Don't duplicate label debug stmts. */
5818 if (gimple_debug_bind_p (stmt)
5819 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5820 == LABEL_DECL)
5821 continue;
5823 /* Create a new copy of STMT and duplicate STMT's virtual
5824 operands. */
5825 copy = gimple_copy (stmt);
5826 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5828 maybe_duplicate_eh_stmt (copy, stmt);
5829 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5831 /* When copying around a stmt writing into a local non-user
5832 aggregate, make sure it won't share stack slot with other
5833 vars. */
5834 lhs = gimple_get_lhs (stmt);
5835 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5837 tree base = get_base_address (lhs);
5838 if (base
5839 && (TREE_CODE (base) == VAR_DECL
5840 || TREE_CODE (base) == RESULT_DECL)
5841 && DECL_IGNORED_P (base)
5842 && !TREE_STATIC (base)
5843 && !DECL_EXTERNAL (base)
5844 && (TREE_CODE (base) != VAR_DECL
5845 || !DECL_HAS_VALUE_EXPR_P (base)))
5846 DECL_NONSHAREABLE (base) = 1;
5849 /* Create new names for all the definitions created by COPY and
5850 add replacement mappings for each new name. */
5851 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5852 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5855 return new_bb;
5858 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5860 static void
5861 add_phi_args_after_copy_edge (edge e_copy)
5863 basic_block bb, bb_copy = e_copy->src, dest;
5864 edge e;
5865 edge_iterator ei;
5866 gphi *phi, *phi_copy;
5867 tree def;
5868 gphi_iterator psi, psi_copy;
5870 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5871 return;
5873 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5875 if (e_copy->dest->flags & BB_DUPLICATED)
5876 dest = get_bb_original (e_copy->dest);
5877 else
5878 dest = e_copy->dest;
5880 e = find_edge (bb, dest);
5881 if (!e)
5883 /* During loop unrolling the target of the latch edge is copied.
5884 In this case we are not looking for edge to dest, but to
5885 duplicated block whose original was dest. */
5886 FOR_EACH_EDGE (e, ei, bb->succs)
5888 if ((e->dest->flags & BB_DUPLICATED)
5889 && get_bb_original (e->dest) == dest)
5890 break;
5893 gcc_assert (e != NULL);
5896 for (psi = gsi_start_phis (e->dest),
5897 psi_copy = gsi_start_phis (e_copy->dest);
5898 !gsi_end_p (psi);
5899 gsi_next (&psi), gsi_next (&psi_copy))
5901 phi = psi.phi ();
5902 phi_copy = psi_copy.phi ();
5903 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5904 add_phi_arg (phi_copy, def, e_copy,
5905 gimple_phi_arg_location_from_edge (phi, e));
5910 /* Basic block BB_COPY was created by code duplication. Add phi node
5911 arguments for edges going out of BB_COPY. The blocks that were
5912 duplicated have BB_DUPLICATED set. */
5914 void
5915 add_phi_args_after_copy_bb (basic_block bb_copy)
5917 edge e_copy;
5918 edge_iterator ei;
5920 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5922 add_phi_args_after_copy_edge (e_copy);
5926 /* Blocks in REGION_COPY array of length N_REGION were created by
5927 duplication of basic blocks. Add phi node arguments for edges
5928 going from these blocks. If E_COPY is not NULL, also add
5929 phi node arguments for its destination.*/
5931 void
5932 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5933 edge e_copy)
5935 unsigned i;
5937 for (i = 0; i < n_region; i++)
5938 region_copy[i]->flags |= BB_DUPLICATED;
5940 for (i = 0; i < n_region; i++)
5941 add_phi_args_after_copy_bb (region_copy[i]);
5942 if (e_copy)
5943 add_phi_args_after_copy_edge (e_copy);
5945 for (i = 0; i < n_region; i++)
5946 region_copy[i]->flags &= ~BB_DUPLICATED;
5949 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5950 important exit edge EXIT. By important we mean that no SSA name defined
5951 inside region is live over the other exit edges of the region. All entry
5952 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5953 to the duplicate of the region. Dominance and loop information is
5954 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5955 UPDATE_DOMINANCE is false then we assume that the caller will update the
5956 dominance information after calling this function. The new basic
5957 blocks are stored to REGION_COPY in the same order as they had in REGION,
5958 provided that REGION_COPY is not NULL.
5959 The function returns false if it is unable to copy the region,
5960 true otherwise. */
5962 bool
5963 gimple_duplicate_sese_region (edge entry, edge exit,
5964 basic_block *region, unsigned n_region,
5965 basic_block *region_copy,
5966 bool update_dominance)
5968 unsigned i;
5969 bool free_region_copy = false, copying_header = false;
5970 struct loop *loop = entry->dest->loop_father;
5971 edge exit_copy;
5972 vec<basic_block> doms;
5973 edge redirected;
5974 int total_freq = 0, entry_freq = 0;
5975 gcov_type total_count = 0, entry_count = 0;
5977 if (!can_copy_bbs_p (region, n_region))
5978 return false;
5980 /* Some sanity checking. Note that we do not check for all possible
5981 missuses of the functions. I.e. if you ask to copy something weird,
5982 it will work, but the state of structures probably will not be
5983 correct. */
5984 for (i = 0; i < n_region; i++)
5986 /* We do not handle subloops, i.e. all the blocks must belong to the
5987 same loop. */
5988 if (region[i]->loop_father != loop)
5989 return false;
5991 if (region[i] != entry->dest
5992 && region[i] == loop->header)
5993 return false;
5996 /* In case the function is used for loop header copying (which is the primary
5997 use), ensure that EXIT and its copy will be new latch and entry edges. */
5998 if (loop->header == entry->dest)
6000 copying_header = true;
6002 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6003 return false;
6005 for (i = 0; i < n_region; i++)
6006 if (region[i] != exit->src
6007 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6008 return false;
6011 initialize_original_copy_tables ();
6013 if (copying_header)
6014 set_loop_copy (loop, loop_outer (loop));
6015 else
6016 set_loop_copy (loop, loop);
6018 if (!region_copy)
6020 region_copy = XNEWVEC (basic_block, n_region);
6021 free_region_copy = true;
6024 /* Record blocks outside the region that are dominated by something
6025 inside. */
6026 if (update_dominance)
6028 doms.create (0);
6029 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6032 if (entry->dest->count)
6034 total_count = entry->dest->count;
6035 entry_count = entry->count;
6036 /* Fix up corner cases, to avoid division by zero or creation of negative
6037 frequencies. */
6038 if (entry_count > total_count)
6039 entry_count = total_count;
6041 else
6043 total_freq = entry->dest->frequency;
6044 entry_freq = EDGE_FREQUENCY (entry);
6045 /* Fix up corner cases, to avoid division by zero or creation of negative
6046 frequencies. */
6047 if (total_freq == 0)
6048 total_freq = 1;
6049 else if (entry_freq > total_freq)
6050 entry_freq = total_freq;
6053 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6054 split_edge_bb_loc (entry), update_dominance);
6055 if (total_count)
6057 scale_bbs_frequencies_gcov_type (region, n_region,
6058 total_count - entry_count,
6059 total_count);
6060 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6061 total_count);
6063 else
6065 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6066 total_freq);
6067 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6070 if (copying_header)
6072 loop->header = exit->dest;
6073 loop->latch = exit->src;
6076 /* Redirect the entry and add the phi node arguments. */
6077 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6078 gcc_assert (redirected != NULL);
6079 flush_pending_stmts (entry);
6081 /* Concerning updating of dominators: We must recount dominators
6082 for entry block and its copy. Anything that is outside of the
6083 region, but was dominated by something inside needs recounting as
6084 well. */
6085 if (update_dominance)
6087 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6088 doms.safe_push (get_bb_original (entry->dest));
6089 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6090 doms.release ();
6093 /* Add the other PHI node arguments. */
6094 add_phi_args_after_copy (region_copy, n_region, NULL);
6096 if (free_region_copy)
6097 free (region_copy);
6099 free_original_copy_tables ();
6100 return true;
6103 /* Checks if BB is part of the region defined by N_REGION BBS. */
6104 static bool
6105 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6107 unsigned int n;
6109 for (n = 0; n < n_region; n++)
6111 if (bb == bbs[n])
6112 return true;
6114 return false;
6117 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6118 are stored to REGION_COPY in the same order in that they appear
6119 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6120 the region, EXIT an exit from it. The condition guarding EXIT
6121 is moved to ENTRY. Returns true if duplication succeeds, false
6122 otherwise.
6124 For example,
6126 some_code;
6127 if (cond)
6129 else
6132 is transformed to
6134 if (cond)
6136 some_code;
6139 else
6141 some_code;
6146 bool
6147 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6148 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6149 basic_block *region_copy ATTRIBUTE_UNUSED)
6151 unsigned i;
6152 bool free_region_copy = false;
6153 struct loop *loop = exit->dest->loop_father;
6154 struct loop *orig_loop = entry->dest->loop_father;
6155 basic_block switch_bb, entry_bb, nentry_bb;
6156 vec<basic_block> doms;
6157 int total_freq = 0, exit_freq = 0;
6158 gcov_type total_count = 0, exit_count = 0;
6159 edge exits[2], nexits[2], e;
6160 gimple_stmt_iterator gsi;
6161 gimple cond_stmt;
6162 edge sorig, snew;
6163 basic_block exit_bb;
6164 gphi_iterator psi;
6165 gphi *phi;
6166 tree def;
6167 struct loop *target, *aloop, *cloop;
6169 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6170 exits[0] = exit;
6171 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6173 if (!can_copy_bbs_p (region, n_region))
6174 return false;
6176 initialize_original_copy_tables ();
6177 set_loop_copy (orig_loop, loop);
6179 target= loop;
6180 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6182 if (bb_part_of_region_p (aloop->header, region, n_region))
6184 cloop = duplicate_loop (aloop, target);
6185 duplicate_subloops (aloop, cloop);
6189 if (!region_copy)
6191 region_copy = XNEWVEC (basic_block, n_region);
6192 free_region_copy = true;
6195 gcc_assert (!need_ssa_update_p (cfun));
6197 /* Record blocks outside the region that are dominated by something
6198 inside. */
6199 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6201 if (exit->src->count)
6203 total_count = exit->src->count;
6204 exit_count = exit->count;
6205 /* Fix up corner cases, to avoid division by zero or creation of negative
6206 frequencies. */
6207 if (exit_count > total_count)
6208 exit_count = total_count;
6210 else
6212 total_freq = exit->src->frequency;
6213 exit_freq = EDGE_FREQUENCY (exit);
6214 /* Fix up corner cases, to avoid division by zero or creation of negative
6215 frequencies. */
6216 if (total_freq == 0)
6217 total_freq = 1;
6218 if (exit_freq > total_freq)
6219 exit_freq = total_freq;
6222 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6223 split_edge_bb_loc (exit), true);
6224 if (total_count)
6226 scale_bbs_frequencies_gcov_type (region, n_region,
6227 total_count - exit_count,
6228 total_count);
6229 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6230 total_count);
6232 else
6234 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6235 total_freq);
6236 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6239 /* Create the switch block, and put the exit condition to it. */
6240 entry_bb = entry->dest;
6241 nentry_bb = get_bb_copy (entry_bb);
6242 if (!last_stmt (entry->src)
6243 || !stmt_ends_bb_p (last_stmt (entry->src)))
6244 switch_bb = entry->src;
6245 else
6246 switch_bb = split_edge (entry);
6247 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6249 gsi = gsi_last_bb (switch_bb);
6250 cond_stmt = last_stmt (exit->src);
6251 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6252 cond_stmt = gimple_copy (cond_stmt);
6254 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6256 sorig = single_succ_edge (switch_bb);
6257 sorig->flags = exits[1]->flags;
6258 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6260 /* Register the new edge from SWITCH_BB in loop exit lists. */
6261 rescan_loop_exit (snew, true, false);
6263 /* Add the PHI node arguments. */
6264 add_phi_args_after_copy (region_copy, n_region, snew);
6266 /* Get rid of now superfluous conditions and associated edges (and phi node
6267 arguments). */
6268 exit_bb = exit->dest;
6270 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6271 PENDING_STMT (e) = NULL;
6273 /* The latch of ORIG_LOOP was copied, and so was the backedge
6274 to the original header. We redirect this backedge to EXIT_BB. */
6275 for (i = 0; i < n_region; i++)
6276 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6278 gcc_assert (single_succ_edge (region_copy[i]));
6279 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6280 PENDING_STMT (e) = NULL;
6281 for (psi = gsi_start_phis (exit_bb);
6282 !gsi_end_p (psi);
6283 gsi_next (&psi))
6285 phi = psi.phi ();
6286 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6287 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6290 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6291 PENDING_STMT (e) = NULL;
6293 /* Anything that is outside of the region, but was dominated by something
6294 inside needs to update dominance info. */
6295 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6296 doms.release ();
6297 /* Update the SSA web. */
6298 update_ssa (TODO_update_ssa);
6300 if (free_region_copy)
6301 free (region_copy);
6303 free_original_copy_tables ();
6304 return true;
6307 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6308 adding blocks when the dominator traversal reaches EXIT. This
6309 function silently assumes that ENTRY strictly dominates EXIT. */
6311 void
6312 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6313 vec<basic_block> *bbs_p)
6315 basic_block son;
6317 for (son = first_dom_son (CDI_DOMINATORS, entry);
6318 son;
6319 son = next_dom_son (CDI_DOMINATORS, son))
6321 bbs_p->safe_push (son);
6322 if (son != exit)
6323 gather_blocks_in_sese_region (son, exit, bbs_p);
6327 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6328 The duplicates are recorded in VARS_MAP. */
6330 static void
6331 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6332 tree to_context)
6334 tree t = *tp, new_t;
6335 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6337 if (DECL_CONTEXT (t) == to_context)
6338 return;
6340 bool existed;
6341 tree &loc = vars_map->get_or_insert (t, &existed);
6343 if (!existed)
6345 if (SSA_VAR_P (t))
6347 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6348 add_local_decl (f, new_t);
6350 else
6352 gcc_assert (TREE_CODE (t) == CONST_DECL);
6353 new_t = copy_node (t);
6355 DECL_CONTEXT (new_t) = to_context;
6357 loc = new_t;
6359 else
6360 new_t = loc;
6362 *tp = new_t;
6366 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6367 VARS_MAP maps old ssa names and var_decls to the new ones. */
6369 static tree
6370 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6371 tree to_context)
6373 tree new_name;
6375 gcc_assert (!virtual_operand_p (name));
6377 tree *loc = vars_map->get (name);
6379 if (!loc)
6381 tree decl = SSA_NAME_VAR (name);
6382 if (decl)
6384 replace_by_duplicate_decl (&decl, vars_map, to_context);
6385 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6386 decl, SSA_NAME_DEF_STMT (name));
6387 if (SSA_NAME_IS_DEFAULT_DEF (name))
6388 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6389 decl, new_name);
6391 else
6392 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6393 name, SSA_NAME_DEF_STMT (name));
6395 vars_map->put (name, new_name);
6397 else
6398 new_name = *loc;
6400 return new_name;
6403 struct move_stmt_d
6405 tree orig_block;
6406 tree new_block;
6407 tree from_context;
6408 tree to_context;
6409 hash_map<tree, tree> *vars_map;
6410 htab_t new_label_map;
6411 hash_map<void *, void *> *eh_map;
6412 bool remap_decls_p;
6415 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6416 contained in *TP if it has been ORIG_BLOCK previously and change the
6417 DECL_CONTEXT of every local variable referenced in *TP. */
6419 static tree
6420 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6422 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6423 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6424 tree t = *tp;
6426 if (EXPR_P (t))
6428 tree block = TREE_BLOCK (t);
6429 if (block == p->orig_block
6430 || (p->orig_block == NULL_TREE
6431 && block != NULL_TREE))
6432 TREE_SET_BLOCK (t, p->new_block);
6433 #ifdef ENABLE_CHECKING
6434 else if (block != NULL_TREE)
6436 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6437 block = BLOCK_SUPERCONTEXT (block);
6438 gcc_assert (block == p->orig_block);
6440 #endif
6442 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6444 if (TREE_CODE (t) == SSA_NAME)
6445 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6446 else if (TREE_CODE (t) == LABEL_DECL)
6448 if (p->new_label_map)
6450 struct tree_map in, *out;
6451 in.base.from = t;
6452 out = (struct tree_map *)
6453 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6454 if (out)
6455 *tp = t = out->to;
6458 DECL_CONTEXT (t) = p->to_context;
6460 else if (p->remap_decls_p)
6462 /* Replace T with its duplicate. T should no longer appear in the
6463 parent function, so this looks wasteful; however, it may appear
6464 in referenced_vars, and more importantly, as virtual operands of
6465 statements, and in alias lists of other variables. It would be
6466 quite difficult to expunge it from all those places. ??? It might
6467 suffice to do this for addressable variables. */
6468 if ((TREE_CODE (t) == VAR_DECL
6469 && !is_global_var (t))
6470 || TREE_CODE (t) == CONST_DECL)
6471 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6473 *walk_subtrees = 0;
6475 else if (TYPE_P (t))
6476 *walk_subtrees = 0;
6478 return NULL_TREE;
6481 /* Helper for move_stmt_r. Given an EH region number for the source
6482 function, map that to the duplicate EH regio number in the dest. */
6484 static int
6485 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6487 eh_region old_r, new_r;
6489 old_r = get_eh_region_from_number (old_nr);
6490 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6492 return new_r->index;
6495 /* Similar, but operate on INTEGER_CSTs. */
6497 static tree
6498 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6500 int old_nr, new_nr;
6502 old_nr = tree_to_shwi (old_t_nr);
6503 new_nr = move_stmt_eh_region_nr (old_nr, p);
6505 return build_int_cst (integer_type_node, new_nr);
6508 /* Like move_stmt_op, but for gimple statements.
6510 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6511 contained in the current statement in *GSI_P and change the
6512 DECL_CONTEXT of every local variable referenced in the current
6513 statement. */
6515 static tree
6516 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6517 struct walk_stmt_info *wi)
6519 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6520 gimple stmt = gsi_stmt (*gsi_p);
6521 tree block = gimple_block (stmt);
6523 if (block == p->orig_block
6524 || (p->orig_block == NULL_TREE
6525 && block != NULL_TREE))
6526 gimple_set_block (stmt, p->new_block);
6528 switch (gimple_code (stmt))
6530 case GIMPLE_CALL:
6531 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6533 tree r, fndecl = gimple_call_fndecl (stmt);
6534 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6535 switch (DECL_FUNCTION_CODE (fndecl))
6537 case BUILT_IN_EH_COPY_VALUES:
6538 r = gimple_call_arg (stmt, 1);
6539 r = move_stmt_eh_region_tree_nr (r, p);
6540 gimple_call_set_arg (stmt, 1, r);
6541 /* FALLTHRU */
6543 case BUILT_IN_EH_POINTER:
6544 case BUILT_IN_EH_FILTER:
6545 r = gimple_call_arg (stmt, 0);
6546 r = move_stmt_eh_region_tree_nr (r, p);
6547 gimple_call_set_arg (stmt, 0, r);
6548 break;
6550 default:
6551 break;
6554 break;
6556 case GIMPLE_RESX:
6558 gresx *resx_stmt = as_a <gresx *> (stmt);
6559 int r = gimple_resx_region (resx_stmt);
6560 r = move_stmt_eh_region_nr (r, p);
6561 gimple_resx_set_region (resx_stmt, r);
6563 break;
6565 case GIMPLE_EH_DISPATCH:
6567 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6568 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6569 r = move_stmt_eh_region_nr (r, p);
6570 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6572 break;
6574 case GIMPLE_OMP_RETURN:
6575 case GIMPLE_OMP_CONTINUE:
6576 break;
6577 default:
6578 if (is_gimple_omp (stmt))
6580 /* Do not remap variables inside OMP directives. Variables
6581 referenced in clauses and directive header belong to the
6582 parent function and should not be moved into the child
6583 function. */
6584 bool save_remap_decls_p = p->remap_decls_p;
6585 p->remap_decls_p = false;
6586 *handled_ops_p = true;
6588 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6589 move_stmt_op, wi);
6591 p->remap_decls_p = save_remap_decls_p;
6593 break;
6596 return NULL_TREE;
6599 /* Move basic block BB from function CFUN to function DEST_FN. The
6600 block is moved out of the original linked list and placed after
6601 block AFTER in the new list. Also, the block is removed from the
6602 original array of blocks and placed in DEST_FN's array of blocks.
6603 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6604 updated to reflect the moved edges.
6606 The local variables are remapped to new instances, VARS_MAP is used
6607 to record the mapping. */
6609 static void
6610 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6611 basic_block after, bool update_edge_count_p,
6612 struct move_stmt_d *d)
6614 struct control_flow_graph *cfg;
6615 edge_iterator ei;
6616 edge e;
6617 gimple_stmt_iterator si;
6618 unsigned old_len, new_len;
6620 /* Remove BB from dominance structures. */
6621 delete_from_dominance_info (CDI_DOMINATORS, bb);
6623 /* Move BB from its current loop to the copy in the new function. */
6624 if (current_loops)
6626 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6627 if (new_loop)
6628 bb->loop_father = new_loop;
6631 /* Link BB to the new linked list. */
6632 move_block_after (bb, after);
6634 /* Update the edge count in the corresponding flowgraphs. */
6635 if (update_edge_count_p)
6636 FOR_EACH_EDGE (e, ei, bb->succs)
6638 cfun->cfg->x_n_edges--;
6639 dest_cfun->cfg->x_n_edges++;
6642 /* Remove BB from the original basic block array. */
6643 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6644 cfun->cfg->x_n_basic_blocks--;
6646 /* Grow DEST_CFUN's basic block array if needed. */
6647 cfg = dest_cfun->cfg;
6648 cfg->x_n_basic_blocks++;
6649 if (bb->index >= cfg->x_last_basic_block)
6650 cfg->x_last_basic_block = bb->index + 1;
6652 old_len = vec_safe_length (cfg->x_basic_block_info);
6653 if ((unsigned) cfg->x_last_basic_block >= old_len)
6655 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6656 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6659 (*cfg->x_basic_block_info)[bb->index] = bb;
6661 /* Remap the variables in phi nodes. */
6662 for (gphi_iterator psi = gsi_start_phis (bb);
6663 !gsi_end_p (psi); )
6665 gphi *phi = psi.phi ();
6666 use_operand_p use;
6667 tree op = PHI_RESULT (phi);
6668 ssa_op_iter oi;
6669 unsigned i;
6671 if (virtual_operand_p (op))
6673 /* Remove the phi nodes for virtual operands (alias analysis will be
6674 run for the new function, anyway). */
6675 remove_phi_node (&psi, true);
6676 continue;
6679 SET_PHI_RESULT (phi,
6680 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6681 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6683 op = USE_FROM_PTR (use);
6684 if (TREE_CODE (op) == SSA_NAME)
6685 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6688 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6690 location_t locus = gimple_phi_arg_location (phi, i);
6691 tree block = LOCATION_BLOCK (locus);
6693 if (locus == UNKNOWN_LOCATION)
6694 continue;
6695 if (d->orig_block == NULL_TREE || block == d->orig_block)
6697 if (d->new_block == NULL_TREE)
6698 locus = LOCATION_LOCUS (locus);
6699 else
6700 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6701 gimple_phi_arg_set_location (phi, i, locus);
6705 gsi_next (&psi);
6708 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6710 gimple stmt = gsi_stmt (si);
6711 struct walk_stmt_info wi;
6713 memset (&wi, 0, sizeof (wi));
6714 wi.info = d;
6715 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6717 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6719 tree label = gimple_label_label (label_stmt);
6720 int uid = LABEL_DECL_UID (label);
6722 gcc_assert (uid > -1);
6724 old_len = vec_safe_length (cfg->x_label_to_block_map);
6725 if (old_len <= (unsigned) uid)
6727 new_len = 3 * uid / 2 + 1;
6728 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6731 (*cfg->x_label_to_block_map)[uid] = bb;
6732 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6734 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6736 if (uid >= dest_cfun->cfg->last_label_uid)
6737 dest_cfun->cfg->last_label_uid = uid + 1;
6740 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6741 remove_stmt_from_eh_lp_fn (cfun, stmt);
6743 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6744 gimple_remove_stmt_histograms (cfun, stmt);
6746 /* We cannot leave any operands allocated from the operand caches of
6747 the current function. */
6748 free_stmt_operands (cfun, stmt);
6749 push_cfun (dest_cfun);
6750 update_stmt (stmt);
6751 pop_cfun ();
6754 FOR_EACH_EDGE (e, ei, bb->succs)
6755 if (e->goto_locus != UNKNOWN_LOCATION)
6757 tree block = LOCATION_BLOCK (e->goto_locus);
6758 if (d->orig_block == NULL_TREE
6759 || block == d->orig_block)
6760 e->goto_locus = d->new_block ?
6761 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6762 LOCATION_LOCUS (e->goto_locus);
6766 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6767 the outermost EH region. Use REGION as the incoming base EH region. */
6769 static eh_region
6770 find_outermost_region_in_block (struct function *src_cfun,
6771 basic_block bb, eh_region region)
6773 gimple_stmt_iterator si;
6775 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6777 gimple stmt = gsi_stmt (si);
6778 eh_region stmt_region;
6779 int lp_nr;
6781 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6782 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6783 if (stmt_region)
6785 if (region == NULL)
6786 region = stmt_region;
6787 else if (stmt_region != region)
6789 region = eh_region_outermost (src_cfun, stmt_region, region);
6790 gcc_assert (region != NULL);
6795 return region;
6798 static tree
6799 new_label_mapper (tree decl, void *data)
6801 htab_t hash = (htab_t) data;
6802 struct tree_map *m;
6803 void **slot;
6805 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6807 m = XNEW (struct tree_map);
6808 m->hash = DECL_UID (decl);
6809 m->base.from = decl;
6810 m->to = create_artificial_label (UNKNOWN_LOCATION);
6811 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6812 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6813 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6815 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6816 gcc_assert (*slot == NULL);
6818 *slot = m;
6820 return m->to;
6823 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6824 subblocks. */
6826 static void
6827 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6828 tree to_context)
6830 tree *tp, t;
6832 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6834 t = *tp;
6835 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6836 continue;
6837 replace_by_duplicate_decl (&t, vars_map, to_context);
6838 if (t != *tp)
6840 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6842 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6843 DECL_HAS_VALUE_EXPR_P (t) = 1;
6845 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6846 *tp = t;
6850 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6851 replace_block_vars_by_duplicates (block, vars_map, to_context);
6854 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6855 from FN1 to FN2. */
6857 static void
6858 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6859 struct loop *loop)
6861 /* Discard it from the old loop array. */
6862 (*get_loops (fn1))[loop->num] = NULL;
6864 /* Place it in the new loop array, assigning it a new number. */
6865 loop->num = number_of_loops (fn2);
6866 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6868 /* Recurse to children. */
6869 for (loop = loop->inner; loop; loop = loop->next)
6870 fixup_loop_arrays_after_move (fn1, fn2, loop);
6873 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6874 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6876 DEBUG_FUNCTION void
6877 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6879 basic_block bb;
6880 edge_iterator ei;
6881 edge e;
6882 bitmap bbs = BITMAP_ALLOC (NULL);
6883 int i;
6885 gcc_assert (entry != NULL);
6886 gcc_assert (entry != exit);
6887 gcc_assert (bbs_p != NULL);
6889 gcc_assert (bbs_p->length () > 0);
6891 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6892 bitmap_set_bit (bbs, bb->index);
6894 gcc_assert (bitmap_bit_p (bbs, entry->index));
6895 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6897 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6899 if (bb == entry)
6901 gcc_assert (single_pred_p (entry));
6902 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6904 else
6905 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6907 e = ei_edge (ei);
6908 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6911 if (bb == exit)
6913 gcc_assert (single_succ_p (exit));
6914 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6916 else
6917 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6919 e = ei_edge (ei);
6920 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6924 BITMAP_FREE (bbs);
6928 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6929 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6930 single basic block in the original CFG and the new basic block is
6931 returned. DEST_CFUN must not have a CFG yet.
6933 Note that the region need not be a pure SESE region. Blocks inside
6934 the region may contain calls to abort/exit. The only restriction
6935 is that ENTRY_BB should be the only entry point and it must
6936 dominate EXIT_BB.
6938 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6939 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6940 to the new function.
6942 All local variables referenced in the region are assumed to be in
6943 the corresponding BLOCK_VARS and unexpanded variable lists
6944 associated with DEST_CFUN. */
6946 basic_block
6947 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6948 basic_block exit_bb, tree orig_block)
6950 vec<basic_block> bbs, dom_bbs;
6951 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6952 basic_block after, bb, *entry_pred, *exit_succ, abb;
6953 struct function *saved_cfun = cfun;
6954 int *entry_flag, *exit_flag;
6955 unsigned *entry_prob, *exit_prob;
6956 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6957 edge e;
6958 edge_iterator ei;
6959 htab_t new_label_map;
6960 hash_map<void *, void *> *eh_map;
6961 struct loop *loop = entry_bb->loop_father;
6962 struct loop *loop0 = get_loop (saved_cfun, 0);
6963 struct move_stmt_d d;
6965 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6966 region. */
6967 gcc_assert (entry_bb != exit_bb
6968 && (!exit_bb
6969 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6971 /* Collect all the blocks in the region. Manually add ENTRY_BB
6972 because it won't be added by dfs_enumerate_from. */
6973 bbs.create (0);
6974 bbs.safe_push (entry_bb);
6975 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6976 #ifdef ENABLE_CHECKING
6977 verify_sese (entry_bb, exit_bb, &bbs);
6978 #endif
6980 /* The blocks that used to be dominated by something in BBS will now be
6981 dominated by the new block. */
6982 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6983 bbs.address (),
6984 bbs.length ());
6986 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6987 the predecessor edges to ENTRY_BB and the successor edges to
6988 EXIT_BB so that we can re-attach them to the new basic block that
6989 will replace the region. */
6990 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6991 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6992 entry_flag = XNEWVEC (int, num_entry_edges);
6993 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6994 i = 0;
6995 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6997 entry_prob[i] = e->probability;
6998 entry_flag[i] = e->flags;
6999 entry_pred[i++] = e->src;
7000 remove_edge (e);
7003 if (exit_bb)
7005 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7006 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7007 exit_flag = XNEWVEC (int, num_exit_edges);
7008 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7009 i = 0;
7010 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7012 exit_prob[i] = e->probability;
7013 exit_flag[i] = e->flags;
7014 exit_succ[i++] = e->dest;
7015 remove_edge (e);
7018 else
7020 num_exit_edges = 0;
7021 exit_succ = NULL;
7022 exit_flag = NULL;
7023 exit_prob = NULL;
7026 /* Switch context to the child function to initialize DEST_FN's CFG. */
7027 gcc_assert (dest_cfun->cfg == NULL);
7028 push_cfun (dest_cfun);
7030 init_empty_tree_cfg ();
7032 /* Initialize EH information for the new function. */
7033 eh_map = NULL;
7034 new_label_map = NULL;
7035 if (saved_cfun->eh)
7037 eh_region region = NULL;
7039 FOR_EACH_VEC_ELT (bbs, i, bb)
7040 region = find_outermost_region_in_block (saved_cfun, bb, region);
7042 init_eh_for_function ();
7043 if (region != NULL)
7045 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7046 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7047 new_label_mapper, new_label_map);
7051 /* Initialize an empty loop tree. */
7052 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7053 init_loops_structure (dest_cfun, loops, 1);
7054 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7055 set_loops_for_fn (dest_cfun, loops);
7057 /* Move the outlined loop tree part. */
7058 num_nodes = bbs.length ();
7059 FOR_EACH_VEC_ELT (bbs, i, bb)
7061 if (bb->loop_father->header == bb)
7063 struct loop *this_loop = bb->loop_father;
7064 struct loop *outer = loop_outer (this_loop);
7065 if (outer == loop
7066 /* If the SESE region contains some bbs ending with
7067 a noreturn call, those are considered to belong
7068 to the outermost loop in saved_cfun, rather than
7069 the entry_bb's loop_father. */
7070 || outer == loop0)
7072 if (outer != loop)
7073 num_nodes -= this_loop->num_nodes;
7074 flow_loop_tree_node_remove (bb->loop_father);
7075 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7076 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7079 else if (bb->loop_father == loop0 && loop0 != loop)
7080 num_nodes--;
7082 /* Remove loop exits from the outlined region. */
7083 if (loops_for_fn (saved_cfun)->exits)
7084 FOR_EACH_EDGE (e, ei, bb->succs)
7086 struct loops *l = loops_for_fn (saved_cfun);
7087 loop_exit **slot
7088 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7089 NO_INSERT);
7090 if (slot)
7091 l->exits->clear_slot (slot);
7096 /* Adjust the number of blocks in the tree root of the outlined part. */
7097 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7099 /* Setup a mapping to be used by move_block_to_fn. */
7100 loop->aux = current_loops->tree_root;
7101 loop0->aux = current_loops->tree_root;
7103 pop_cfun ();
7105 /* Move blocks from BBS into DEST_CFUN. */
7106 gcc_assert (bbs.length () >= 2);
7107 after = dest_cfun->cfg->x_entry_block_ptr;
7108 hash_map<tree, tree> vars_map;
7110 memset (&d, 0, sizeof (d));
7111 d.orig_block = orig_block;
7112 d.new_block = DECL_INITIAL (dest_cfun->decl);
7113 d.from_context = cfun->decl;
7114 d.to_context = dest_cfun->decl;
7115 d.vars_map = &vars_map;
7116 d.new_label_map = new_label_map;
7117 d.eh_map = eh_map;
7118 d.remap_decls_p = true;
7120 FOR_EACH_VEC_ELT (bbs, i, bb)
7122 /* No need to update edge counts on the last block. It has
7123 already been updated earlier when we detached the region from
7124 the original CFG. */
7125 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7126 after = bb;
7129 loop->aux = NULL;
7130 loop0->aux = NULL;
7131 /* Loop sizes are no longer correct, fix them up. */
7132 loop->num_nodes -= num_nodes;
7133 for (struct loop *outer = loop_outer (loop);
7134 outer; outer = loop_outer (outer))
7135 outer->num_nodes -= num_nodes;
7136 loop0->num_nodes -= bbs.length () - num_nodes;
7138 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7140 struct loop *aloop;
7141 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7142 if (aloop != NULL)
7144 if (aloop->simduid)
7146 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7147 d.to_context);
7148 dest_cfun->has_simduid_loops = true;
7150 if (aloop->force_vectorize)
7151 dest_cfun->has_force_vectorize_loops = true;
7155 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7156 if (orig_block)
7158 tree block;
7159 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7160 == NULL_TREE);
7161 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7162 = BLOCK_SUBBLOCKS (orig_block);
7163 for (block = BLOCK_SUBBLOCKS (orig_block);
7164 block; block = BLOCK_CHAIN (block))
7165 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7166 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7169 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7170 &vars_map, dest_cfun->decl);
7172 if (new_label_map)
7173 htab_delete (new_label_map);
7174 if (eh_map)
7175 delete eh_map;
7177 /* Rewire the entry and exit blocks. The successor to the entry
7178 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7179 the child function. Similarly, the predecessor of DEST_FN's
7180 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7181 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7182 various CFG manipulation function get to the right CFG.
7184 FIXME, this is silly. The CFG ought to become a parameter to
7185 these helpers. */
7186 push_cfun (dest_cfun);
7187 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7188 if (exit_bb)
7189 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7190 pop_cfun ();
7192 /* Back in the original function, the SESE region has disappeared,
7193 create a new basic block in its place. */
7194 bb = create_empty_bb (entry_pred[0]);
7195 if (current_loops)
7196 add_bb_to_loop (bb, loop);
7197 for (i = 0; i < num_entry_edges; i++)
7199 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7200 e->probability = entry_prob[i];
7203 for (i = 0; i < num_exit_edges; i++)
7205 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7206 e->probability = exit_prob[i];
7209 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7210 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7211 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7212 dom_bbs.release ();
7214 if (exit_bb)
7216 free (exit_prob);
7217 free (exit_flag);
7218 free (exit_succ);
7220 free (entry_prob);
7221 free (entry_flag);
7222 free (entry_pred);
7223 bbs.release ();
7225 return bb;
7229 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7232 void
7233 dump_function_to_file (tree fndecl, FILE *file, int flags)
7235 tree arg, var, old_current_fndecl = current_function_decl;
7236 struct function *dsf;
7237 bool ignore_topmost_bind = false, any_var = false;
7238 basic_block bb;
7239 tree chain;
7240 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7241 && decl_is_tm_clone (fndecl));
7242 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7244 current_function_decl = fndecl;
7245 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7247 arg = DECL_ARGUMENTS (fndecl);
7248 while (arg)
7250 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7251 fprintf (file, " ");
7252 print_generic_expr (file, arg, dump_flags);
7253 if (flags & TDF_VERBOSE)
7254 print_node (file, "", arg, 4);
7255 if (DECL_CHAIN (arg))
7256 fprintf (file, ", ");
7257 arg = DECL_CHAIN (arg);
7259 fprintf (file, ")\n");
7261 if (flags & TDF_VERBOSE)
7262 print_node (file, "", fndecl, 2);
7264 dsf = DECL_STRUCT_FUNCTION (fndecl);
7265 if (dsf && (flags & TDF_EH))
7266 dump_eh_tree (file, dsf);
7268 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7270 dump_node (fndecl, TDF_SLIM | flags, file);
7271 current_function_decl = old_current_fndecl;
7272 return;
7275 /* When GIMPLE is lowered, the variables are no longer available in
7276 BIND_EXPRs, so display them separately. */
7277 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7279 unsigned ix;
7280 ignore_topmost_bind = true;
7282 fprintf (file, "{\n");
7283 if (!vec_safe_is_empty (fun->local_decls))
7284 FOR_EACH_LOCAL_DECL (fun, ix, var)
7286 print_generic_decl (file, var, flags);
7287 if (flags & TDF_VERBOSE)
7288 print_node (file, "", var, 4);
7289 fprintf (file, "\n");
7291 any_var = true;
7293 if (gimple_in_ssa_p (cfun))
7294 for (ix = 1; ix < num_ssa_names; ++ix)
7296 tree name = ssa_name (ix);
7297 if (name && !SSA_NAME_VAR (name))
7299 fprintf (file, " ");
7300 print_generic_expr (file, TREE_TYPE (name), flags);
7301 fprintf (file, " ");
7302 print_generic_expr (file, name, flags);
7303 fprintf (file, ";\n");
7305 any_var = true;
7310 if (fun && fun->decl == fndecl
7311 && fun->cfg
7312 && basic_block_info_for_fn (fun))
7314 /* If the CFG has been built, emit a CFG-based dump. */
7315 if (!ignore_topmost_bind)
7316 fprintf (file, "{\n");
7318 if (any_var && n_basic_blocks_for_fn (fun))
7319 fprintf (file, "\n");
7321 FOR_EACH_BB_FN (bb, fun)
7322 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7324 fprintf (file, "}\n");
7326 else if (DECL_SAVED_TREE (fndecl) == NULL)
7328 /* The function is now in GIMPLE form but the CFG has not been
7329 built yet. Emit the single sequence of GIMPLE statements
7330 that make up its body. */
7331 gimple_seq body = gimple_body (fndecl);
7333 if (gimple_seq_first_stmt (body)
7334 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7335 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7336 print_gimple_seq (file, body, 0, flags);
7337 else
7339 if (!ignore_topmost_bind)
7340 fprintf (file, "{\n");
7342 if (any_var)
7343 fprintf (file, "\n");
7345 print_gimple_seq (file, body, 2, flags);
7346 fprintf (file, "}\n");
7349 else
7351 int indent;
7353 /* Make a tree based dump. */
7354 chain = DECL_SAVED_TREE (fndecl);
7355 if (chain && TREE_CODE (chain) == BIND_EXPR)
7357 if (ignore_topmost_bind)
7359 chain = BIND_EXPR_BODY (chain);
7360 indent = 2;
7362 else
7363 indent = 0;
7365 else
7367 if (!ignore_topmost_bind)
7368 fprintf (file, "{\n");
7369 indent = 2;
7372 if (any_var)
7373 fprintf (file, "\n");
7375 print_generic_stmt_indented (file, chain, flags, indent);
7376 if (ignore_topmost_bind)
7377 fprintf (file, "}\n");
7380 if (flags & TDF_ENUMERATE_LOCALS)
7381 dump_enumerated_decls (file, flags);
7382 fprintf (file, "\n\n");
7384 current_function_decl = old_current_fndecl;
7387 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7389 DEBUG_FUNCTION void
7390 debug_function (tree fn, int flags)
7392 dump_function_to_file (fn, stderr, flags);
7396 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7398 static void
7399 print_pred_bbs (FILE *file, basic_block bb)
7401 edge e;
7402 edge_iterator ei;
7404 FOR_EACH_EDGE (e, ei, bb->preds)
7405 fprintf (file, "bb_%d ", e->src->index);
7409 /* Print on FILE the indexes for the successors of basic_block BB. */
7411 static void
7412 print_succ_bbs (FILE *file, basic_block bb)
7414 edge e;
7415 edge_iterator ei;
7417 FOR_EACH_EDGE (e, ei, bb->succs)
7418 fprintf (file, "bb_%d ", e->dest->index);
7421 /* Print to FILE the basic block BB following the VERBOSITY level. */
7423 void
7424 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7426 char *s_indent = (char *) alloca ((size_t) indent + 1);
7427 memset ((void *) s_indent, ' ', (size_t) indent);
7428 s_indent[indent] = '\0';
7430 /* Print basic_block's header. */
7431 if (verbosity >= 2)
7433 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7434 print_pred_bbs (file, bb);
7435 fprintf (file, "}, succs = {");
7436 print_succ_bbs (file, bb);
7437 fprintf (file, "})\n");
7440 /* Print basic_block's body. */
7441 if (verbosity >= 3)
7443 fprintf (file, "%s {\n", s_indent);
7444 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7445 fprintf (file, "%s }\n", s_indent);
7449 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7451 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7452 VERBOSITY level this outputs the contents of the loop, or just its
7453 structure. */
7455 static void
7456 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7458 char *s_indent;
7459 basic_block bb;
7461 if (loop == NULL)
7462 return;
7464 s_indent = (char *) alloca ((size_t) indent + 1);
7465 memset ((void *) s_indent, ' ', (size_t) indent);
7466 s_indent[indent] = '\0';
7468 /* Print loop's header. */
7469 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7470 if (loop->header)
7471 fprintf (file, "header = %d", loop->header->index);
7472 else
7474 fprintf (file, "deleted)\n");
7475 return;
7477 if (loop->latch)
7478 fprintf (file, ", latch = %d", loop->latch->index);
7479 else
7480 fprintf (file, ", multiple latches");
7481 fprintf (file, ", niter = ");
7482 print_generic_expr (file, loop->nb_iterations, 0);
7484 if (loop->any_upper_bound)
7486 fprintf (file, ", upper_bound = ");
7487 print_decu (loop->nb_iterations_upper_bound, file);
7490 if (loop->any_estimate)
7492 fprintf (file, ", estimate = ");
7493 print_decu (loop->nb_iterations_estimate, file);
7495 fprintf (file, ")\n");
7497 /* Print loop's body. */
7498 if (verbosity >= 1)
7500 fprintf (file, "%s{\n", s_indent);
7501 FOR_EACH_BB_FN (bb, cfun)
7502 if (bb->loop_father == loop)
7503 print_loops_bb (file, bb, indent, verbosity);
7505 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7506 fprintf (file, "%s}\n", s_indent);
7510 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7511 spaces. Following VERBOSITY level this outputs the contents of the
7512 loop, or just its structure. */
7514 static void
7515 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7516 int verbosity)
7518 if (loop == NULL)
7519 return;
7521 print_loop (file, loop, indent, verbosity);
7522 print_loop_and_siblings (file, loop->next, indent, verbosity);
7525 /* Follow a CFG edge from the entry point of the program, and on entry
7526 of a loop, pretty print the loop structure on FILE. */
7528 void
7529 print_loops (FILE *file, int verbosity)
7531 basic_block bb;
7533 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7534 if (bb && bb->loop_father)
7535 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7538 /* Dump a loop. */
7540 DEBUG_FUNCTION void
7541 debug (struct loop &ref)
7543 print_loop (stderr, &ref, 0, /*verbosity*/0);
7546 DEBUG_FUNCTION void
7547 debug (struct loop *ptr)
7549 if (ptr)
7550 debug (*ptr);
7551 else
7552 fprintf (stderr, "<nil>\n");
7555 /* Dump a loop verbosely. */
7557 DEBUG_FUNCTION void
7558 debug_verbose (struct loop &ref)
7560 print_loop (stderr, &ref, 0, /*verbosity*/3);
7563 DEBUG_FUNCTION void
7564 debug_verbose (struct loop *ptr)
7566 if (ptr)
7567 debug (*ptr);
7568 else
7569 fprintf (stderr, "<nil>\n");
7573 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7575 DEBUG_FUNCTION void
7576 debug_loops (int verbosity)
7578 print_loops (stderr, verbosity);
7581 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7583 DEBUG_FUNCTION void
7584 debug_loop (struct loop *loop, int verbosity)
7586 print_loop (stderr, loop, 0, verbosity);
7589 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7590 level. */
7592 DEBUG_FUNCTION void
7593 debug_loop_num (unsigned num, int verbosity)
7595 debug_loop (get_loop (cfun, num), verbosity);
7598 /* Return true if BB ends with a call, possibly followed by some
7599 instructions that must stay with the call. Return false,
7600 otherwise. */
7602 static bool
7603 gimple_block_ends_with_call_p (basic_block bb)
7605 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7606 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7610 /* Return true if BB ends with a conditional branch. Return false,
7611 otherwise. */
7613 static bool
7614 gimple_block_ends_with_condjump_p (const_basic_block bb)
7616 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7617 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7621 /* Return true if we need to add fake edge to exit at statement T.
7622 Helper function for gimple_flow_call_edges_add. */
7624 static bool
7625 need_fake_edge_p (gimple t)
7627 tree fndecl = NULL_TREE;
7628 int call_flags = 0;
7630 /* NORETURN and LONGJMP calls already have an edge to exit.
7631 CONST and PURE calls do not need one.
7632 We don't currently check for CONST and PURE here, although
7633 it would be a good idea, because those attributes are
7634 figured out from the RTL in mark_constant_function, and
7635 the counter incrementation code from -fprofile-arcs
7636 leads to different results from -fbranch-probabilities. */
7637 if (is_gimple_call (t))
7639 fndecl = gimple_call_fndecl (t);
7640 call_flags = gimple_call_flags (t);
7643 if (is_gimple_call (t)
7644 && fndecl
7645 && DECL_BUILT_IN (fndecl)
7646 && (call_flags & ECF_NOTHROW)
7647 && !(call_flags & ECF_RETURNS_TWICE)
7648 /* fork() doesn't really return twice, but the effect of
7649 wrapping it in __gcov_fork() which calls __gcov_flush()
7650 and clears the counters before forking has the same
7651 effect as returning twice. Force a fake edge. */
7652 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7653 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7654 return false;
7656 if (is_gimple_call (t))
7658 edge_iterator ei;
7659 edge e;
7660 basic_block bb;
7662 if (!(call_flags & ECF_NORETURN))
7663 return true;
7665 bb = gimple_bb (t);
7666 FOR_EACH_EDGE (e, ei, bb->succs)
7667 if ((e->flags & EDGE_FAKE) == 0)
7668 return true;
7671 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7672 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7673 return true;
7675 return false;
7679 /* Add fake edges to the function exit for any non constant and non
7680 noreturn calls (or noreturn calls with EH/abnormal edges),
7681 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7682 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7683 that were split.
7685 The goal is to expose cases in which entering a basic block does
7686 not imply that all subsequent instructions must be executed. */
7688 static int
7689 gimple_flow_call_edges_add (sbitmap blocks)
7691 int i;
7692 int blocks_split = 0;
7693 int last_bb = last_basic_block_for_fn (cfun);
7694 bool check_last_block = false;
7696 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7697 return 0;
7699 if (! blocks)
7700 check_last_block = true;
7701 else
7702 check_last_block = bitmap_bit_p (blocks,
7703 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7705 /* In the last basic block, before epilogue generation, there will be
7706 a fallthru edge to EXIT. Special care is required if the last insn
7707 of the last basic block is a call because make_edge folds duplicate
7708 edges, which would result in the fallthru edge also being marked
7709 fake, which would result in the fallthru edge being removed by
7710 remove_fake_edges, which would result in an invalid CFG.
7712 Moreover, we can't elide the outgoing fake edge, since the block
7713 profiler needs to take this into account in order to solve the minimal
7714 spanning tree in the case that the call doesn't return.
7716 Handle this by adding a dummy instruction in a new last basic block. */
7717 if (check_last_block)
7719 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7720 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7721 gimple t = NULL;
7723 if (!gsi_end_p (gsi))
7724 t = gsi_stmt (gsi);
7726 if (t && need_fake_edge_p (t))
7728 edge e;
7730 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7731 if (e)
7733 gsi_insert_on_edge (e, gimple_build_nop ());
7734 gsi_commit_edge_inserts ();
7739 /* Now add fake edges to the function exit for any non constant
7740 calls since there is no way that we can determine if they will
7741 return or not... */
7742 for (i = 0; i < last_bb; i++)
7744 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7745 gimple_stmt_iterator gsi;
7746 gimple stmt, last_stmt;
7748 if (!bb)
7749 continue;
7751 if (blocks && !bitmap_bit_p (blocks, i))
7752 continue;
7754 gsi = gsi_last_nondebug_bb (bb);
7755 if (!gsi_end_p (gsi))
7757 last_stmt = gsi_stmt (gsi);
7760 stmt = gsi_stmt (gsi);
7761 if (need_fake_edge_p (stmt))
7763 edge e;
7765 /* The handling above of the final block before the
7766 epilogue should be enough to verify that there is
7767 no edge to the exit block in CFG already.
7768 Calling make_edge in such case would cause us to
7769 mark that edge as fake and remove it later. */
7770 #ifdef ENABLE_CHECKING
7771 if (stmt == last_stmt)
7773 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7774 gcc_assert (e == NULL);
7776 #endif
7778 /* Note that the following may create a new basic block
7779 and renumber the existing basic blocks. */
7780 if (stmt != last_stmt)
7782 e = split_block (bb, stmt);
7783 if (e)
7784 blocks_split++;
7786 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7788 gsi_prev (&gsi);
7790 while (!gsi_end_p (gsi));
7794 if (blocks_split)
7795 verify_flow_info ();
7797 return blocks_split;
7800 /* Removes edge E and all the blocks dominated by it, and updates dominance
7801 information. The IL in E->src needs to be updated separately.
7802 If dominance info is not available, only the edge E is removed.*/
7804 void
7805 remove_edge_and_dominated_blocks (edge e)
7807 vec<basic_block> bbs_to_remove = vNULL;
7808 vec<basic_block> bbs_to_fix_dom = vNULL;
7809 bitmap df, df_idom;
7810 edge f;
7811 edge_iterator ei;
7812 bool none_removed = false;
7813 unsigned i;
7814 basic_block bb, dbb;
7815 bitmap_iterator bi;
7817 if (!dom_info_available_p (CDI_DOMINATORS))
7819 remove_edge (e);
7820 return;
7823 /* No updating is needed for edges to exit. */
7824 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7826 if (cfgcleanup_altered_bbs)
7827 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7828 remove_edge (e);
7829 return;
7832 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7833 that is not dominated by E->dest, then this set is empty. Otherwise,
7834 all the basic blocks dominated by E->dest are removed.
7836 Also, to DF_IDOM we store the immediate dominators of the blocks in
7837 the dominance frontier of E (i.e., of the successors of the
7838 removed blocks, if there are any, and of E->dest otherwise). */
7839 FOR_EACH_EDGE (f, ei, e->dest->preds)
7841 if (f == e)
7842 continue;
7844 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7846 none_removed = true;
7847 break;
7851 df = BITMAP_ALLOC (NULL);
7852 df_idom = BITMAP_ALLOC (NULL);
7854 if (none_removed)
7855 bitmap_set_bit (df_idom,
7856 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7857 else
7859 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7860 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7862 FOR_EACH_EDGE (f, ei, bb->succs)
7864 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7865 bitmap_set_bit (df, f->dest->index);
7868 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7869 bitmap_clear_bit (df, bb->index);
7871 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7873 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7874 bitmap_set_bit (df_idom,
7875 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7879 if (cfgcleanup_altered_bbs)
7881 /* Record the set of the altered basic blocks. */
7882 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7883 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7886 /* Remove E and the cancelled blocks. */
7887 if (none_removed)
7888 remove_edge (e);
7889 else
7891 /* Walk backwards so as to get a chance to substitute all
7892 released DEFs into debug stmts. See
7893 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7894 details. */
7895 for (i = bbs_to_remove.length (); i-- > 0; )
7896 delete_basic_block (bbs_to_remove[i]);
7899 /* Update the dominance information. The immediate dominator may change only
7900 for blocks whose immediate dominator belongs to DF_IDOM:
7902 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7903 removal. Let Z the arbitrary block such that idom(Z) = Y and
7904 Z dominates X after the removal. Before removal, there exists a path P
7905 from Y to X that avoids Z. Let F be the last edge on P that is
7906 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7907 dominates W, and because of P, Z does not dominate W), and W belongs to
7908 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7909 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7911 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7912 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7913 dbb;
7914 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7915 bbs_to_fix_dom.safe_push (dbb);
7918 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7920 BITMAP_FREE (df);
7921 BITMAP_FREE (df_idom);
7922 bbs_to_remove.release ();
7923 bbs_to_fix_dom.release ();
7926 /* Purge dead EH edges from basic block BB. */
7928 bool
7929 gimple_purge_dead_eh_edges (basic_block bb)
7931 bool changed = false;
7932 edge e;
7933 edge_iterator ei;
7934 gimple stmt = last_stmt (bb);
7936 if (stmt && stmt_can_throw_internal (stmt))
7937 return false;
7939 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7941 if (e->flags & EDGE_EH)
7943 remove_edge_and_dominated_blocks (e);
7944 changed = true;
7946 else
7947 ei_next (&ei);
7950 return changed;
7953 /* Purge dead EH edges from basic block listed in BLOCKS. */
7955 bool
7956 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7958 bool changed = false;
7959 unsigned i;
7960 bitmap_iterator bi;
7962 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7964 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7966 /* Earlier gimple_purge_dead_eh_edges could have removed
7967 this basic block already. */
7968 gcc_assert (bb || changed);
7969 if (bb != NULL)
7970 changed |= gimple_purge_dead_eh_edges (bb);
7973 return changed;
7976 /* Purge dead abnormal call edges from basic block BB. */
7978 bool
7979 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7981 bool changed = false;
7982 edge e;
7983 edge_iterator ei;
7984 gimple stmt = last_stmt (bb);
7986 if (!cfun->has_nonlocal_label
7987 && !cfun->calls_setjmp)
7988 return false;
7990 if (stmt && stmt_can_make_abnormal_goto (stmt))
7991 return false;
7993 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7995 if (e->flags & EDGE_ABNORMAL)
7997 if (e->flags & EDGE_FALLTHRU)
7998 e->flags &= ~EDGE_ABNORMAL;
7999 else
8000 remove_edge_and_dominated_blocks (e);
8001 changed = true;
8003 else
8004 ei_next (&ei);
8007 return changed;
8010 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8012 bool
8013 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8015 bool changed = false;
8016 unsigned i;
8017 bitmap_iterator bi;
8019 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8021 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8023 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8024 this basic block already. */
8025 gcc_assert (bb || changed);
8026 if (bb != NULL)
8027 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8030 return changed;
8033 /* This function is called whenever a new edge is created or
8034 redirected. */
8036 static void
8037 gimple_execute_on_growing_pred (edge e)
8039 basic_block bb = e->dest;
8041 if (!gimple_seq_empty_p (phi_nodes (bb)))
8042 reserve_phi_args_for_new_edge (bb);
8045 /* This function is called immediately before edge E is removed from
8046 the edge vector E->dest->preds. */
8048 static void
8049 gimple_execute_on_shrinking_pred (edge e)
8051 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8052 remove_phi_args (e);
8055 /*---------------------------------------------------------------------------
8056 Helper functions for Loop versioning
8057 ---------------------------------------------------------------------------*/
8059 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8060 of 'first'. Both of them are dominated by 'new_head' basic block. When
8061 'new_head' was created by 'second's incoming edge it received phi arguments
8062 on the edge by split_edge(). Later, additional edge 'e' was created to
8063 connect 'new_head' and 'first'. Now this routine adds phi args on this
8064 additional edge 'e' that new_head to second edge received as part of edge
8065 splitting. */
8067 static void
8068 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8069 basic_block new_head, edge e)
8071 gphi *phi1, *phi2;
8072 gphi_iterator psi1, psi2;
8073 tree def;
8074 edge e2 = find_edge (new_head, second);
8076 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8077 edge, we should always have an edge from NEW_HEAD to SECOND. */
8078 gcc_assert (e2 != NULL);
8080 /* Browse all 'second' basic block phi nodes and add phi args to
8081 edge 'e' for 'first' head. PHI args are always in correct order. */
8083 for (psi2 = gsi_start_phis (second),
8084 psi1 = gsi_start_phis (first);
8085 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8086 gsi_next (&psi2), gsi_next (&psi1))
8088 phi1 = psi1.phi ();
8089 phi2 = psi2.phi ();
8090 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8091 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8096 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8097 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8098 the destination of the ELSE part. */
8100 static void
8101 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8102 basic_block second_head ATTRIBUTE_UNUSED,
8103 basic_block cond_bb, void *cond_e)
8105 gimple_stmt_iterator gsi;
8106 gimple new_cond_expr;
8107 tree cond_expr = (tree) cond_e;
8108 edge e0;
8110 /* Build new conditional expr */
8111 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8112 NULL_TREE, NULL_TREE);
8114 /* Add new cond in cond_bb. */
8115 gsi = gsi_last_bb (cond_bb);
8116 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8118 /* Adjust edges appropriately to connect new head with first head
8119 as well as second head. */
8120 e0 = single_succ_edge (cond_bb);
8121 e0->flags &= ~EDGE_FALLTHRU;
8122 e0->flags |= EDGE_FALSE_VALUE;
8126 /* Do book-keeping of basic block BB for the profile consistency checker.
8127 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8128 then do post-pass accounting. Store the counting in RECORD. */
8129 static void
8130 gimple_account_profile_record (basic_block bb, int after_pass,
8131 struct profile_record *record)
8133 gimple_stmt_iterator i;
8134 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8136 record->size[after_pass]
8137 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8138 if (profile_status_for_fn (cfun) == PROFILE_READ)
8139 record->time[after_pass]
8140 += estimate_num_insns (gsi_stmt (i),
8141 &eni_time_weights) * bb->count;
8142 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8143 record->time[after_pass]
8144 += estimate_num_insns (gsi_stmt (i),
8145 &eni_time_weights) * bb->frequency;
8149 struct cfg_hooks gimple_cfg_hooks = {
8150 "gimple",
8151 gimple_verify_flow_info,
8152 gimple_dump_bb, /* dump_bb */
8153 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8154 create_bb, /* create_basic_block */
8155 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8156 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8157 gimple_can_remove_branch_p, /* can_remove_branch_p */
8158 remove_bb, /* delete_basic_block */
8159 gimple_split_block, /* split_block */
8160 gimple_move_block_after, /* move_block_after */
8161 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8162 gimple_merge_blocks, /* merge_blocks */
8163 gimple_predict_edge, /* predict_edge */
8164 gimple_predicted_by_p, /* predicted_by_p */
8165 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8166 gimple_duplicate_bb, /* duplicate_block */
8167 gimple_split_edge, /* split_edge */
8168 gimple_make_forwarder_block, /* make_forward_block */
8169 NULL, /* tidy_fallthru_edge */
8170 NULL, /* force_nonfallthru */
8171 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8172 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8173 gimple_flow_call_edges_add, /* flow_call_edges_add */
8174 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8175 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8176 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8177 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8178 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8179 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8180 flush_pending_stmts, /* flush_pending_stmts */
8181 gimple_empty_block_p, /* block_empty_p */
8182 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8183 gimple_account_profile_record,
8187 /* Split all critical edges. */
8189 unsigned int
8190 split_critical_edges (void)
8192 basic_block bb;
8193 edge e;
8194 edge_iterator ei;
8196 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8197 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8198 mappings around the calls to split_edge. */
8199 start_recording_case_labels ();
8200 FOR_ALL_BB_FN (bb, cfun)
8202 FOR_EACH_EDGE (e, ei, bb->succs)
8204 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8205 split_edge (e);
8206 /* PRE inserts statements to edges and expects that
8207 since split_critical_edges was done beforehand, committing edge
8208 insertions will not split more edges. In addition to critical
8209 edges we must split edges that have multiple successors and
8210 end by control flow statements, such as RESX.
8211 Go ahead and split them too. This matches the logic in
8212 gimple_find_edge_insert_loc. */
8213 else if ((!single_pred_p (e->dest)
8214 || !gimple_seq_empty_p (phi_nodes (e->dest))
8215 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8216 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8217 && !(e->flags & EDGE_ABNORMAL))
8219 gimple_stmt_iterator gsi;
8221 gsi = gsi_last_bb (e->src);
8222 if (!gsi_end_p (gsi)
8223 && stmt_ends_bb_p (gsi_stmt (gsi))
8224 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8225 && !gimple_call_builtin_p (gsi_stmt (gsi),
8226 BUILT_IN_RETURN)))
8227 split_edge (e);
8231 end_recording_case_labels ();
8232 return 0;
8235 namespace {
8237 const pass_data pass_data_split_crit_edges =
8239 GIMPLE_PASS, /* type */
8240 "crited", /* name */
8241 OPTGROUP_NONE, /* optinfo_flags */
8242 TV_TREE_SPLIT_EDGES, /* tv_id */
8243 PROP_cfg, /* properties_required */
8244 PROP_no_crit_edges, /* properties_provided */
8245 0, /* properties_destroyed */
8246 0, /* todo_flags_start */
8247 0, /* todo_flags_finish */
8250 class pass_split_crit_edges : public gimple_opt_pass
8252 public:
8253 pass_split_crit_edges (gcc::context *ctxt)
8254 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8257 /* opt_pass methods: */
8258 virtual unsigned int execute (function *) { return split_critical_edges (); }
8260 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8261 }; // class pass_split_crit_edges
8263 } // anon namespace
8265 gimple_opt_pass *
8266 make_pass_split_crit_edges (gcc::context *ctxt)
8268 return new pass_split_crit_edges (ctxt);
8272 /* Insert COND expression which is GIMPLE_COND after STMT
8273 in basic block BB with appropriate basic block split
8274 and creation of a new conditionally executed basic block.
8275 Return created basic block. */
8276 basic_block
8277 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8279 edge fall = split_block (bb, stmt);
8280 gimple_stmt_iterator iter = gsi_last_bb (bb);
8281 basic_block new_bb;
8283 /* Insert cond statement. */
8284 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8285 if (gsi_end_p (iter))
8286 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8287 else
8288 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8290 /* Create conditionally executed block. */
8291 new_bb = create_empty_bb (bb);
8292 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8293 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8295 /* Fix edge for split bb. */
8296 fall->flags = EDGE_FALSE_VALUE;
8298 /* Update dominance info. */
8299 if (dom_info_available_p (CDI_DOMINATORS))
8301 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8302 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8305 /* Update loop info. */
8306 if (current_loops)
8307 add_bb_to_loop (new_bb, bb->loop_father);
8309 return new_bb;
8312 /* Build a ternary operation and gimplify it. Emit code before GSI.
8313 Return the gimple_val holding the result. */
8315 tree
8316 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8317 tree type, tree a, tree b, tree c)
8319 tree ret;
8320 location_t loc = gimple_location (gsi_stmt (*gsi));
8322 ret = fold_build3_loc (loc, code, type, a, b, c);
8323 STRIP_NOPS (ret);
8325 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8326 GSI_SAME_STMT);
8329 /* Build a binary operation and gimplify it. Emit code before GSI.
8330 Return the gimple_val holding the result. */
8332 tree
8333 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8334 tree type, tree a, tree b)
8336 tree ret;
8338 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8339 STRIP_NOPS (ret);
8341 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8342 GSI_SAME_STMT);
8345 /* Build a unary operation and gimplify it. Emit code before GSI.
8346 Return the gimple_val holding the result. */
8348 tree
8349 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8350 tree a)
8352 tree ret;
8354 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8355 STRIP_NOPS (ret);
8357 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8358 GSI_SAME_STMT);
8363 /* Given a basic block B which ends with a conditional and has
8364 precisely two successors, determine which of the edges is taken if
8365 the conditional is true and which is taken if the conditional is
8366 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8368 void
8369 extract_true_false_edges_from_block (basic_block b,
8370 edge *true_edge,
8371 edge *false_edge)
8373 edge e = EDGE_SUCC (b, 0);
8375 if (e->flags & EDGE_TRUE_VALUE)
8377 *true_edge = e;
8378 *false_edge = EDGE_SUCC (b, 1);
8380 else
8382 *false_edge = e;
8383 *true_edge = EDGE_SUCC (b, 1);
8387 /* Emit return warnings. */
8389 namespace {
8391 const pass_data pass_data_warn_function_return =
8393 GIMPLE_PASS, /* type */
8394 "*warn_function_return", /* name */
8395 OPTGROUP_NONE, /* optinfo_flags */
8396 TV_NONE, /* tv_id */
8397 PROP_cfg, /* properties_required */
8398 0, /* properties_provided */
8399 0, /* properties_destroyed */
8400 0, /* todo_flags_start */
8401 0, /* todo_flags_finish */
8404 class pass_warn_function_return : public gimple_opt_pass
8406 public:
8407 pass_warn_function_return (gcc::context *ctxt)
8408 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8411 /* opt_pass methods: */
8412 virtual unsigned int execute (function *);
8414 }; // class pass_warn_function_return
8416 unsigned int
8417 pass_warn_function_return::execute (function *fun)
8419 source_location location;
8420 gimple last;
8421 edge e;
8422 edge_iterator ei;
8424 if (!targetm.warn_func_return (fun->decl))
8425 return 0;
8427 /* If we have a path to EXIT, then we do return. */
8428 if (TREE_THIS_VOLATILE (fun->decl)
8429 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8431 location = UNKNOWN_LOCATION;
8432 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8434 last = last_stmt (e->src);
8435 if ((gimple_code (last) == GIMPLE_RETURN
8436 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8437 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8438 break;
8440 if (location == UNKNOWN_LOCATION)
8441 location = cfun->function_end_locus;
8442 warning_at (location, 0, "%<noreturn%> function does return");
8445 /* If we see "return;" in some basic block, then we do reach the end
8446 without returning a value. */
8447 else if (warn_return_type
8448 && !TREE_NO_WARNING (fun->decl)
8449 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8450 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8452 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8454 gimple last = last_stmt (e->src);
8455 greturn *return_stmt = dyn_cast <greturn *> (last);
8456 if (return_stmt
8457 && gimple_return_retval (return_stmt) == NULL
8458 && !gimple_no_warning_p (last))
8460 location = gimple_location (last);
8461 if (location == UNKNOWN_LOCATION)
8462 location = fun->function_end_locus;
8463 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8464 TREE_NO_WARNING (fun->decl) = 1;
8465 break;
8469 return 0;
8472 } // anon namespace
8474 gimple_opt_pass *
8475 make_pass_warn_function_return (gcc::context *ctxt)
8477 return new pass_warn_function_return (ctxt);
8480 /* Walk a gimplified function and warn for functions whose return value is
8481 ignored and attribute((warn_unused_result)) is set. This is done before
8482 inlining, so we don't have to worry about that. */
8484 static void
8485 do_warn_unused_result (gimple_seq seq)
8487 tree fdecl, ftype;
8488 gimple_stmt_iterator i;
8490 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8492 gimple g = gsi_stmt (i);
8494 switch (gimple_code (g))
8496 case GIMPLE_BIND:
8497 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8498 break;
8499 case GIMPLE_TRY:
8500 do_warn_unused_result (gimple_try_eval (g));
8501 do_warn_unused_result (gimple_try_cleanup (g));
8502 break;
8503 case GIMPLE_CATCH:
8504 do_warn_unused_result (gimple_catch_handler (
8505 as_a <gcatch *> (g)));
8506 break;
8507 case GIMPLE_EH_FILTER:
8508 do_warn_unused_result (gimple_eh_filter_failure (g));
8509 break;
8511 case GIMPLE_CALL:
8512 if (gimple_call_lhs (g))
8513 break;
8514 if (gimple_call_internal_p (g))
8515 break;
8517 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8518 LHS. All calls whose value is ignored should be
8519 represented like this. Look for the attribute. */
8520 fdecl = gimple_call_fndecl (g);
8521 ftype = gimple_call_fntype (g);
8523 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8525 location_t loc = gimple_location (g);
8527 if (fdecl)
8528 warning_at (loc, OPT_Wunused_result,
8529 "ignoring return value of %qD, "
8530 "declared with attribute warn_unused_result",
8531 fdecl);
8532 else
8533 warning_at (loc, OPT_Wunused_result,
8534 "ignoring return value of function "
8535 "declared with attribute warn_unused_result");
8537 break;
8539 default:
8540 /* Not a container, not a call, or a call whose value is used. */
8541 break;
8546 namespace {
8548 const pass_data pass_data_warn_unused_result =
8550 GIMPLE_PASS, /* type */
8551 "*warn_unused_result", /* name */
8552 OPTGROUP_NONE, /* optinfo_flags */
8553 TV_NONE, /* tv_id */
8554 PROP_gimple_any, /* properties_required */
8555 0, /* properties_provided */
8556 0, /* properties_destroyed */
8557 0, /* todo_flags_start */
8558 0, /* todo_flags_finish */
8561 class pass_warn_unused_result : public gimple_opt_pass
8563 public:
8564 pass_warn_unused_result (gcc::context *ctxt)
8565 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8568 /* opt_pass methods: */
8569 virtual bool gate (function *) { return flag_warn_unused_result; }
8570 virtual unsigned int execute (function *)
8572 do_warn_unused_result (gimple_body (current_function_decl));
8573 return 0;
8576 }; // class pass_warn_unused_result
8578 } // anon namespace
8580 gimple_opt_pass *
8581 make_pass_warn_unused_result (gcc::context *ctxt)
8583 return new pass_warn_unused_result (ctxt);
8586 /* IPA passes, compilation of earlier functions or inlining
8587 might have changed some properties, such as marked functions nothrow,
8588 pure, const or noreturn.
8589 Remove redundant edges and basic blocks, and create new ones if necessary.
8591 This pass can't be executed as stand alone pass from pass manager, because
8592 in between inlining and this fixup the verify_flow_info would fail. */
8594 unsigned int
8595 execute_fixup_cfg (void)
8597 basic_block bb;
8598 gimple_stmt_iterator gsi;
8599 int todo = 0;
8600 gcov_type count_scale;
8601 edge e;
8602 edge_iterator ei;
8604 count_scale
8605 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8606 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8608 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8609 cgraph_node::get (current_function_decl)->count;
8610 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8611 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8612 count_scale);
8614 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8615 e->count = apply_scale (e->count, count_scale);
8617 FOR_EACH_BB_FN (bb, cfun)
8619 bb->count = apply_scale (bb->count, count_scale);
8620 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8622 gimple stmt = gsi_stmt (gsi);
8623 tree decl = is_gimple_call (stmt)
8624 ? gimple_call_fndecl (stmt)
8625 : NULL;
8626 if (decl)
8628 int flags = gimple_call_flags (stmt);
8629 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8631 if (gimple_purge_dead_abnormal_call_edges (bb))
8632 todo |= TODO_cleanup_cfg;
8634 if (gimple_in_ssa_p (cfun))
8636 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8637 update_stmt (stmt);
8641 if (flags & ECF_NORETURN
8642 && fixup_noreturn_call (stmt))
8643 todo |= TODO_cleanup_cfg;
8646 /* Remove stores to variables we marked write-only.
8647 Keep access when store has side effect, i.e. in case when source
8648 is volatile. */
8649 if (gimple_store_p (stmt)
8650 && !gimple_has_side_effects (stmt))
8652 tree lhs = get_base_address (gimple_get_lhs (stmt));
8654 if (TREE_CODE (lhs) == VAR_DECL
8655 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8656 && varpool_node::get (lhs)->writeonly)
8658 unlink_stmt_vdef (stmt);
8659 gsi_remove (&gsi, true);
8660 release_defs (stmt);
8661 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8662 continue;
8665 /* For calls we can simply remove LHS when it is known
8666 to be write-only. */
8667 if (is_gimple_call (stmt)
8668 && gimple_get_lhs (stmt))
8670 tree lhs = get_base_address (gimple_get_lhs (stmt));
8672 if (TREE_CODE (lhs) == VAR_DECL
8673 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8674 && varpool_node::get (lhs)->writeonly)
8676 gimple_call_set_lhs (stmt, NULL);
8677 update_stmt (stmt);
8678 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8682 if (maybe_clean_eh_stmt (stmt)
8683 && gimple_purge_dead_eh_edges (bb))
8684 todo |= TODO_cleanup_cfg;
8685 gsi_next (&gsi);
8688 FOR_EACH_EDGE (e, ei, bb->succs)
8689 e->count = apply_scale (e->count, count_scale);
8691 /* If we have a basic block with no successors that does not
8692 end with a control statement or a noreturn call end it with
8693 a call to __builtin_unreachable. This situation can occur
8694 when inlining a noreturn call that does in fact return. */
8695 if (EDGE_COUNT (bb->succs) == 0)
8697 gimple stmt = last_stmt (bb);
8698 if (!stmt
8699 || (!is_ctrl_stmt (stmt)
8700 && (!is_gimple_call (stmt)
8701 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8703 if (stmt && is_gimple_call (stmt))
8704 gimple_call_set_ctrl_altering (stmt, false);
8705 stmt = gimple_build_call
8706 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8707 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8708 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8712 if (count_scale != REG_BR_PROB_BASE)
8713 compute_function_frequency ();
8715 /* Dump a textual representation of the flowgraph. */
8716 if (dump_file)
8717 gimple_dump_cfg (dump_file, dump_flags);
8719 if (current_loops
8720 && (todo & TODO_cleanup_cfg))
8721 loops_state_set (LOOPS_NEED_FIXUP);
8723 return todo;
8726 namespace {
8728 const pass_data pass_data_fixup_cfg =
8730 GIMPLE_PASS, /* type */
8731 "*free_cfg_annotations", /* name */
8732 OPTGROUP_NONE, /* optinfo_flags */
8733 TV_NONE, /* tv_id */
8734 PROP_cfg, /* properties_required */
8735 0, /* properties_provided */
8736 0, /* properties_destroyed */
8737 0, /* todo_flags_start */
8738 0, /* todo_flags_finish */
8741 class pass_fixup_cfg : public gimple_opt_pass
8743 public:
8744 pass_fixup_cfg (gcc::context *ctxt)
8745 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8748 /* opt_pass methods: */
8749 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8750 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8752 }; // class pass_fixup_cfg
8754 } // anon namespace
8756 gimple_opt_pass *
8757 make_pass_fixup_cfg (gcc::context *ctxt)
8759 return new pass_fixup_cfg (ctxt);
8762 /* Garbage collection support for edge_def. */
8764 extern void gt_ggc_mx (tree&);
8765 extern void gt_ggc_mx (gimple&);
8766 extern void gt_ggc_mx (rtx&);
8767 extern void gt_ggc_mx (basic_block&);
8769 static void
8770 gt_ggc_mx (rtx_insn *& x)
8772 if (x)
8773 gt_ggc_mx_rtx_def ((void *) x);
8776 void
8777 gt_ggc_mx (edge_def *e)
8779 tree block = LOCATION_BLOCK (e->goto_locus);
8780 gt_ggc_mx (e->src);
8781 gt_ggc_mx (e->dest);
8782 if (current_ir_type () == IR_GIMPLE)
8783 gt_ggc_mx (e->insns.g);
8784 else
8785 gt_ggc_mx (e->insns.r);
8786 gt_ggc_mx (block);
8789 /* PCH support for edge_def. */
8791 extern void gt_pch_nx (tree&);
8792 extern void gt_pch_nx (gimple&);
8793 extern void gt_pch_nx (rtx&);
8794 extern void gt_pch_nx (basic_block&);
8796 static void
8797 gt_pch_nx (rtx_insn *& x)
8799 if (x)
8800 gt_pch_nx_rtx_def ((void *) x);
8803 void
8804 gt_pch_nx (edge_def *e)
8806 tree block = LOCATION_BLOCK (e->goto_locus);
8807 gt_pch_nx (e->src);
8808 gt_pch_nx (e->dest);
8809 if (current_ir_type () == IR_GIMPLE)
8810 gt_pch_nx (e->insns.g);
8811 else
8812 gt_pch_nx (e->insns.r);
8813 gt_pch_nx (block);
8816 void
8817 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8819 tree block = LOCATION_BLOCK (e->goto_locus);
8820 op (&(e->src), cookie);
8821 op (&(e->dest), cookie);
8822 if (current_ir_type () == IR_GIMPLE)
8823 op (&(e->insns.g), cookie);
8824 else
8825 op (&(e->insns.r), cookie);
8826 op (&(block), cookie);