Introduce gimple_transaction
[official-gcc.git] / gcc / tree-cfg.c
blob1662d4900d507ab8a6110c6fd6ba39be6d333f9e
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "gimple-pretty-print.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
47 #include "cgraph.h"
48 #include "tree-cfg.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
56 #include "expr.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
62 #include "except.h"
63 #include "cfgloop.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
67 #include "target.h"
68 #include "tree-ssa-live.h"
69 #include "omp-low.h"
70 #include "tree-cfgcleanup.h"
71 #include "wide-int.h"
72 #include "wide-int-print.h"
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
77 /* Local declarations. */
79 /* Initial capacity for the basic block array. */
80 static const int initial_cfg_capacity = 20;
82 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
83 which use a particular edge. The CASE_LABEL_EXPRs are chained together
84 via their CASE_CHAIN field, which we clear after we're done with the
85 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
87 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
88 update the case vector in response to edge redirections.
90 Right now this table is set up and torn down at key points in the
91 compilation process. It would be nice if we could make the table
92 more persistent. The key is getting notification of changes to
93 the CFG (particularly edge removal, creation and redirection). */
95 static hash_map<edge, tree> *edge_to_cases;
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
101 static bitmap touched_switch_bbs;
/* CFG statistics gathered while building/merging blocks.  */
struct cfg_stats_d
{
  /* Number of labels discarded because they were merged into a
     predecessor's label during CFG construction.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
111 /* Hash table to store last discriminator assigned for each locus. */
112 struct locus_discrim_map
114 location_t locus;
115 int discriminator;
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
122 typedef locus_discrim_map value_type;
123 typedef locus_discrim_map compare_type;
124 static inline hashval_t hash (const value_type *);
125 static inline bool equal (const value_type *, const compare_type *);
128 /* Trivial hash function for a location_t. ITEM is a pointer to
129 a hash table entry that maps a location_t to a discriminator. */
131 inline hashval_t
132 locus_discrim_hasher::hash (const value_type *item)
134 return LOCATION_LINE (item->locus);
137 /* Equality function for the locus-to-discriminator map. A and B
138 point to the two hash table entries to compare. */
140 inline bool
141 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
143 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
146 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq);
151 /* Edges. */
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block);
155 static void make_gimple_switch_edges (gimple_switch, basic_block);
156 static bool make_goto_expr_edges (basic_block);
157 static void make_gimple_asm_edges (basic_block);
158 static edge gimple_redirect_edge_and_branch (edge, basic_block);
159 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple, gimple);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge);
165 static gimple first_non_label_stmt (basic_block);
166 static bool verify_gimple_transaction (gimple_transaction);
167 static bool call_can_make_abnormal_goto (gimple);
169 /* Flowgraph optimization and cleanup. */
170 static void gimple_merge_blocks (basic_block, basic_block);
171 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
172 static void remove_bb (basic_block);
173 static edge find_taken_edge_computed_goto (basic_block, tree);
174 static edge find_taken_edge_cond_expr (basic_block, tree);
175 static edge find_taken_edge_switch_expr (gimple_switch, basic_block, tree);
176 static tree find_case_label_for_value (gimple_switch, tree);
178 void
179 init_empty_tree_cfg_for_function (struct function *fn)
181 /* Initialize the basic block array. */
182 init_flow (fn);
183 profile_status_for_fn (fn) = PROFILE_ABSENT;
184 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
185 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
186 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
187 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
188 initial_cfg_capacity);
190 /* Build a mapping of labels to their associated blocks. */
191 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
192 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
193 initial_cfg_capacity);
195 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
196 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
198 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
199 = EXIT_BLOCK_PTR_FOR_FN (fn);
200 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
201 = ENTRY_BLOCK_PTR_FOR_FN (fn);
204 void
205 init_empty_tree_cfg (void)
207 init_empty_tree_cfg_for_function (cfun);
210 /*---------------------------------------------------------------------------
211 Create basic blocks
212 ---------------------------------------------------------------------------*/
214 /* Entry point to the CFG builder for trees. SEQ is the sequence of
215 statements to be added to the flowgraph. */
217 static void
218 build_gimple_cfg (gimple_seq seq)
220 /* Register specific gimple functions. */
221 gimple_register_cfg_hooks ();
223 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
225 init_empty_tree_cfg ();
227 make_blocks (seq);
229 /* Make sure there is always at least one block, even if it's empty. */
230 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
231 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
233 /* Adjust the size of the array. */
234 if (basic_block_info_for_fn (cfun)->length ()
235 < (size_t) n_basic_blocks_for_fn (cfun))
236 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
237 n_basic_blocks_for_fn (cfun));
239 /* To speed up statement iterator walks, we first purge dead labels. */
240 cleanup_dead_labels ();
242 /* Group case nodes to reduce the number of edges.
243 We do this after cleaning up dead labels because otherwise we miss
244 a lot of obvious case merging opportunities. */
245 group_case_labels ();
247 /* Create the edges of the flowgraph. */
248 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
249 make_edges ();
250 assign_discriminators ();
251 cleanup_dead_labels ();
252 delete discriminator_per_locus;
253 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
258 them and propagate the information to the loop. We assume that the
259 annotations come immediately before the condition of the loop. */
261 static void
262 replace_loop_annotate ()
264 struct loop *loop;
265 basic_block bb;
266 gimple_stmt_iterator gsi;
267 gimple stmt;
269 FOR_EACH_LOOP (loop, 0)
271 gsi = gsi_last_bb (loop->header);
272 stmt = gsi_stmt (gsi);
273 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
274 continue;
275 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
277 stmt = gsi_stmt (gsi);
278 if (gimple_code (stmt) != GIMPLE_CALL)
279 break;
280 if (!gimple_call_internal_p (stmt)
281 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
282 break;
283 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
285 case annot_expr_ivdep_kind:
286 loop->safelen = INT_MAX;
287 break;
288 case annot_expr_no_vector_kind:
289 loop->dont_vectorize = true;
290 break;
291 case annot_expr_vector_kind:
292 loop->force_vectorize = true;
293 cfun->has_force_vectorize_loops = true;
294 break;
295 default:
296 gcc_unreachable ();
298 stmt = gimple_build_assign (gimple_call_lhs (stmt),
299 gimple_call_arg (stmt, 0));
300 gsi_replace (&gsi, stmt, true);
304 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
305 FOR_EACH_BB_FN (bb, cfun)
307 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
309 stmt = gsi_stmt (gsi);
310 if (gimple_code (stmt) != GIMPLE_CALL)
311 break;
312 if (!gimple_call_internal_p (stmt)
313 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
314 break;
315 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
317 case annot_expr_ivdep_kind:
318 case annot_expr_no_vector_kind:
319 case annot_expr_vector_kind:
320 break;
321 default:
322 gcc_unreachable ();
324 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
325 stmt = gimple_build_assign (gimple_call_lhs (stmt),
326 gimple_call_arg (stmt, 0));
327 gsi_replace (&gsi, stmt, true);
333 static unsigned int
334 execute_build_cfg (void)
336 gimple_seq body = gimple_body (current_function_decl);
338 build_gimple_cfg (body);
339 gimple_set_body (current_function_decl, NULL);
340 if (dump_file && (dump_flags & TDF_DETAILS))
342 fprintf (dump_file, "Scope blocks:\n");
343 dump_scope_blocks (dump_file, dump_flags);
345 cleanup_tree_cfg ();
346 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
347 replace_loop_annotate ();
348 return 0;
351 namespace {
353 const pass_data pass_data_build_cfg =
355 GIMPLE_PASS, /* type */
356 "cfg", /* name */
357 OPTGROUP_NONE, /* optinfo_flags */
358 TV_TREE_CFG, /* tv_id */
359 PROP_gimple_leh, /* properties_required */
360 ( PROP_cfg | PROP_loops ), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 0, /* todo_flags_finish */
366 class pass_build_cfg : public gimple_opt_pass
368 public:
369 pass_build_cfg (gcc::context *ctxt)
370 : gimple_opt_pass (pass_data_build_cfg, ctxt)
373 /* opt_pass methods: */
374 virtual unsigned int execute (function *) { return execute_build_cfg (); }
376 }; // class pass_build_cfg
378 } // anon namespace
380 gimple_opt_pass *
381 make_pass_build_cfg (gcc::context *ctxt)
383 return new pass_build_cfg (ctxt);
387 /* Return true if T is a computed goto. */
389 bool
390 computed_goto_p (gimple t)
392 return (gimple_code (t) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
399 <bb C>:
401 if (something)
402 goto <bb N>;
403 else
404 goto <bb M>;
405 <bb N>:
406 __builtin_unreachable ();
407 <bb M>: */
409 bool
410 assert_unreachable_fallthru_edge_p (edge e)
412 basic_block pred_bb = e->src;
413 gimple last = last_stmt (pred_bb);
414 if (last && gimple_code (last) == GIMPLE_COND)
416 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
417 if (other_bb == e->dest)
418 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
419 if (EDGE_COUNT (other_bb->succs) == 0)
421 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
422 gimple stmt;
424 if (gsi_end_p (gsi))
425 return false;
426 stmt = gsi_stmt (gsi);
427 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
429 gsi_next (&gsi);
430 if (gsi_end_p (gsi))
431 return false;
432 stmt = gsi_stmt (gsi);
434 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
437 return false;
441 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
442 could alter control flow except via eh. We initialize the flag at
443 CFG build time and only ever clear it later. */
445 static void
446 gimple_call_initialize_ctrl_altering (gimple stmt)
448 int flags = gimple_call_flags (stmt);
450 /* A call alters control flow if it can make an abnormal goto. */
451 if (call_can_make_abnormal_goto (stmt)
452 /* A call also alters control flow if it does not return. */
453 || flags & ECF_NORETURN
454 /* TM ending statements have backedges out of the transaction.
455 Return true so we split the basic block containing them.
456 Note that the TM_BUILTIN test is merely an optimization. */
457 || ((flags & ECF_TM_BUILTIN)
458 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
459 /* BUILT_IN_RETURN call is same as return statement. */
460 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
461 gimple_call_set_ctrl_altering (stmt, true);
462 else
463 gimple_call_set_ctrl_altering (stmt, false);
467 /* Build a flowgraph for the sequence of stmts SEQ. */
469 static void
470 make_blocks (gimple_seq seq)
472 gimple_stmt_iterator i = gsi_start (seq);
473 gimple stmt = NULL;
474 bool start_new_block = true;
475 bool first_stmt_of_seq = true;
476 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
478 while (!gsi_end_p (i))
480 gimple prev_stmt;
482 prev_stmt = stmt;
483 stmt = gsi_stmt (i);
485 if (stmt && is_gimple_call (stmt))
486 gimple_call_initialize_ctrl_altering (stmt);
488 /* If the statement starts a new basic block or if we have determined
489 in a previous pass that we need to create a new block for STMT, do
490 so now. */
491 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
493 if (!first_stmt_of_seq)
494 gsi_split_seq_before (&i, &seq);
495 bb = create_basic_block (seq, NULL, bb);
496 start_new_block = false;
499 /* Now add STMT to BB and create the subgraphs for special statement
500 codes. */
501 gimple_set_bb (stmt, bb);
503 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
504 next iteration. */
505 if (stmt_ends_bb_p (stmt))
507 /* If the stmt can make abnormal goto use a new temporary
508 for the assignment to the LHS. This makes sure the old value
509 of the LHS is available on the abnormal edge. Otherwise
510 we will end up with overlapping life-ranges for abnormal
511 SSA names. */
512 if (gimple_has_lhs (stmt)
513 && stmt_can_make_abnormal_goto (stmt)
514 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
516 tree lhs = gimple_get_lhs (stmt);
517 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
518 gimple s = gimple_build_assign (lhs, tmp);
519 gimple_set_location (s, gimple_location (stmt));
520 gimple_set_block (s, gimple_block (stmt));
521 gimple_set_lhs (stmt, tmp);
522 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
523 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
524 DECL_GIMPLE_REG_P (tmp) = 1;
525 gsi_insert_after (&i, s, GSI_SAME_STMT);
527 start_new_block = true;
530 gsi_next (&i);
531 first_stmt_of_seq = false;
536 /* Create and return a new empty basic block after bb AFTER. */
538 static basic_block
539 create_bb (void *h, void *e, basic_block after)
541 basic_block bb;
543 gcc_assert (!e);
545 /* Create and initialize a new basic block. Since alloc_block uses
546 GC allocation that clears memory to allocate a basic block, we do
547 not have to clear the newly allocated basic block here. */
548 bb = alloc_block ();
550 bb->index = last_basic_block_for_fn (cfun);
551 bb->flags = BB_NEW;
552 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
554 /* Add the new block to the linked list of blocks. */
555 link_block (bb, after);
557 /* Grow the basic block array if needed. */
558 if ((size_t) last_basic_block_for_fn (cfun)
559 == basic_block_info_for_fn (cfun)->length ())
561 size_t new_size =
562 (last_basic_block_for_fn (cfun)
563 + (last_basic_block_for_fn (cfun) + 3) / 4);
564 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
567 /* Add the newly created block to the array. */
568 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
570 n_basic_blocks_for_fn (cfun)++;
571 last_basic_block_for_fn (cfun)++;
573 return bb;
577 /*---------------------------------------------------------------------------
578 Edge creation
579 ---------------------------------------------------------------------------*/
581 /* Fold COND_EXPR_COND of each COND_EXPR. */
583 void
584 fold_cond_expr_cond (void)
586 basic_block bb;
588 FOR_EACH_BB_FN (bb, cfun)
590 gimple stmt = last_stmt (bb);
592 if (stmt && gimple_code (stmt) == GIMPLE_COND)
594 location_t loc = gimple_location (stmt);
595 tree cond;
596 bool zerop, onep;
598 fold_defer_overflow_warnings ();
599 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
600 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
601 if (cond)
603 zerop = integer_zerop (cond);
604 onep = integer_onep (cond);
606 else
607 zerop = onep = false;
609 fold_undefer_overflow_warnings (zerop || onep,
610 stmt,
611 WARN_STRICT_OVERFLOW_CONDITIONAL);
612 if (zerop)
613 gimple_cond_make_false (stmt);
614 else if (onep)
615 gimple_cond_make_true (stmt);
620 /* If basic block BB has an abnormal edge to a basic block
621 containing IFN_ABNORMAL_DISPATCHER internal call, return
622 that the dispatcher's basic block, otherwise return NULL. */
624 basic_block
625 get_abnormal_succ_dispatcher (basic_block bb)
627 edge e;
628 edge_iterator ei;
630 FOR_EACH_EDGE (e, ei, bb->succs)
631 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
633 gimple_stmt_iterator gsi
634 = gsi_start_nondebug_after_labels_bb (e->dest);
635 gimple g = gsi_stmt (gsi);
636 if (g
637 && is_gimple_call (g)
638 && gimple_call_internal_p (g)
639 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
640 return e->dest;
642 return NULL;
645 /* Helper function for make_edges. Create a basic block with
646 with ABNORMAL_DISPATCHER internal call in it if needed, and
647 create abnormal edges from BBS to it and from it to FOR_BB
648 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
650 static void
651 handle_abnormal_edges (basic_block *dispatcher_bbs,
652 basic_block for_bb, int *bb_to_omp_idx,
653 auto_vec<basic_block> *bbs, bool computed_goto)
655 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
656 unsigned int idx = 0;
657 basic_block bb;
658 bool inner = false;
660 if (bb_to_omp_idx)
662 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
663 if (bb_to_omp_idx[for_bb->index] != 0)
664 inner = true;
667 /* If the dispatcher has been created already, then there are basic
668 blocks with abnormal edges to it, so just make a new edge to
669 for_bb. */
670 if (*dispatcher == NULL)
672 /* Check if there are any basic blocks that need to have
673 abnormal edges to this dispatcher. If there are none, return
674 early. */
675 if (bb_to_omp_idx == NULL)
677 if (bbs->is_empty ())
678 return;
680 else
682 FOR_EACH_VEC_ELT (*bbs, idx, bb)
683 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
684 break;
685 if (bb == NULL)
686 return;
689 /* Create the dispatcher bb. */
690 *dispatcher = create_basic_block (NULL, NULL, for_bb);
691 if (computed_goto)
693 /* Factor computed gotos into a common computed goto site. Also
694 record the location of that site so that we can un-factor the
695 gotos after we have converted back to normal form. */
696 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
698 /* Create the destination of the factored goto. Each original
699 computed goto will put its desired destination into this
700 variable and jump to the label we create immediately below. */
701 tree var = create_tmp_var (ptr_type_node, "gotovar");
703 /* Build a label for the new block which will contain the
704 factored computed goto. */
705 tree factored_label_decl
706 = create_artificial_label (UNKNOWN_LOCATION);
707 gimple factored_computed_goto_label
708 = gimple_build_label (factored_label_decl);
709 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
711 /* Build our new computed goto. */
712 gimple factored_computed_goto = gimple_build_goto (var);
713 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
715 FOR_EACH_VEC_ELT (*bbs, idx, bb)
717 if (bb_to_omp_idx
718 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
719 continue;
721 gsi = gsi_last_bb (bb);
722 gimple last = gsi_stmt (gsi);
724 gcc_assert (computed_goto_p (last));
726 /* Copy the original computed goto's destination into VAR. */
727 gimple assignment
728 = gimple_build_assign (var, gimple_goto_dest (last));
729 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
731 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
732 e->goto_locus = gimple_location (last);
733 gsi_remove (&gsi, true);
736 else
738 tree arg = inner ? boolean_true_node : boolean_false_node;
739 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
740 1, arg);
741 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
742 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
744 /* Create predecessor edges of the dispatcher. */
745 FOR_EACH_VEC_ELT (*bbs, idx, bb)
747 if (bb_to_omp_idx
748 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
749 continue;
750 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
755 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
758 /* Join all the blocks in the flowgraph. */
760 static void
761 make_edges (void)
763 basic_block bb;
764 struct omp_region *cur_region = NULL;
765 auto_vec<basic_block> ab_edge_goto;
766 auto_vec<basic_block> ab_edge_call;
767 int *bb_to_omp_idx = NULL;
768 int cur_omp_region_idx = 0;
770 /* Create an edge from entry to the first block with executable
771 statements in it. */
772 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
773 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
774 EDGE_FALLTHRU);
776 /* Traverse the basic block array placing edges. */
777 FOR_EACH_BB_FN (bb, cfun)
779 gimple last = last_stmt (bb);
780 bool fallthru;
782 if (bb_to_omp_idx)
783 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
785 if (last)
787 enum gimple_code code = gimple_code (last);
788 switch (code)
790 case GIMPLE_GOTO:
791 if (make_goto_expr_edges (bb))
792 ab_edge_goto.safe_push (bb);
793 fallthru = false;
794 break;
795 case GIMPLE_RETURN:
797 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
798 e->goto_locus = gimple_location (last);
799 fallthru = false;
801 break;
802 case GIMPLE_COND:
803 make_cond_expr_edges (bb);
804 fallthru = false;
805 break;
806 case GIMPLE_SWITCH:
807 make_gimple_switch_edges (as_a <gimple_switch> (last), bb);
808 fallthru = false;
809 break;
810 case GIMPLE_RESX:
811 make_eh_edges (last);
812 fallthru = false;
813 break;
814 case GIMPLE_EH_DISPATCH:
815 fallthru = make_eh_dispatch_edges (last);
816 break;
818 case GIMPLE_CALL:
819 /* If this function receives a nonlocal goto, then we need to
820 make edges from this call site to all the nonlocal goto
821 handlers. */
822 if (stmt_can_make_abnormal_goto (last))
823 ab_edge_call.safe_push (bb);
825 /* If this statement has reachable exception handlers, then
826 create abnormal edges to them. */
827 make_eh_edges (last);
829 /* BUILTIN_RETURN is really a return statement. */
830 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
832 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
833 fallthru = false;
835 /* Some calls are known not to return. */
836 else
837 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
838 break;
840 case GIMPLE_ASSIGN:
841 /* A GIMPLE_ASSIGN may throw internally and thus be considered
842 control-altering. */
843 if (is_ctrl_altering_stmt (last))
844 make_eh_edges (last);
845 fallthru = true;
846 break;
848 case GIMPLE_ASM:
849 make_gimple_asm_edges (bb);
850 fallthru = true;
851 break;
853 CASE_GIMPLE_OMP:
854 fallthru = make_gimple_omp_edges (bb, &cur_region,
855 &cur_omp_region_idx);
856 if (cur_region && bb_to_omp_idx == NULL)
857 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
858 break;
860 case GIMPLE_TRANSACTION:
862 tree abort_label =
863 gimple_transaction_label (as_a <gimple_transaction> (last));
864 if (abort_label)
865 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
866 fallthru = true;
868 break;
870 default:
871 gcc_assert (!stmt_ends_bb_p (last));
872 fallthru = true;
875 else
876 fallthru = true;
878 if (fallthru)
879 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
882 /* Computed gotos are hell to deal with, especially if there are
883 lots of them with a large number of destinations. So we factor
884 them to a common computed goto location before we build the
885 edge list. After we convert back to normal form, we will un-factor
886 the computed gotos since factoring introduces an unwanted jump.
887 For non-local gotos and abnormal edges from calls to calls that return
888 twice or forced labels, factor the abnormal edges too, by having all
889 abnormal edges from the calls go to a common artificial basic block
890 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
891 basic block to all forced labels and calls returning twice.
892 We do this per-OpenMP structured block, because those regions
893 are guaranteed to be single entry single exit by the standard,
894 so it is not allowed to enter or exit such regions abnormally this way,
895 thus all computed gotos, non-local gotos and setjmp/longjmp calls
896 must not transfer control across SESE region boundaries. */
897 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
899 gimple_stmt_iterator gsi;
900 basic_block dispatcher_bb_array[2] = { NULL, NULL };
901 basic_block *dispatcher_bbs = dispatcher_bb_array;
902 int count = n_basic_blocks_for_fn (cfun);
904 if (bb_to_omp_idx)
905 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
907 FOR_EACH_BB_FN (bb, cfun)
909 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
911 gimple label_stmt = gsi_stmt (gsi);
912 tree target;
914 if (gimple_code (label_stmt) != GIMPLE_LABEL)
915 break;
917 target = gimple_label_label (label_stmt);
919 /* Make an edge to every label block that has been marked as a
920 potential target for a computed goto or a non-local goto. */
921 if (FORCED_LABEL (target))
922 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
923 &ab_edge_goto, true);
924 if (DECL_NONLOCAL (target))
926 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
927 &ab_edge_call, false);
928 break;
932 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
933 gsi_next_nondebug (&gsi);
934 if (!gsi_end_p (gsi))
936 /* Make an edge to every setjmp-like call. */
937 gimple call_stmt = gsi_stmt (gsi);
938 if (is_gimple_call (call_stmt)
939 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
940 || gimple_call_builtin_p (call_stmt,
941 BUILT_IN_SETJMP_RECEIVER)))
942 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
943 &ab_edge_call, false);
947 if (bb_to_omp_idx)
948 XDELETE (dispatcher_bbs);
951 XDELETE (bb_to_omp_idx);
953 free_omp_regions ();
955 /* Fold COND_EXPR_COND of each COND_EXPR. */
956 fold_cond_expr_cond ();
959 /* Find the next available discriminator value for LOCUS. The
960 discriminator distinguishes among several basic blocks that
961 share a common locus, allowing for more accurate sample-based
962 profiling. */
964 static int
965 next_discriminator_for_locus (location_t locus)
967 struct locus_discrim_map item;
968 struct locus_discrim_map **slot;
970 item.locus = locus;
971 item.discriminator = 0;
972 slot = discriminator_per_locus->find_slot_with_hash (
973 &item, LOCATION_LINE (locus), INSERT);
974 gcc_assert (slot);
975 if (*slot == HTAB_EMPTY_ENTRY)
977 *slot = XNEW (struct locus_discrim_map);
978 gcc_assert (*slot);
979 (*slot)->locus = locus;
980 (*slot)->discriminator = 0;
982 (*slot)->discriminator++;
983 return (*slot)->discriminator;
986 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
988 static bool
989 same_line_p (location_t locus1, location_t locus2)
991 expanded_location from, to;
993 if (locus1 == locus2)
994 return true;
996 from = expand_location (locus1);
997 to = expand_location (locus2);
999 if (from.line != to.line)
1000 return false;
1001 if (from.file == to.file)
1002 return true;
1003 return (from.file != NULL
1004 && to.file != NULL
1005 && filename_cmp (from.file, to.file) == 0);
1008 /* Assign discriminators to each basic block. */
1010 static void
1011 assign_discriminators (void)
1013 basic_block bb;
1015 FOR_EACH_BB_FN (bb, cfun)
1017 edge e;
1018 edge_iterator ei;
1019 gimple last = last_stmt (bb);
1020 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1022 if (locus == UNKNOWN_LOCATION)
1023 continue;
1025 FOR_EACH_EDGE (e, ei, bb->succs)
1027 gimple first = first_non_label_stmt (e->dest);
1028 gimple last = last_stmt (e->dest);
1029 if ((first && same_line_p (locus, gimple_location (first)))
1030 || (last && same_line_p (locus, gimple_location (last))))
1032 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1033 bb->discriminator = next_discriminator_for_locus (locus);
1034 else
1035 e->dest->discriminator = next_discriminator_for_locus (locus);
1041 /* Create the edges for a GIMPLE_COND starting at block BB. */
1043 static void
1044 make_cond_expr_edges (basic_block bb)
1046 gimple entry = last_stmt (bb);
1047 gimple then_stmt, else_stmt;
1048 basic_block then_bb, else_bb;
1049 tree then_label, else_label;
1050 edge e;
1052 gcc_assert (entry);
1053 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1055 /* Entry basic blocks for each component. */
1056 then_label = gimple_cond_true_label (entry);
1057 else_label = gimple_cond_false_label (entry);
1058 then_bb = label_to_block (then_label);
1059 else_bb = label_to_block (else_label);
1060 then_stmt = first_stmt (then_bb);
1061 else_stmt = first_stmt (else_bb);
1063 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1064 e->goto_locus = gimple_location (then_stmt);
1065 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1066 if (e)
1067 e->goto_locus = gimple_location (else_stmt);
1069 /* We do not need the labels anymore. */
1070 gimple_cond_set_true_label (entry, NULL_TREE);
1071 gimple_cond_set_false_label (entry, NULL_TREE);
1075 /* Called for each element in the hash table (P) as we delete the
1076 edge to cases hash table.
1078 Clear all the TREE_CHAINs to prevent problems with copying of
1079 SWITCH_EXPRs and structure sharing rules, then free the hash table
1080 element. */
1082 bool
1083 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1085 tree t, next;
1087 for (t = value; t; t = next)
1089 next = CASE_CHAIN (t);
1090 CASE_CHAIN (t) = NULL;
1093 return true;
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  /* Recording must not already be active; nested recording sessions
     are not supported.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  /* Track the indices of basic blocks ending in a GIMPLE_SWITCH that
     we touch, so end_recording_case_labels can re-group their case
     labels afterwards.  */
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1106 /* Return nonzero if we are recording information for case labels. */
1108 static bool
1109 recording_case_labels_p (void)
1111 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;

  /* First unlink the CASE_CHAINs we threaded through the case labels,
     then discard the map itself and mark recording as inactive.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;

  /* Re-group the case labels of every switch whose block was touched
     while recording; label redirections may have made adjacent cases
     mergeable.  The block may have been deleted meanwhile, hence the
     NULL check.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gimple_switch> (stmt));
	}
    }

  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple_switch t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  The reference S aliases the map slot, so the
	 prepend below updates the table in place.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* E must have been inserted by the loop above, since at least the
     default label of T maps to some edge out of E->src.  */
  return *edge_to_cases->get (e);
}
1179 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1181 static void
1182 make_gimple_switch_edges (gimple_switch entry, basic_block bb)
1184 size_t i, n;
1186 n = gimple_switch_num_labels (entry);
1188 for (i = 0; i < n; ++i)
1190 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1191 basic_block label_bb = label_to_block (lab);
1192 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned it a UID; re-read it so the map
	 lookup below works.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* Out-of-range UID means the label was never placed in a block.  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The goto statement itself is now redundant -- the fallthru edge
	 encodes the transfer of control -- so remove it.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges; the caller is responsible
     for adding them.  */
  return true;
}
1246 /* Create edges for an asm statement with labels at block BB. */
1248 static void
1249 make_gimple_asm_edges (basic_block bb)
1251 gimple_asm stmt = as_a <gimple_asm> (last_stmt (bb));
1252 int i, n = gimple_asm_nlabels (stmt);
1254 for (i = 0; i < n; ++i)
1256 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1257 basic_block label_bb = label_to_block (label);
1258 make_edge (bb, label_bb, 0);
/*---------------------------------------------------------------------------
				 Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.
   Allocated and freed by cleanup_dead_labels; populated lazily through
   main_block_label.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  /* Mark the leading label as referenced so cleanup_dead_labels does
     not purge it in its final sweep.  */
  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do if this function has no EH state.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label to the leading
     label of its block, keeping the EH_LANDING_PAD_NR mapping in sync:
     the old label is detached (NR = 0) and the new one adopts the
     landing pad's index.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Do the same for the labels recorded in the EH region tree.
     Cleanup and must-not-throw regions carry no labels.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* Zero-initialized: no leading label, not used.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Redirect both arms of the conditional.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gimple_switch switch_stmt = as_a <gimple_switch> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gimple_asm asm_stmt = as_a <gimple_asm> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    /* Redirect every label operand of an "asm goto".  */
	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gimple_transaction trans_stmt = as_a <gimple_transaction> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	    break;
	  }

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the leading label plus anything externally visible;
	     delete the rest.  gsi_remove advances the iterator.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple_switch stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Index 0 is the default label, so scanning starts at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  The slot is nulled out here and compacted
	 away in the final loop below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      /* The running upper bound of the candidate merged range.  */
      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  J skips over the NULL slots created
     above; I writes the surviving labels densely from the front.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1614 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1615 and scan the sorted vector of cases. Combine the ones jumping to the
1616 same label. */
1618 void
1619 group_case_labels (void)
1621 basic_block bb;
1623 FOR_EACH_BB_FN (bb, cfun)
1625 gimple stmt = last_stmt (bb);
1626 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1627 group_case_labels_stmt (as_a <gimple_switch> (stmt));
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must fall through unconditionally and exclusively into B, and B
     must have no other predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The
     location survives only if some statement adjacent to the join
     already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* PHIs need no folding/EH maintenance; everything else is
	 re-folded and updated after substitution.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have made the statement no-longer-throwing;
	     purge EH edges that became dead.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  /* Every use must have been rewritten above.  */
  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A deleted landing-pad label leaves its pad without a
	     post-landing-pad; clear the back-pointer.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1933 /* Return the one of two successors of BB that is not reachable by a
1934 complex edge, if there is one. Else, return BB. We use
1935 this in optimizations that use post-dominators for their heuristics,
1936 to catch the cases in C++ where function calls are involved. */
1938 basic_block
1939 single_noncomplex_succ (basic_block bb)
1941 edge e0, e1;
1942 if (EDGE_COUNT (bb->succs) != 2)
1943 return bb;
1945 e0 = EDGE_SUCC (bb, 0);
1946 e1 = EDGE_SUCC (bb, 1);
1947 if (e0->flags & EDGE_COMPLEX)
1948 return e1->dest;
1949 if (e1->flags & EDGE_COMPLEX)
1950 return e0->dest;
1952 return bb;
1955 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1957 void
1958 notice_special_calls (gimple_call call)
1960 int flags = gimple_call_flags (call);
1962 if (flags & ECF_MAY_BE_ALLOCA)
1963 cfun->calls_alloca = true;
1964 if (flags & ECF_RETURNS_TWICE)
1965 cfun->calls_setjmp = true;
1969 /* Clear flags set by notice_special_calls. Used by dead code removal
1970 to update the flags. */
1972 void
1973 clear_special_calls (void)
1975 cfun->calls_alloca = false;
1976 cfun->calls_setjmp = false;
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Always take index 0 since
     remove_edge compacts the successor vector.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the still-referenced label to the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* gsi_remove leaves the iterator pointing past the removed
	     statement; re-anchor at the (new) last statement, otherwise
	     keep stepping backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only a constant predicate selects a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gimple_switch> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* is_ctrl_stmt guarantees one of the cases above matched.  */
  gcc_unreachable ();
}
2121 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2122 statement, determine which of the outgoing edges will be taken out of the
2123 block. Return NULL if either edge may be taken. */
2125 static edge
2126 find_taken_edge_computed_goto (basic_block bb, tree val)
2128 basic_block dest;
2129 edge e = NULL;
2131 dest = label_to_block (val);
2132 if (dest)
2134 e = find_edge (bb, dest);
2135 gcc_assert (e != NULL);
2138 return e;
2141 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2142 statement, determine which of the two edges will be taken out of the
2143 block. Return NULL if either edge may be taken. */
2145 static edge
2146 find_taken_edge_cond_expr (basic_block bb, tree val)
2148 edge true_edge, false_edge;
2150 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2152 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2153 return (integer_zerop (val) ? false_edge : true_edge);
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (gimple_switch switch_stmt, basic_block bb,
			     tree val)
{
  basic_block dest_bb;
  edge e;
  tree taken_case;

  /* The matching case (or the default label) names the destination
     block; the CFG edge to it must exist.  */
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gimple_switch switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over labels 1..n-1 (label 0 is the default).
     Invariant: any matching label has index in (low, high).  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No case matched; the default label is taken.  */
  return default_case;
}
2219 /* Dump a basic block on stderr. */
2221 void
2222 gimple_debug_bb (basic_block bb)
2224 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2228 /* Dump basic block with index N on stderr. */
2230 basic_block
2231 gimple_debug_bb_n (int n)
2233 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2234 return BASIC_BLOCK_FOR_FN (cfun, n);
2238 /* Dump the CFG on stderr.
2240 FLAGS are the same used by the tree dumping functions
2241 (see TDF_* in dumpfile.h). */
2243 void
2244 gimple_debug_cfg (int flags)
2246 gimple_dump_cfg (stderr, flags);
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, persistent across calls so each
     dump can report the maximum seen so far.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable, making it callable from a debugger.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2335 /*---------------------------------------------------------------------------
2336 Miscellaneous helpers
2337 ---------------------------------------------------------------------------*/
2339 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2340 flow. Transfers of control flow associated with EH are excluded. */
2342 static bool
2343 call_can_make_abnormal_goto (gimple t)
2345 /* If the function has no non-local labels, then a call cannot make an
2346 abnormal transfer of control. */
2347 if (!cfun->has_nonlocal_label
2348 && !cfun->calls_setjmp)
2349 return false;
2351 /* Likewise if the call has no side effects. */
2352 if (!gimple_has_side_effects (t))
2353 return false;
2355 /* Likewise if the called function is leaf. */
2356 if (gimple_call_flags (t) & ECF_LEAF)
2357 return false;
2359 return true;
2363 /* Return true if T can make an abnormal transfer of control flow.
2364 Transfers of control flow associated with EH are excluded. */
2366 bool
2367 stmt_can_make_abnormal_goto (gimple t)
2369 if (computed_goto_p (t))
2370 return true;
2371 if (is_gimple_call (t))
2372 return call_can_make_abnormal_goto (t);
2373 return false;
2377 /* Return true if T represents a stmt that always transfers control. */
2379 bool
2380 is_ctrl_stmt (gimple t)
2382 switch (gimple_code (t))
2384 case GIMPLE_COND:
2385 case GIMPLE_SWITCH:
2386 case GIMPLE_GOTO:
2387 case GIMPLE_RETURN:
2388 case GIMPLE_RESX:
2389 return true;
2390 default:
2391 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with any goto labels ("asm goto") can branch.  */
      if (gimple_asm_nlabels (as_a <gimple_asm> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2441 /* Return true if T is a simple local goto. */
2443 bool
2444 simple_goto_p (gimple t)
2446 return (gimple_code (t) == GIMPLE_GOTO
2447 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A nonlocal label after another label still forces a block
	     boundary before it.  */
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* Label coalesced with the previous one; count it for the
	     statistics reported by dump_cfg_stats.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2495 /* Return true if T should end a basic block. */
2497 bool
2498 stmt_ends_bb_p (gimple t)
2500 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Release the label-index -> basic-block mapping for CFUN.  */
  vec_free (label_to_block_map_for_fn (cfun));
}
2512 /* Return the first statement in basic block BB. */
2514 gimple
2515 first_stmt (basic_block bb)
2517 gimple_stmt_iterator i = gsi_start_bb (bb);
2518 gimple stmt = NULL;
2520 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2522 gsi_next (&i);
2523 stmt = NULL;
2525 return stmt;
2528 /* Return the first non-label statement in basic block BB. */
2530 static gimple
2531 first_non_label_stmt (basic_block bb)
2533 gimple_stmt_iterator i = gsi_start_bb (bb);
2534 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2535 gsi_next (&i);
2536 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2539 /* Return the last statement in basic block BB. */
2541 gimple
2542 last_stmt (basic_block bb)
2544 gimple_stmt_iterator i = gsi_last_bb (bb);
2545 gimple stmt = NULL;
2547 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2549 gsi_prev (&i);
2550 stmt = NULL;
2552 return stmt;
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

gimple
last_and_only_stmt (basic_block bb)
{
  /* Iterate ignoring debug statements throughout.  */
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
  gimple last, prev;

  if (gsi_end_p (i))
    return NULL;

  last = gsi_stmt (i);
  gsi_prev_nondebug (&i);
  if (gsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just gsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = gsi_stmt (i);
  if (gimple_code (prev) == GIMPLE_LABEL)
    return last;
  else
    return NULL;
}
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gimple_phi_iterator phis;

  /* The queued arguments were recorded by redirect_edge_var_map_add
     when OLD_EDGE was redirected; nothing to do if none were saved.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def) pairs in lock-step with the PHI nodes
     of NEW_EDGE's destination; the order of both must match.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple_phi phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* Drop the queued data now that it has been consumed.  */
  redirect_edge_var_map_clear (old_edge);
}
2616 /* Returns the basic block after which the new basic block created
2617 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2618 near its "logical" location. This is of most help to humans looking
2619 at debugging dumps. */
2621 static basic_block
2622 split_edge_bb_loc (edge edge_in)
2624 basic_block dest = edge_in->dest;
2625 basic_block dest_prev = dest->prev_bb;
2627 if (dest_prev)
2629 edge e = find_edge (dest_prev, dest);
2630 if (e && !(e->flags & EDGE_COMPLEX))
2631 return edge_in->src;
2633 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits EDGE_IN's execution frequency and count,
     and falls through unconditionally to the old destination.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Retarget EDGE_IN at the new block; this queues DEST's PHI
     arguments for EDGE_IN, which we then reinstall on NEW_EDGE.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T with base object BASE.
   Returns the offending tree on error, NULL_TREE if T is fine.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Recompute the invariant/side-effect flags and check that the values
     previously stored on T were up to date.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  /* Only plain declarations are subject to the DECL_GIMPLE_REG_P check.  */
  if (!(TREE_CODE (base) == VAR_DECL
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  /* Taking the address of a variable is incompatible with it being
     treated as a GIMPLE register.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending tree on error (which also
   stops the walk), NULL otherwise.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types carry no GIMPLE operands to check; don't recurse into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* Lowered to MEM_REF in GIMPLE; must not appear here.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes are eliminated during gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  /* Position (t2) and size (t1) must be host-representable and
	     consistent with the result type's precision.  */
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the handled components outermost-first, checking each level.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (if present) must be GIMPLE
	     values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally check the innermost object: invariants are OK unless an
     lvalue is required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3176 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3177 list of pointer-to types that is trivially convertible to DEST. */
3179 static bool
3180 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3182 tree src;
3184 if (!TYPE_POINTER_TO (src_obj))
3185 return true;
3187 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3188 if (useless_type_conversion_p (dest, src))
3189 return true;
3191 return false;
3194 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3195 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3197 static bool
3198 valid_fixed_convert_types_p (tree type1, tree type2)
3200 return (FIXED_POINT_TYPE_P (type1)
3201 && (INTEGRAL_TYPE_P (type2)
3202 || SCALAR_FLOAT_TYPE_P (type2)
3203 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no function operand; regular calls must have
     exactly one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on const/pure
     functions.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be GIMPLE values; aggregates must
	 at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A scalar boolean result is only valid for scalar operands.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple_assign stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers into distinct address spaces.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer -> float, either scalar on both sides or vector on
	   both sides.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float -> integer, again scalar/scalar or vector/vector.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3578 /* Verify a gimple assignment statement STMT with a binary rhs.
3579 Returns true if anything is wrong. */
3581 static bool
3582 verify_gimple_assign_binary (gimple_assign stmt)
3584 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3585 tree lhs = gimple_assign_lhs (stmt);
3586 tree lhs_type = TREE_TYPE (lhs);
3587 tree rhs1 = gimple_assign_rhs1 (stmt);
3588 tree rhs1_type = TREE_TYPE (rhs1);
3589 tree rhs2 = gimple_assign_rhs2 (stmt);
3590 tree rhs2_type = TREE_TYPE (rhs2);
3592 if (!is_gimple_reg (lhs))
3594 error ("non-register as LHS of binary operation");
3595 return true;
3598 if (!is_gimple_val (rhs1)
3599 || !is_gimple_val (rhs2))
3601 error ("invalid operands in binary operation");
3602 return true;
3605 /* First handle operations that involve different types. */
3606 switch (rhs_code)
3608 case COMPLEX_EXPR:
3610 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3611 || !(INTEGRAL_TYPE_P (rhs1_type)
3612 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3613 || !(INTEGRAL_TYPE_P (rhs2_type)
3614 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3616 error ("type mismatch in complex expression");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 debug_generic_expr (rhs2_type);
3620 return true;
3623 return false;
3626 case LSHIFT_EXPR:
3627 case RSHIFT_EXPR:
3628 case LROTATE_EXPR:
3629 case RROTATE_EXPR:
3631 /* Shifts and rotates are ok on integral types, fixed point
3632 types and integer vector types. */
3633 if ((!INTEGRAL_TYPE_P (rhs1_type)
3634 && !FIXED_POINT_TYPE_P (rhs1_type)
3635 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3636 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3637 || (!INTEGRAL_TYPE_P (rhs2_type)
3638 /* Vector shifts of vectors are also ok. */
3639 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3640 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3641 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3642 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3643 || !useless_type_conversion_p (lhs_type, rhs1_type))
3645 error ("type mismatch in shift expression");
3646 debug_generic_expr (lhs_type);
3647 debug_generic_expr (rhs1_type);
3648 debug_generic_expr (rhs2_type);
3649 return true;
3652 return false;
3655 case VEC_LSHIFT_EXPR:
3656 case VEC_RSHIFT_EXPR:
3658 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3659 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3660 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3661 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3662 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3663 || (!INTEGRAL_TYPE_P (rhs2_type)
3664 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3665 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3666 || !useless_type_conversion_p (lhs_type, rhs1_type))
3668 error ("type mismatch in vector shift expression");
3669 debug_generic_expr (lhs_type);
3670 debug_generic_expr (rhs1_type);
3671 debug_generic_expr (rhs2_type);
3672 return true;
3674 /* For shifting a vector of non-integral components we
3675 only allow shifting by a constant multiple of the element size. */
3676 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3677 && (TREE_CODE (rhs2) != INTEGER_CST
3678 || !div_if_zero_remainder (rhs2,
3679 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3681 error ("non-element sized vector shift of floating point vector");
3682 return true;
3685 return false;
3688 case WIDEN_LSHIFT_EXPR:
3690 if (!INTEGRAL_TYPE_P (lhs_type)
3691 || !INTEGRAL_TYPE_P (rhs1_type)
3692 || TREE_CODE (rhs2) != INTEGER_CST
3693 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3695 error ("type mismatch in widening vector shift expression");
3696 debug_generic_expr (lhs_type);
3697 debug_generic_expr (rhs1_type);
3698 debug_generic_expr (rhs2_type);
3699 return true;
3702 return false;
3705 case VEC_WIDEN_LSHIFT_HI_EXPR:
3706 case VEC_WIDEN_LSHIFT_LO_EXPR:
3708 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3709 || TREE_CODE (lhs_type) != VECTOR_TYPE
3710 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3711 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3712 || TREE_CODE (rhs2) != INTEGER_CST
3713 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3714 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3716 error ("type mismatch in widening vector shift expression");
3717 debug_generic_expr (lhs_type);
3718 debug_generic_expr (rhs1_type);
3719 debug_generic_expr (rhs2_type);
3720 return true;
3723 return false;
3726 case PLUS_EXPR:
3727 case MINUS_EXPR:
3729 tree lhs_etype = lhs_type;
3730 tree rhs1_etype = rhs1_type;
3731 tree rhs2_etype = rhs2_type;
3732 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3734 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3735 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3737 error ("invalid non-vector operands to vector valued plus");
3738 return true;
3740 lhs_etype = TREE_TYPE (lhs_type);
3741 rhs1_etype = TREE_TYPE (rhs1_type);
3742 rhs2_etype = TREE_TYPE (rhs2_type);
3744 if (POINTER_TYPE_P (lhs_etype)
3745 || POINTER_TYPE_P (rhs1_etype)
3746 || POINTER_TYPE_P (rhs2_etype))
3748 error ("invalid (pointer) operands to plus/minus");
3749 return true;
3752 /* Continue with generic binary expression handling. */
3753 break;
3756 case POINTER_PLUS_EXPR:
3758 if (!POINTER_TYPE_P (rhs1_type)
3759 || !useless_type_conversion_p (lhs_type, rhs1_type)
3760 || !ptrofftype_p (rhs2_type))
3762 error ("type mismatch in pointer plus expression");
3763 debug_generic_stmt (lhs_type);
3764 debug_generic_stmt (rhs1_type);
3765 debug_generic_stmt (rhs2_type);
3766 return true;
3769 return false;
3772 case TRUTH_ANDIF_EXPR:
3773 case TRUTH_ORIF_EXPR:
3774 case TRUTH_AND_EXPR:
3775 case TRUTH_OR_EXPR:
3776 case TRUTH_XOR_EXPR:
3778 gcc_unreachable ();
3780 case LT_EXPR:
3781 case LE_EXPR:
3782 case GT_EXPR:
3783 case GE_EXPR:
3784 case EQ_EXPR:
3785 case NE_EXPR:
3786 case UNORDERED_EXPR:
3787 case ORDERED_EXPR:
3788 case UNLT_EXPR:
3789 case UNLE_EXPR:
3790 case UNGT_EXPR:
3791 case UNGE_EXPR:
3792 case UNEQ_EXPR:
3793 case LTGT_EXPR:
3794 /* Comparisons are also binary, but the result type is not
3795 connected to the operand types. */
3796 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3798 case WIDEN_MULT_EXPR:
3799 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3800 return true;
3801 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3802 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3804 case WIDEN_SUM_EXPR:
3805 case VEC_WIDEN_MULT_HI_EXPR:
3806 case VEC_WIDEN_MULT_LO_EXPR:
3807 case VEC_WIDEN_MULT_EVEN_EXPR:
3808 case VEC_WIDEN_MULT_ODD_EXPR:
3809 case VEC_PACK_TRUNC_EXPR:
3810 case VEC_PACK_SAT_EXPR:
3811 case VEC_PACK_FIX_TRUNC_EXPR:
3812 /* FIXME. */
3813 return false;
3815 case MULT_EXPR:
3816 case MULT_HIGHPART_EXPR:
3817 case TRUNC_DIV_EXPR:
3818 case CEIL_DIV_EXPR:
3819 case FLOOR_DIV_EXPR:
3820 case ROUND_DIV_EXPR:
3821 case TRUNC_MOD_EXPR:
3822 case CEIL_MOD_EXPR:
3823 case FLOOR_MOD_EXPR:
3824 case ROUND_MOD_EXPR:
3825 case RDIV_EXPR:
3826 case EXACT_DIV_EXPR:
3827 case MIN_EXPR:
3828 case MAX_EXPR:
3829 case BIT_IOR_EXPR:
3830 case BIT_XOR_EXPR:
3831 case BIT_AND_EXPR:
3832 /* Continue with generic binary expression handling. */
3833 break;
3835 default:
3836 gcc_unreachable ();
3839 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3840 || !useless_type_conversion_p (lhs_type, rhs2_type))
3842 error ("type mismatch in binary expression");
3843 debug_generic_stmt (lhs_type);
3844 debug_generic_stmt (rhs1_type);
3845 debug_generic_stmt (rhs2_type);
3846 return true;
3849 return false;
3852 /* Verify a gimple assignment statement STMT with a ternary rhs.
3853 Returns true if anything is wrong. */
3855 static bool
3856 verify_gimple_assign_ternary (gimple_assign stmt)
3858 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3859 tree lhs = gimple_assign_lhs (stmt);
3860 tree lhs_type = TREE_TYPE (lhs);
3861 tree rhs1 = gimple_assign_rhs1 (stmt);
3862 tree rhs1_type = TREE_TYPE (rhs1);
3863 tree rhs2 = gimple_assign_rhs2 (stmt);
3864 tree rhs2_type = TREE_TYPE (rhs2);
3865 tree rhs3 = gimple_assign_rhs3 (stmt);
3866 tree rhs3_type = TREE_TYPE (rhs3);
3868 if (!is_gimple_reg (lhs))
3870 error ("non-register as LHS of ternary operation");
3871 return true;
3874 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3875 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3876 || !is_gimple_val (rhs2)
3877 || !is_gimple_val (rhs3))
3879 error ("invalid operands in ternary operation");
3880 return true;
3883 /* First handle operations that involve different types. */
3884 switch (rhs_code)
3886 case WIDEN_MULT_PLUS_EXPR:
3887 case WIDEN_MULT_MINUS_EXPR:
3888 if ((!INTEGRAL_TYPE_P (rhs1_type)
3889 && !FIXED_POINT_TYPE_P (rhs1_type))
3890 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3891 || !useless_type_conversion_p (lhs_type, rhs3_type)
3892 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3893 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3895 error ("type mismatch in widening multiply-accumulate expression");
3896 debug_generic_expr (lhs_type);
3897 debug_generic_expr (rhs1_type);
3898 debug_generic_expr (rhs2_type);
3899 debug_generic_expr (rhs3_type);
3900 return true;
3902 break;
3904 case FMA_EXPR:
3905 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3906 || !useless_type_conversion_p (lhs_type, rhs2_type)
3907 || !useless_type_conversion_p (lhs_type, rhs3_type))
3909 error ("type mismatch in fused multiply-add expression");
3910 debug_generic_expr (lhs_type);
3911 debug_generic_expr (rhs1_type);
3912 debug_generic_expr (rhs2_type);
3913 debug_generic_expr (rhs3_type);
3914 return true;
3916 break;
3918 case COND_EXPR:
3919 case VEC_COND_EXPR:
3920 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3921 || !useless_type_conversion_p (lhs_type, rhs3_type))
3923 error ("type mismatch in conditional expression");
3924 debug_generic_expr (lhs_type);
3925 debug_generic_expr (rhs2_type);
3926 debug_generic_expr (rhs3_type);
3927 return true;
3929 break;
3931 case VEC_PERM_EXPR:
3932 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3933 || !useless_type_conversion_p (lhs_type, rhs2_type))
3935 error ("type mismatch in vector permute expression");
3936 debug_generic_expr (lhs_type);
3937 debug_generic_expr (rhs1_type);
3938 debug_generic_expr (rhs2_type);
3939 debug_generic_expr (rhs3_type);
3940 return true;
3943 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3944 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3945 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3947 error ("vector types expected in vector permute expression");
3948 debug_generic_expr (lhs_type);
3949 debug_generic_expr (rhs1_type);
3950 debug_generic_expr (rhs2_type);
3951 debug_generic_expr (rhs3_type);
3952 return true;
3955 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3956 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3957 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3958 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3959 != TYPE_VECTOR_SUBPARTS (lhs_type))
3961 error ("vectors with different element number found "
3962 "in vector permute expression");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs1_type);
3965 debug_generic_expr (rhs2_type);
3966 debug_generic_expr (rhs3_type);
3967 return true;
3970 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3971 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3972 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3974 error ("invalid mask type in vector permute expression");
3975 debug_generic_expr (lhs_type);
3976 debug_generic_expr (rhs1_type);
3977 debug_generic_expr (rhs2_type);
3978 debug_generic_expr (rhs3_type);
3979 return true;
3982 return false;
3984 case SAD_EXPR:
3985 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
3986 || !useless_type_conversion_p (lhs_type, rhs3_type)
3987 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
3988 (TYPE_MODE (TREE_TYPE (rhs1_type))))
3989 > GET_MODE_BITSIZE (GET_MODE_INNER
3990 (TYPE_MODE (TREE_TYPE (lhs_type)))))
3992 error ("type mismatch in sad expression");
3993 debug_generic_expr (lhs_type);
3994 debug_generic_expr (rhs1_type);
3995 debug_generic_expr (rhs2_type);
3996 debug_generic_expr (rhs3_type);
3997 return true;
4000 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4001 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4002 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4004 error ("vector types expected in sad expression");
4005 debug_generic_expr (lhs_type);
4006 debug_generic_expr (rhs1_type);
4007 debug_generic_expr (rhs2_type);
4008 debug_generic_expr (rhs3_type);
4009 return true;
4012 return false;
4014 case DOT_PROD_EXPR:
4015 case REALIGN_LOAD_EXPR:
4016 /* FIXME. */
4017 return false;
4019 default:
4020 gcc_unreachable ();
4022 return false;
4025 /* Verify a gimple assignment statement STMT with a single rhs.
4026 Returns true if anything is wrong. */
4028 static bool
4029 verify_gimple_assign_single (gimple_assign stmt)
4031 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4032 tree lhs = gimple_assign_lhs (stmt);
4033 tree lhs_type = TREE_TYPE (lhs);
4034 tree rhs1 = gimple_assign_rhs1 (stmt);
4035 tree rhs1_type = TREE_TYPE (rhs1);
4036 bool res = false;
4038 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4040 error ("non-trivial conversion at assignment");
4041 debug_generic_expr (lhs_type);
4042 debug_generic_expr (rhs1_type);
4043 return true;
4046 if (gimple_clobber_p (stmt)
4047 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4049 error ("non-decl/MEM_REF LHS in clobber statement");
4050 debug_generic_expr (lhs);
4051 return true;
4054 if (handled_component_p (lhs)
4055 || TREE_CODE (lhs) == MEM_REF
4056 || TREE_CODE (lhs) == TARGET_MEM_REF)
4057 res |= verify_types_in_gimple_reference (lhs, true);
4059 /* Special codes we cannot handle via their class. */
4060 switch (rhs_code)
4062 case ADDR_EXPR:
4064 tree op = TREE_OPERAND (rhs1, 0);
4065 if (!is_gimple_addressable (op))
4067 error ("invalid operand in unary expression");
4068 return true;
4071 /* Technically there is no longer a need for matching types, but
4072 gimple hygiene asks for this check. In LTO we can end up
4073 combining incompatible units and thus end up with addresses
4074 of globals that change their type to a common one. */
4075 if (!in_lto_p
4076 && !types_compatible_p (TREE_TYPE (op),
4077 TREE_TYPE (TREE_TYPE (rhs1)))
4078 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4079 TREE_TYPE (op)))
4081 error ("type mismatch in address expression");
4082 debug_generic_stmt (TREE_TYPE (rhs1));
4083 debug_generic_stmt (TREE_TYPE (op));
4084 return true;
4087 return verify_types_in_gimple_reference (op, true);
4090 /* tcc_reference */
4091 case INDIRECT_REF:
4092 error ("INDIRECT_REF in gimple IL");
4093 return true;
4095 case COMPONENT_REF:
4096 case BIT_FIELD_REF:
4097 case ARRAY_REF:
4098 case ARRAY_RANGE_REF:
4099 case VIEW_CONVERT_EXPR:
4100 case REALPART_EXPR:
4101 case IMAGPART_EXPR:
4102 case TARGET_MEM_REF:
4103 case MEM_REF:
4104 if (!is_gimple_reg (lhs)
4105 && is_gimple_reg_type (TREE_TYPE (lhs)))
4107 error ("invalid rhs for gimple memory store");
4108 debug_generic_stmt (lhs);
4109 debug_generic_stmt (rhs1);
4110 return true;
4112 return res || verify_types_in_gimple_reference (rhs1, false);
4114 /* tcc_constant */
4115 case SSA_NAME:
4116 case INTEGER_CST:
4117 case REAL_CST:
4118 case FIXED_CST:
4119 case COMPLEX_CST:
4120 case VECTOR_CST:
4121 case STRING_CST:
4122 return res;
4124 /* tcc_declaration */
4125 case CONST_DECL:
4126 return res;
4127 case VAR_DECL:
4128 case PARM_DECL:
4129 if (!is_gimple_reg (lhs)
4130 && !is_gimple_reg (rhs1)
4131 && is_gimple_reg_type (TREE_TYPE (lhs)))
4133 error ("invalid rhs for gimple memory store");
4134 debug_generic_stmt (lhs);
4135 debug_generic_stmt (rhs1);
4136 return true;
4138 return res;
4140 case CONSTRUCTOR:
4141 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4143 unsigned int i;
4144 tree elt_i, elt_v, elt_t = NULL_TREE;
4146 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4147 return res;
4148 /* For vector CONSTRUCTORs we require that either it is empty
4149 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4150 (then the element count must be correct to cover the whole
4151 outer vector and index must be NULL on all elements, or it is
4152 a CONSTRUCTOR of scalar elements, where we as an exception allow
4153 smaller number of elements (assuming zero filling) and
4154 consecutive indexes as compared to NULL indexes (such
4155 CONSTRUCTORs can appear in the IL from FEs). */
4156 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4158 if (elt_t == NULL_TREE)
4160 elt_t = TREE_TYPE (elt_v);
4161 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4163 tree elt_t = TREE_TYPE (elt_v);
4164 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4165 TREE_TYPE (elt_t)))
4167 error ("incorrect type of vector CONSTRUCTOR"
4168 " elements");
4169 debug_generic_stmt (rhs1);
4170 return true;
4172 else if (CONSTRUCTOR_NELTS (rhs1)
4173 * TYPE_VECTOR_SUBPARTS (elt_t)
4174 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4176 error ("incorrect number of vector CONSTRUCTOR"
4177 " elements");
4178 debug_generic_stmt (rhs1);
4179 return true;
4182 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4183 elt_t))
4185 error ("incorrect type of vector CONSTRUCTOR elements");
4186 debug_generic_stmt (rhs1);
4187 return true;
4189 else if (CONSTRUCTOR_NELTS (rhs1)
4190 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4192 error ("incorrect number of vector CONSTRUCTOR elements");
4193 debug_generic_stmt (rhs1);
4194 return true;
4197 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4199 error ("incorrect type of vector CONSTRUCTOR elements");
4200 debug_generic_stmt (rhs1);
4201 return true;
4203 if (elt_i != NULL_TREE
4204 && (TREE_CODE (elt_t) == VECTOR_TYPE
4205 || TREE_CODE (elt_i) != INTEGER_CST
4206 || compare_tree_int (elt_i, i) != 0))
4208 error ("vector CONSTRUCTOR with non-NULL element index");
4209 debug_generic_stmt (rhs1);
4210 return true;
4212 if (!is_gimple_val (elt_v))
4214 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4215 debug_generic_stmt (rhs1);
4216 return true;
4220 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4222 error ("non-vector CONSTRUCTOR with elements");
4223 debug_generic_stmt (rhs1);
4224 return true;
4226 return res;
4227 case OBJ_TYPE_REF:
4228 case ASSERT_EXPR:
4229 case WITH_SIZE_EXPR:
4230 /* FIXME. */
4231 return res;
4233 default:;
4236 return res;
4239 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4240 is a problem, otherwise false. */
4242 static bool
4243 verify_gimple_assign (gimple_assign stmt)
4245 switch (gimple_assign_rhs_class (stmt))
4247 case GIMPLE_SINGLE_RHS:
4248 return verify_gimple_assign_single (stmt);
4250 case GIMPLE_UNARY_RHS:
4251 return verify_gimple_assign_unary (stmt);
4253 case GIMPLE_BINARY_RHS:
4254 return verify_gimple_assign_binary (stmt);
4256 case GIMPLE_TERNARY_RHS:
4257 return verify_gimple_assign_ternary (stmt);
4259 default:
4260 gcc_unreachable ();
4264 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4265 is a problem, otherwise false. */
4267 static bool
4268 verify_gimple_return (gimple_return stmt)
4270 tree op = gimple_return_retval (stmt);
4271 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4273 /* We cannot test for present return values as we do not fix up missing
4274 return values from the original source. */
4275 if (op == NULL)
4276 return false;
4278 if (!is_gimple_val (op)
4279 && TREE_CODE (op) != RESULT_DECL)
4281 error ("invalid operand in return statement");
4282 debug_generic_stmt (op);
4283 return true;
4286 if ((TREE_CODE (op) == RESULT_DECL
4287 && DECL_BY_REFERENCE (op))
4288 || (TREE_CODE (op) == SSA_NAME
4289 && SSA_NAME_VAR (op)
4290 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4291 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4292 op = TREE_TYPE (op);
4294 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4296 error ("invalid conversion in return statement");
4297 debug_generic_stmt (restype);
4298 debug_generic_stmt (TREE_TYPE (op));
4299 return true;
4302 return false;
4306 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4307 is a problem, otherwise false. */
4309 static bool
4310 verify_gimple_goto (gimple_goto stmt)
4312 tree dest = gimple_goto_dest (stmt);
4314 /* ??? We have two canonical forms of direct goto destinations, a
4315 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4316 if (TREE_CODE (dest) != LABEL_DECL
4317 && (!is_gimple_val (dest)
4318 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4320 error ("goto destination is neither a label nor a pointer");
4321 return true;
4324 return false;
4327 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4328 is a problem, otherwise false. */
4330 static bool
4331 verify_gimple_switch (gimple_switch stmt)
4333 unsigned int i, n;
4334 tree elt, prev_upper_bound = NULL_TREE;
4335 tree index_type, elt_type = NULL_TREE;
4337 if (!is_gimple_val (gimple_switch_index (stmt)))
4339 error ("invalid operand to switch statement");
4340 debug_generic_stmt (gimple_switch_index (stmt));
4341 return true;
4344 index_type = TREE_TYPE (gimple_switch_index (stmt));
4345 if (! INTEGRAL_TYPE_P (index_type))
4347 error ("non-integral type switch statement");
4348 debug_generic_expr (index_type);
4349 return true;
4352 elt = gimple_switch_label (stmt, 0);
4353 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4355 error ("invalid default case label in switch statement");
4356 debug_generic_expr (elt);
4357 return true;
4360 n = gimple_switch_num_labels (stmt);
4361 for (i = 1; i < n; i++)
4363 elt = gimple_switch_label (stmt, i);
4365 if (! CASE_LOW (elt))
4367 error ("invalid case label in switch statement");
4368 debug_generic_expr (elt);
4369 return true;
4371 if (CASE_HIGH (elt)
4372 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4374 error ("invalid case range in switch statement");
4375 debug_generic_expr (elt);
4376 return true;
4379 if (elt_type)
4381 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4382 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4384 error ("type mismatch for case label in switch statement");
4385 debug_generic_expr (elt);
4386 return true;
4389 else
4391 elt_type = TREE_TYPE (CASE_LOW (elt));
4392 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4394 error ("type precision mismatch in switch statement");
4395 return true;
4399 if (prev_upper_bound)
4401 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4403 error ("case labels not sorted in switch statement");
4404 return true;
4408 prev_upper_bound = CASE_HIGH (elt);
4409 if (! prev_upper_bound)
4410 prev_upper_bound = CASE_LOW (elt);
4413 return false;
4416 /* Verify a gimple debug statement STMT.
4417 Returns true if anything is wrong. */
4419 static bool
4420 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4422 /* There isn't much that could be wrong in a gimple debug stmt. A
4423 gimple debug bind stmt, for example, maps a tree, that's usually
4424 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4425 component or member of an aggregate type, to another tree, that
4426 can be an arbitrary expression. These stmts expand into debug
4427 insns, and are converted to debug notes by var-tracking.c. */
4428 return false;
4431 /* Verify a gimple label statement STMT.
4432 Returns true if anything is wrong. */
4434 static bool
4435 verify_gimple_label (gimple_label stmt)
4437 tree decl = gimple_label_label (stmt);
4438 int uid;
4439 bool err = false;
4441 if (TREE_CODE (decl) != LABEL_DECL)
4442 return true;
4443 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4444 && DECL_CONTEXT (decl) != current_function_decl)
4446 error ("label's context is not the current function decl");
4447 err |= true;
4450 uid = LABEL_DECL_UID (decl);
4451 if (cfun->cfg
4452 && (uid == -1
4453 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4455 error ("incorrect entry in label_to_block_map");
4456 err |= true;
4459 uid = EH_LANDING_PAD_NR (decl);
4460 if (uid)
4462 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4463 if (decl != lp->post_landing_pad)
4465 error ("incorrect setting of landing pad number");
4466 err |= true;
4470 return err;
4473 /* Verify the GIMPLE statement STMT. Returns true if there is an
4474 error, otherwise false. */
4476 static bool
4477 verify_gimple_stmt (gimple stmt)
4479 switch (gimple_code (stmt))
4481 case GIMPLE_ASSIGN:
4482 return verify_gimple_assign (as_a <gimple_assign> (stmt));
4484 case GIMPLE_LABEL:
4485 return verify_gimple_label (as_a <gimple_label> (stmt));
4487 case GIMPLE_CALL:
4488 return verify_gimple_call (stmt);
4490 case GIMPLE_COND:
4491 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4493 error ("invalid comparison code in gimple cond");
4494 return true;
4496 if (!(!gimple_cond_true_label (stmt)
4497 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4498 || !(!gimple_cond_false_label (stmt)
4499 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4501 error ("invalid labels in gimple cond");
4502 return true;
4505 return verify_gimple_comparison (boolean_type_node,
4506 gimple_cond_lhs (stmt),
4507 gimple_cond_rhs (stmt));
4509 case GIMPLE_GOTO:
4510 return verify_gimple_goto (as_a <gimple_goto> (stmt));
4512 case GIMPLE_SWITCH:
4513 return verify_gimple_switch (as_a <gimple_switch> (stmt));
4515 case GIMPLE_RETURN:
4516 return verify_gimple_return (as_a <gimple_return> (stmt));
4518 case GIMPLE_ASM:
4519 return false;
4521 case GIMPLE_TRANSACTION:
4522 return verify_gimple_transaction (as_a <gimple_transaction> (stmt));
4524 /* Tuples that do not have tree operands. */
4525 case GIMPLE_NOP:
4526 case GIMPLE_PREDICT:
4527 case GIMPLE_RESX:
4528 case GIMPLE_EH_DISPATCH:
4529 case GIMPLE_EH_MUST_NOT_THROW:
4530 return false;
4532 CASE_GIMPLE_OMP:
4533 /* OpenMP directives are validated by the FE and never operated
4534 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4535 non-gimple expressions when the main index variable has had
4536 its address taken. This does not affect the loop itself
4537 because the header of an GIMPLE_OMP_FOR is merely used to determine
4538 how to setup the parallel iteration. */
4539 return false;
4541 case GIMPLE_DEBUG:
4542 return verify_gimple_debug (stmt);
4544 default:
4545 gcc_unreachable ();
4549 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4550 and false otherwise. */
4552 static bool
4553 verify_gimple_phi (gimple phi)
4555 bool err = false;
4556 unsigned i;
4557 tree phi_result = gimple_phi_result (phi);
4558 bool virtual_p;
4560 if (!phi_result)
4562 error ("invalid PHI result");
4563 return true;
4566 virtual_p = virtual_operand_p (phi_result);
4567 if (TREE_CODE (phi_result) != SSA_NAME
4568 || (virtual_p
4569 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4571 error ("invalid PHI result");
4572 err = true;
4575 for (i = 0; i < gimple_phi_num_args (phi); i++)
4577 tree t = gimple_phi_arg_def (phi, i);
4579 if (!t)
4581 error ("missing PHI def");
4582 err |= true;
4583 continue;
4585 /* Addressable variables do have SSA_NAMEs but they
4586 are not considered gimple values. */
4587 else if ((TREE_CODE (t) == SSA_NAME
4588 && virtual_p != virtual_operand_p (t))
4589 || (virtual_p
4590 && (TREE_CODE (t) != SSA_NAME
4591 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4592 || (!virtual_p
4593 && !is_gimple_val (t)))
4595 error ("invalid PHI argument");
4596 debug_generic_expr (t);
4597 err |= true;
4599 #ifdef ENABLE_TYPES_CHECKING
4600 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4602 error ("incompatible types in PHI argument %u", i);
4603 debug_generic_stmt (TREE_TYPE (phi_result));
4604 debug_generic_stmt (TREE_TYPE (t));
4605 err |= true;
4607 #endif
4610 return err;
4613 /* Verify the GIMPLE statements inside the sequence STMTS. */
4615 static bool
4616 verify_gimple_in_seq_2 (gimple_seq stmts)
4618 gimple_stmt_iterator ittr;
4619 bool err = false;
4621 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4623 gimple stmt = gsi_stmt (ittr);
4625 switch (gimple_code (stmt))
4627 case GIMPLE_BIND:
4628 err |= verify_gimple_in_seq_2 (
4629 gimple_bind_body (as_a <gimple_bind> (stmt)));
4630 break;
4632 case GIMPLE_TRY:
4633 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4634 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4635 break;
4637 case GIMPLE_EH_FILTER:
4638 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4639 break;
4641 case GIMPLE_EH_ELSE:
4642 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4643 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4644 break;
4646 case GIMPLE_CATCH:
4647 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4648 break;
4650 case GIMPLE_TRANSACTION:
4651 err |= verify_gimple_transaction (as_a <gimple_transaction> (stmt));
4652 break;
4654 default:
4656 bool err2 = verify_gimple_stmt (stmt);
4657 if (err2)
4658 debug_gimple_stmt (stmt);
4659 err |= err2;
4664 return err;
4667 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4668 is a problem, otherwise false. */
4670 static bool
4671 verify_gimple_transaction (gimple_transaction stmt)
4673 tree lab = gimple_transaction_label (stmt);
4674 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4675 return true;
4676 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4680 /* Verify the GIMPLE statements inside the statement list STMTS. */
4682 DEBUG_FUNCTION void
4683 verify_gimple_in_seq (gimple_seq stmts)
4685 timevar_push (TV_TREE_STMT_VERIFY);
4686 if (verify_gimple_in_seq_2 (stmts))
4687 internal_error ("verify_gimple failed");
4688 timevar_pop (TV_TREE_STMT_VERIFY);
4691 /* Return true when the T can be shared. */
4693 static bool
4694 tree_node_can_be_shared (tree t)
4696 if (IS_TYPE_OR_DECL_P (t)
4697 || is_gimple_min_invariant (t)
4698 || TREE_CODE (t) == SSA_NAME
4699 || t == error_mark_node
4700 || TREE_CODE (t) == IDENTIFIER_NODE)
4701 return true;
4703 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4704 return true;
4706 if (DECL_P (t))
4707 return true;
4709 return false;
4712 /* Called via walk_tree. Verify tree sharing. */
4714 static tree
4715 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4717 hash_set<void *> *visited = (hash_set<void *> *) data;
4719 if (tree_node_can_be_shared (*tp))
4721 *walk_subtrees = false;
4722 return NULL;
4725 if (visited->add (*tp))
4726 return *tp;
4728 return NULL;
4731 /* Called via walk_gimple_stmt. Verify tree sharing. */
4733 static tree
4734 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4736 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4737 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4740 static bool eh_error_found;
4741 bool
4742 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4743 hash_set<gimple> *visited)
4745 if (!visited->contains (stmt))
4747 error ("dead STMT in EH table");
4748 debug_gimple_stmt (stmt);
4749 eh_error_found = true;
4751 return true;
4754 /* Verify if the location LOCs block is in BLOCKS. */
4756 static bool
4757 verify_location (hash_set<tree> *blocks, location_t loc)
4759 tree block = LOCATION_BLOCK (loc);
4760 if (block != NULL_TREE
4761 && !blocks->contains (block))
4763 error ("location references block not in block tree");
4764 return true;
4766 if (block != NULL_TREE)
4767 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4768 return false;
4771 /* Called via walk_tree. Verify that expressions have no blocks. */
4773 static tree
4774 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4776 if (!EXPR_P (*tp))
4778 *walk_subtrees = false;
4779 return NULL;
4782 location_t loc = EXPR_LOCATION (*tp);
4783 if (LOCATION_BLOCK (loc) != NULL)
4784 return *tp;
4786 return NULL;
4789 /* Called via walk_tree. Verify locations of expressions. */
4791 static tree
4792 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4794 hash_set<tree> *blocks = (hash_set<tree> *) data;
4796 if (TREE_CODE (*tp) == VAR_DECL
4797 && DECL_HAS_DEBUG_EXPR_P (*tp))
4799 tree t = DECL_DEBUG_EXPR (*tp);
4800 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4801 if (addr)
4802 return addr;
4804 if ((TREE_CODE (*tp) == VAR_DECL
4805 || TREE_CODE (*tp) == PARM_DECL
4806 || TREE_CODE (*tp) == RESULT_DECL)
4807 && DECL_HAS_VALUE_EXPR_P (*tp))
4809 tree t = DECL_VALUE_EXPR (*tp);
4810 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4811 if (addr)
4812 return addr;
4815 if (!EXPR_P (*tp))
4817 *walk_subtrees = false;
4818 return NULL;
4821 location_t loc = EXPR_LOCATION (*tp);
4822 if (verify_location (blocks, loc))
4823 return *tp;
4825 return NULL;
4828 /* Called via walk_gimple_op. Verify locations of expressions. */
4830 static tree
4831 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4833 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4834 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4837 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4839 static void
4840 collect_subblocks (hash_set<tree> *blocks, tree block)
4842 tree t;
4843 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4845 blocks->add (t);
4846 collect_subblocks (blocks, t);
4850 /* Verify the GIMPLE statements in the CFG of FN. */
4852 DEBUG_FUNCTION void
4853 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4855 basic_block bb;
4856 bool err = false;
4858 timevar_push (TV_TREE_STMT_VERIFY);
/* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
   records every statement seen so the EH table can be cross-checked
   at the end.  */
4859 hash_set<void *> visited;
4860 hash_set<gimple> visited_stmts;
4862 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4863 hash_set<tree> blocks;
4864 if (DECL_INITIAL (fn->decl))
4866 blocks.add (DECL_INITIAL (fn->decl));
4867 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4870 FOR_EACH_BB_FN (bb, fn)
4872 gimple_stmt_iterator gsi;
/* First pass over the PHI nodes: check bb pointer, per-arg tree
   sharing, locations and location blocks.  */
4874 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4876 gimple phi = gsi_stmt (gsi);
4877 bool err2 = false;
4878 unsigned i;
4880 visited_stmts.add (phi);
4882 if (gimple_bb (phi) != bb)
4884 error ("gimple_bb (phi) is set to a wrong basic block");
4885 err2 = true;
4888 err2 |= verify_gimple_phi (phi);
4890 /* Only PHI arguments have locations. */
4891 if (gimple_location (phi) != UNKNOWN_LOCATION)
4893 error ("PHI node with location");
4894 err2 = true;
4897 for (i = 0; i < gimple_phi_num_args (phi); i++)
4899 tree arg = gimple_phi_arg_def (phi, i);
4900 tree addr = walk_tree (&arg, verify_node_sharing_1,
4901 &visited, NULL);
4902 if (addr)
4904 error ("incorrect sharing of tree nodes");
4905 debug_generic_expr (addr);
4906 err2 |= true;
4908 location_t loc = gimple_phi_arg_location (phi, i);
4909 if (virtual_operand_p (gimple_phi_result (phi))
4910 && loc != UNKNOWN_LOCATION)
4912 error ("virtual PHI with argument locations");
4913 err2 = true;
4915 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4916 if (addr)
4918 debug_generic_expr (addr);
4919 err2 = true;
4921 err2 |= verify_location (&blocks, loc);
4924 if (err2)
4925 debug_gimple_stmt (phi);
4926 err |= err2;
/* Second pass over the ordinary statements: per-stmt verification,
   sharing, location and EH-region consistency checks.  */
4929 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4931 gimple stmt = gsi_stmt (gsi);
4932 bool err2 = false;
4933 struct walk_stmt_info wi;
4934 tree addr;
4935 int lp_nr;
4937 visited_stmts.add (stmt);
4939 if (gimple_bb (stmt) != bb)
4941 error ("gimple_bb (stmt) is set to a wrong basic block");
4942 err2 = true;
4945 err2 |= verify_gimple_stmt (stmt);
4946 err2 |= verify_location (&blocks, gimple_location (stmt));
4948 memset (&wi, 0, sizeof (wi));
4949 wi.info = (void *) &visited;
4950 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4951 if (addr)
4953 error ("incorrect sharing of tree nodes");
4954 debug_generic_expr (addr);
4955 err2 |= true;
4958 memset (&wi, 0, sizeof (wi));
4959 wi.info = (void *) &blocks;
4960 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4961 if (addr)
4963 debug_generic_expr (addr);
4964 err2 |= true;
4967 /* ??? Instead of not checking these stmts at all the walker
4968 should know its context via wi. */
4969 if (!is_gimple_debug (stmt)
4970 && !is_gimple_omp (stmt))
4972 memset (&wi, 0, sizeof (wi));
4973 addr = walk_gimple_op (stmt, verify_expr, &wi);
4974 if (addr)
4976 debug_generic_expr (addr);
4977 inform (gimple_location (stmt), "in statement");
4978 err2 |= true;
4982 /* If the statement is marked as part of an EH region, then it is
4983 expected that the statement could throw. Verify that when we
4984 have optimizations that simplify statements such that we prove
4985 that they cannot throw, that we update other data structures
4986 to match. */
4987 lp_nr = lookup_stmt_eh_lp (stmt);
4988 if (lp_nr > 0)
4990 if (!stmt_could_throw_p (stmt))
4992 if (verify_nothrow)
4994 error ("statement marked for throw, but doesn%'t")
4995 err2 |= true;
4998 else if (!gsi_one_before_end_p (gsi))
5000 error ("statement marked for throw in middle of block");
5001 err2 |= true;
5005 if (err2)
5006 debug_gimple_stmt (stmt);
5007 err |= err2;
/* Finally, every statement recorded in the EH throw table must have
   been visited above; otherwise the table references dead stmts.  */
5011 eh_error_found = false;
5012 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5013 if (eh_table)
5014 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5015 (&visited_stmts);
5017 if (err || eh_error_found)
5018 internal_error ("verify_gimple failed");
5020 verify_histograms ();
5021 timevar_pop (TV_TREE_STMT_VERIFY);
5025 /* Verifies that the flow information is OK. */
5027 static int
5028 gimple_verify_flow_info (void)
5030 int err = 0;
5031 basic_block bb;
5032 gimple_stmt_iterator gsi;
5033 gimple stmt;
5034 edge e;
5035 edge_iterator ei;
/* The artificial entry/exit blocks must never carry statements or
   PHI nodes.  */
5037 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5038 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5040 error ("ENTRY_BLOCK has IL associated with it");
5041 err = 1;
5044 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5045 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5047 error ("EXIT_BLOCK has IL associated with it");
5048 err = 1;
5051 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5052 if (e->flags & EDGE_FALLTHRU)
5054 error ("fallthru to exit from bb %d", e->src->index);
5055 err = 1;
5058 FOR_EACH_BB_FN (bb, cfun)
5060 bool found_ctrl_stmt = false;
5062 stmt = NULL;
5064 /* Skip labels on the start of basic block. */
5065 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5067 tree label;
5068 gimple prev_stmt = stmt;
5070 stmt = gsi_stmt (gsi);
5072 if (gimple_code (stmt) != GIMPLE_LABEL)
5073 break;
/* Nonlocal and EH landing-pad labels must come first in the label
   sequence; every label must map back to this block and belong to
   the current function.  */
5075 label = gimple_label_label (stmt);
5076 if (prev_stmt && DECL_NONLOCAL (label))
5078 error ("nonlocal label ");
5079 print_generic_expr (stderr, label, 0);
5080 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5081 bb->index);
5082 err = 1;
5085 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5087 error ("EH landing pad label ");
5088 print_generic_expr (stderr, label, 0);
5089 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5090 bb->index);
5091 err = 1;
5094 if (label_to_block (label) != bb)
5096 error ("label ");
5097 print_generic_expr (stderr, label, 0);
5098 fprintf (stderr, " to block does not match in bb %d",
5099 bb->index);
5100 err = 1;
5103 if (decl_function_context (label) != current_function_decl)
5105 error ("label ");
5106 print_generic_expr (stderr, label, 0);
5107 fprintf (stderr, " has incorrect context in bb %d",
5108 bb->index);
5109 err = 1;
5113 /* Verify that body of basic block BB is free of control flow. */
5114 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5116 gimple stmt = gsi_stmt (gsi);
5118 if (found_ctrl_stmt)
5120 error ("control flow in the middle of basic block %d",
5121 bb->index);
5122 err = 1;
5125 if (stmt_ends_bb_p (stmt))
5126 found_ctrl_stmt = true;
5128 if (gimple_code (stmt) == GIMPLE_LABEL)
5130 error ("label ");
5131 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5132 fprintf (stderr, " in the middle of basic block %d", bb->index);
5133 err = 1;
/* Now check the block's last statement against its outgoing edges.  */
5137 gsi = gsi_last_bb (bb);
5138 if (gsi_end_p (gsi))
5139 continue;
5141 stmt = gsi_stmt (gsi);
5143 if (gimple_code (stmt) == GIMPLE_LABEL)
5144 continue;
5146 err |= verify_eh_edges (stmt);
5148 if (is_ctrl_stmt (stmt))
5150 FOR_EACH_EDGE (e, ei, bb->succs)
5151 if (e->flags & EDGE_FALLTHRU)
5153 error ("fallthru edge after a control statement in bb %d",
5154 bb->index);
5155 err = 1;
5159 if (gimple_code (stmt) != GIMPLE_COND)
5161 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5162 after anything else but if statement. */
5163 FOR_EACH_EDGE (e, ei, bb->succs)
5164 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5166 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5167 bb->index);
5168 err = 1;
/* Per-statement-code edge checks.  */
5172 switch (gimple_code (stmt))
5174 case GIMPLE_COND:
5176 edge true_edge;
5177 edge false_edge;
5179 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5181 if (!true_edge
5182 || !false_edge
5183 || !(true_edge->flags & EDGE_TRUE_VALUE)
5184 || !(false_edge->flags & EDGE_FALSE_VALUE)
5185 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5186 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5187 || EDGE_COUNT (bb->succs) >= 3)
5189 error ("wrong outgoing edge flags at end of bb %d",
5190 bb->index);
5191 err = 1;
5194 break;
5196 case GIMPLE_GOTO:
5197 if (simple_goto_p (stmt))
5199 error ("explicit goto at end of bb %d", bb->index);
5200 err = 1;
5202 else
5204 /* FIXME. We should double check that the labels in the
5205 destination blocks have their address taken. */
5206 FOR_EACH_EDGE (e, ei, bb->succs)
5207 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5208 | EDGE_FALSE_VALUE))
5209 || !(e->flags & EDGE_ABNORMAL))
5211 error ("wrong outgoing edge flags at end of bb %d",
5212 bb->index);
5213 err = 1;
5216 break;
5218 case GIMPLE_CALL:
5219 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5220 break;
5221 /* ... fallthru ... */
5222 case GIMPLE_RETURN:
5223 if (!single_succ_p (bb)
5224 || (single_succ_edge (bb)->flags
5225 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5226 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5228 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5229 err = 1;
5231 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5233 error ("return edge does not point to exit in bb %d",
5234 bb->index);
5235 err = 1;
5237 break;
5239 case GIMPLE_SWITCH:
5241 gimple_switch switch_stmt = as_a <gimple_switch> (stmt);
5242 tree prev;
5243 edge e;
5244 size_t i, n;
5246 n = gimple_switch_num_labels (switch_stmt);
5248 /* Mark all the destination basic blocks. */
5249 for (i = 0; i < n; ++i)
5251 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5252 basic_block label_bb = label_to_block (lab);
5253 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5254 label_bb->aux = (void *)1;
5257 /* Verify that the case labels are sorted. */
5258 prev = gimple_switch_label (switch_stmt, 0);
5259 for (i = 1; i < n; ++i)
5261 tree c = gimple_switch_label (switch_stmt, i);
5262 if (!CASE_LOW (c))
5264 error ("found default case not at the start of "
5265 "case vector");
5266 err = 1;
5267 continue;
5269 if (CASE_LOW (prev)
5270 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5272 error ("case labels not sorted: ");
5273 print_generic_expr (stderr, prev, 0);
5274 fprintf (stderr," is greater than ");
5275 print_generic_expr (stderr, c, 0);
5276 fprintf (stderr," but comes before it.\n");
5277 err = 1;
5279 prev = c;
5281 /* VRP will remove the default case if it can prove it will
5282 never be executed. So do not verify there always exists
5283 a default case here. */
/* Each successor edge must correspond to some case label (aux == 1),
   and is re-marked with aux == 2 once matched.  */
5285 FOR_EACH_EDGE (e, ei, bb->succs)
5287 if (!e->dest->aux)
5289 error ("extra outgoing edge %d->%d",
5290 bb->index, e->dest->index);
5291 err = 1;
5294 e->dest->aux = (void *)2;
5295 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5296 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5298 error ("wrong outgoing edge flags at end of bb %d",
5299 bb->index);
5300 err = 1;
5304 /* Check that we have all of them. */
5305 for (i = 0; i < n; ++i)
5307 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5308 basic_block label_bb = label_to_block (lab);
5310 if (label_bb->aux != (void *)2)
5312 error ("missing edge %i->%i", bb->index, label_bb->index);
5313 err = 1;
/* Clear the aux markers again so later users see a clean CFG.  */
5317 FOR_EACH_EDGE (e, ei, bb->succs)
5318 e->dest->aux = (void *)0;
5320 break;
5322 case GIMPLE_EH_DISPATCH:
5323 err |= verify_eh_dispatch_edge (stmt);
5324 break;
5326 default:
5327 break;
5331 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5332 verify_dominators (CDI_DOMINATORS);
5334 return err;
5338 /* Updates phi nodes after creating a forwarder block joined
5339 by edge FALLTHRU. */
5341 static void
5342 gimple_make_forwarder_block (edge fallthru)
5344 edge e;
5345 edge_iterator ei;
5346 basic_block dummy, bb;
5347 tree var;
5348 gimple_phi_iterator gsi;
5350 dummy = fallthru->src;
5351 bb = fallthru->dest;
5353 if (single_pred_p (bb))
5354 return;
5356 /* If we redirected a branch we must create new PHI nodes at the
5357 start of BB. */
5358 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5360 gimple_phi phi, new_phi;
5362 phi = gsi.phi ();
5363 var = gimple_phi_result (phi);
5364 new_phi = create_phi_node (var, bb);
5365 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5366 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5367 UNKNOWN_LOCATION);
5370 /* Add the arguments we have stored on edges. */
5371 FOR_EACH_EDGE (e, ei, bb->preds)
5373 if (e == fallthru)
5374 continue;
5376 flush_pending_stmts (e);
5381 /* Return a non-special label in the head of basic block BLOCK.
5382 Create one if it doesn't exist. */
5384 tree
5385 gimple_block_label (basic_block bb)
5387 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5388 bool first = true;
5389 tree label;
5390 gimple stmt;
5392 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5394 stmt = gsi_stmt (i);
5395 if (gimple_code (stmt) != GIMPLE_LABEL)
5396 break;
5397 label = gimple_label_label (stmt);
5398 if (!DECL_NONLOCAL (label))
5400 if (!first)
5401 gsi_move_before (&i, &s);
5402 return label;
5406 label = create_artificial_label (UNKNOWN_LOCATION);
5407 stmt = gimple_build_label (label);
5408 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5409 return label;
5413 /* Attempt to perform edge redirection by replacing a possibly complex
5414 jump instruction by a goto or by removing the jump completely.
5415 This can apply only if all edges now point to the same block. The
5416 parameters and return values are equivalent to
5417 redirect_edge_and_branch. */
5419 static edge
5420 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5422 basic_block src = e->src;
5423 gimple_stmt_iterator i;
5424 gimple stmt;
5426 /* We can replace or remove a complex jump only when we have exactly
5427 two edges. */
5428 if (EDGE_COUNT (src->succs) != 2
5429 /* Verify that all targets will be TARGET. Specifically, the
5430 edge that is not E must also go to TARGET. */
5431 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5432 return NULL;
5434 i = gsi_last_bb (src);
5435 if (gsi_end_p (i))
5436 return NULL;
5438 stmt = gsi_stmt (i);
5440 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5442 gsi_remove (&i, true);
5443 e = ssa_redirect_edge (e, target);
5444 e->flags = EDGE_FALLTHRU;
5445 return e;
5448 return NULL;
5452 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5453 edge representing the redirected branch. */
5455 static edge
5456 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5458 basic_block bb = e->src;
5459 gimple_stmt_iterator gsi;
5460 edge ret;
5461 gimple stmt;
/* Abnormal edges cannot be redirected; already-correct edges need
   no work; EH edges have their own redirection machinery.  */
5463 if (e->flags & EDGE_ABNORMAL)
5464 return NULL;
5466 if (e->dest == dest)
5467 return NULL;
5469 if (e->flags & EDGE_EH)
5470 return redirect_eh_edge (e, dest);
5472 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5474 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5475 if (ret)
5476 return ret;
/* Otherwise fix up the statement ending BB, if any, per its code.  */
5479 gsi = gsi_last_bb (bb);
5480 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5482 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5484 case GIMPLE_COND:
5485 /* For COND_EXPR, we only need to redirect the edge. */
5486 break;
5488 case GIMPLE_GOTO:
5489 /* No non-abnormal edges should lead from a non-simple goto, and
5490 simple ones should be represented implicitly. */
5491 gcc_unreachable ();
5493 case GIMPLE_SWITCH:
5495 gimple_switch switch_stmt = as_a <gimple_switch> (stmt);
5496 tree label = gimple_block_label (dest);
5497 tree cases = get_cases_for_edge (e, switch_stmt);
5499 /* If we have a list of cases associated with E, then use it
5500 as it's a lot faster than walking the entire case vector. */
5501 if (cases)
5503 edge e2 = find_edge (e->src, dest);
5504 tree last, first;
5506 first = cases;
5507 while (cases)
5509 last = cases;
5510 CASE_LABEL (cases) = label;
5511 cases = CASE_CHAIN (cases);
5514 /* If there was already an edge in the CFG, then we need
5515 to move all the cases associated with E to E2. */
5516 if (e2)
5518 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5520 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5521 CASE_CHAIN (cases2) = first;
5523 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5525 else
5527 size_t i, n = gimple_switch_num_labels (switch_stmt);
5529 for (i = 0; i < n; i++)
5531 tree elt = gimple_switch_label (switch_stmt, i);
5532 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5533 CASE_LABEL (elt) = label;
5537 break;
5539 case GIMPLE_ASM:
5541 gimple_asm asm_stmt = as_a <gimple_asm> (stmt);
5542 int i, n = gimple_asm_nlabels (asm_stmt);
5543 tree label = NULL;
/* Retarget any asm-goto label operands that pointed at E->dest.  */
5545 for (i = 0; i < n; ++i)
5547 tree cons = gimple_asm_label_op (asm_stmt, i);
5548 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5550 if (!label)
5551 label = gimple_block_label (dest);
5552 TREE_VALUE (cons) = label;
5556 /* If we didn't find any label matching the former edge in the
5557 asm labels, we must be redirecting the fallthrough
5558 edge. */
5559 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5561 break;
5563 case GIMPLE_RETURN:
5564 gsi_remove (&gsi, true);
5565 e->flags |= EDGE_FALLTHRU;
5566 break;
5568 case GIMPLE_OMP_RETURN:
5569 case GIMPLE_OMP_CONTINUE:
5570 case GIMPLE_OMP_SECTIONS_SWITCH:
5571 case GIMPLE_OMP_FOR:
5572 /* The edges from OMP constructs can be simply redirected. */
5573 break;
5575 case GIMPLE_EH_DISPATCH:
5576 if (!(e->flags & EDGE_FALLTHRU))
5577 redirect_eh_dispatch_edge (stmt, e, dest);
5578 break;
5580 case GIMPLE_TRANSACTION:
5581 /* The ABORT edge has a stored label associated with it, otherwise
5582 the edges are simply redirectable. */
5583 if (e->flags == 0)
5584 gimple_transaction_set_label (as_a <gimple_transaction> (stmt),
5585 gimple_block_label (dest));
5586 break;
5588 default:
5589 /* Otherwise it must be a fallthru edge, and we don't need to
5590 do anything besides redirecting it. */
5591 gcc_assert (e->flags & EDGE_FALLTHRU);
5592 break;
5595 /* Update/insert PHI nodes as necessary. */
5597 /* Now update the edges in the CFG. */
5598 e = ssa_redirect_edge (e, dest);
5600 return e;
5603 /* Returns true if it is possible to remove edge E by redirecting
5604 it to the destination of the other edge from E->src. */
5606 static bool
5607 gimple_can_remove_branch_p (const_edge e)
5609 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5610 return false;
5612 return true;
5615 /* Simple wrapper, as we can always redirect fallthru edges. */
5617 static basic_block
5618 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5620 e = gimple_redirect_edge_and_branch (e, dest);
5621 gcc_assert (e);
5623 return NULL;
5627 /* Splits basic block BB after statement STMT (but at least after the
5628 labels). If STMT is NULL, BB is split just after the labels. */
5630 static basic_block
5631 gimple_split_block (basic_block bb, void *stmt)
5633 gimple_stmt_iterator gsi;
5634 gimple_stmt_iterator gsi_tgt;
5635 gimple act;
5636 gimple_seq list;
5637 basic_block new_bb;
5638 edge e;
5639 edge_iterator ei;
5641 new_bb = create_empty_bb (bb);
5643 /* Redirect the outgoing edges. */
5644 new_bb->succs = bb->succs;
5645 bb->succs = NULL;
5646 FOR_EACH_EDGE (e, ei, new_bb->succs)
5647 e->src = new_bb;
/* Splitting at a label degenerates to splitting after the labels.  */
5649 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5650 stmt = NULL;
5652 /* Move everything from GSI to the new basic block. */
5653 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5655 act = gsi_stmt (gsi);
5656 if (gimple_code (act) == GIMPLE_LABEL)
5657 continue;
5659 if (!stmt)
5660 break;
5662 if (stmt == act)
5664 gsi_next (&gsi);
5665 break;
/* GSI now points at the first statement of the new block; if the
   split point was the end of BB, the new block stays empty.  */
5669 if (gsi_end_p (gsi))
5670 return new_bb;
5672 /* Split the statement list - avoid re-creating new containers as this
5673 brings ugly quadratic memory consumption in the inliner.
5674 (We are still quadratic since we need to update stmt BB pointers,
5675 sadly.) */
5676 gsi_split_seq_before (&gsi, &list);
5677 set_bb_seq (new_bb, list);
5678 for (gsi_tgt = gsi_start (list);
5679 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5680 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5682 return new_bb;
5686 /* Moves basic block BB after block AFTER. */
5688 static bool
5689 gimple_move_block_after (basic_block bb, basic_block after)
5691 if (bb->prev_bb == after)
5692 return true;
5694 unlink_block (bb);
5695 link_block (bb, after);
5697 return true;
5701 /* Return TRUE if block BB has no executable statements, otherwise return
5702 FALSE. */
5704 static bool
5705 gimple_empty_block_p (basic_block bb)
5707 /* BB must have no executable statements. */
5708 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5709 if (phi_nodes (bb))
5710 return false;
5711 if (gsi_end_p (gsi))
5712 return true;
5713 if (is_gimple_debug (gsi_stmt (gsi)))
5714 gsi_next_nondebug (&gsi);
5715 return gsi_end_p (gsi);
5719 /* Split a basic block if it ends with a conditional branch and if the
5720 other part of the block is not empty. */
5722 static basic_block
5723 gimple_split_block_before_cond_jump (basic_block bb)
5725 gimple last, split_point;
5726 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5727 if (gsi_end_p (gsi))
5728 return NULL;
5729 last = gsi_stmt (gsi);
5730 if (gimple_code (last) != GIMPLE_COND
5731 && gimple_code (last) != GIMPLE_SWITCH)
5732 return NULL;
5733 gsi_prev_nondebug (&gsi);
5734 split_point = gsi_stmt (gsi);
5735 return split_block (bb, split_point)->dest;
5739 /* Return true if basic_block can be duplicated. */
5741 static bool
5742 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5744 return true;
5747 /* Create a duplicate of the basic block BB. NOTE: This does not
5748 preserve SSA form. */
5750 static basic_block
5751 gimple_duplicate_bb (basic_block bb)
5753 basic_block new_bb;
5754 gimple_stmt_iterator gsi, gsi_tgt;
5755 gimple_seq phis = phi_nodes (bb);
5756 gimple phi, stmt, copy;
/* The copy is appended just before the exit block.  */
5758 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5760 /* Copy the PHI nodes. We ignore PHI node arguments here because
5761 the incoming edges have not been setup yet. */
5762 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5764 phi = gsi_stmt (gsi);
5765 copy = create_phi_node (NULL_TREE, new_bb);
5766 create_new_def_for (gimple_phi_result (phi), copy,
5767 gimple_phi_result_ptr (copy));
5768 gimple_set_uid (copy, gimple_uid (phi));
5771 gsi_tgt = gsi_start_bb (new_bb);
5772 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5774 def_operand_p def_p;
5775 ssa_op_iter op_iter;
5776 tree lhs;
5778 stmt = gsi_stmt (gsi);
/* Labels are not copied; they belong to the original block.  */
5779 if (gimple_code (stmt) == GIMPLE_LABEL)
5780 continue;
5782 /* Don't duplicate label debug stmts. */
5783 if (gimple_debug_bind_p (stmt)
5784 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5785 == LABEL_DECL)
5786 continue;
5788 /* Create a new copy of STMT and duplicate STMT's virtual
5789 operands. */
5790 copy = gimple_copy (stmt);
5791 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5793 maybe_duplicate_eh_stmt (copy, stmt);
5794 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5796 /* When copying around a stmt writing into a local non-user
5797 aggregate, make sure it won't share stack slot with other
5798 vars. */
5799 lhs = gimple_get_lhs (stmt);
5800 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5802 tree base = get_base_address (lhs);
5803 if (base
5804 && (TREE_CODE (base) == VAR_DECL
5805 || TREE_CODE (base) == RESULT_DECL)
5806 && DECL_IGNORED_P (base)
5807 && !TREE_STATIC (base)
5808 && !DECL_EXTERNAL (base)
5809 && (TREE_CODE (base) != VAR_DECL
5810 || !DECL_HAS_VALUE_EXPR_P (base)))
5811 DECL_NONSHAREABLE (base) = 1;
5814 /* Create new names for all the definitions created by COPY and
5815 add replacement mappings for each new name. */
5816 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5817 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p)
5820 return new_bb;
5823 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5825 static void
5826 add_phi_args_after_copy_edge (edge e_copy)
5828 basic_block bb, bb_copy = e_copy->src, dest;
5829 edge e;
5830 edge_iterator ei;
5831 gimple_phi phi, phi_copy;
5832 tree def;
5833 gimple_phi_iterator psi, psi_copy;
/* Nothing to do if the destination has no PHI nodes.  */
5835 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5836 return;
/* Map the copied source/destination back to their originals (blocks
   that were duplicated carry BB_DUPLICATED) to find the original
   edge E whose PHI arguments we mirror onto E_COPY.  */
5838 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5840 if (e_copy->dest->flags & BB_DUPLICATED)
5841 dest = get_bb_original (e_copy->dest);
5842 else
5843 dest = e_copy->dest;
5845 e = find_edge (bb, dest);
5846 if (!e)
5848 /* During loop unrolling the target of the latch edge is copied.
5849 In this case we are not looking for edge to dest, but to
5850 duplicated block whose original was dest. */
5851 FOR_EACH_EDGE (e, ei, bb->succs)
5853 if ((e->dest->flags & BB_DUPLICATED)
5854 && get_bb_original (e->dest) == dest)
5855 break;
5858 gcc_assert (e != NULL);
/* Walk original and copied PHI nodes in lockstep and replicate the
   argument (and its location) coming in over E onto E_COPY.  */
5861 for (psi = gsi_start_phis (e->dest),
5862 psi_copy = gsi_start_phis (e_copy->dest);
5863 !gsi_end_p (psi);
5864 gsi_next (&psi), gsi_next (&psi_copy))
5866 phi = psi.phi ();
5867 phi_copy = psi_copy.phi ();
5868 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5869 add_phi_arg (phi_copy, def, e_copy,
5870 gimple_phi_arg_location_from_edge (phi, e));
5875 /* Basic block BB_COPY was created by code duplication. Add phi node
5876 arguments for edges going out of BB_COPY. The blocks that were
5877 duplicated have BB_DUPLICATED set. */
5879 void
5880 add_phi_args_after_copy_bb (basic_block bb_copy)
5882 edge e_copy;
5883 edge_iterator ei;
5885 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5887 add_phi_args_after_copy_edge (e_copy);
5891 /* Blocks in REGION_COPY array of length N_REGION were created by
5892 duplication of basic blocks. Add phi node arguments for edges
5893 going from these blocks. If E_COPY is not NULL, also add
5894 phi node arguments for its destination.*/
5896 void
5897 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5898 edge e_copy)
5900 unsigned i;
5902 for (i = 0; i < n_region; i++)
5903 region_copy[i]->flags |= BB_DUPLICATED;
5905 for (i = 0; i < n_region; i++)
5906 add_phi_args_after_copy_bb (region_copy[i]);
5907 if (e_copy)
5908 add_phi_args_after_copy_edge (e_copy);
5910 for (i = 0; i < n_region; i++)
5911 region_copy[i]->flags &= ~BB_DUPLICATED;
5914 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5915 important exit edge EXIT. By important we mean that no SSA name defined
5916 inside region is live over the other exit edges of the region. All entry
5917 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5918 to the duplicate of the region. Dominance and loop information is
5919 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5920 UPDATE_DOMINANCE is false then we assume that the caller will update the
5921 dominance information after calling this function. The new basic
5922 blocks are stored to REGION_COPY in the same order as they had in REGION,
5923 provided that REGION_COPY is not NULL.
5924 The function returns false if it is unable to copy the region,
5925 true otherwise. */
5927 bool
5928 gimple_duplicate_sese_region (edge entry, edge exit,
5929 basic_block *region, unsigned n_region,
5930 basic_block *region_copy,
5931 bool update_dominance)
5933 unsigned i;
5934 bool free_region_copy = false, copying_header = false;
5935 struct loop *loop = entry->dest->loop_father;
5936 edge exit_copy;
5937 vec<basic_block> doms;
5938 edge redirected;
5939 int total_freq = 0, entry_freq = 0;
5940 gcov_type total_count = 0, entry_count = 0;
5942 if (!can_copy_bbs_p (region, n_region))
5943 return false;
5945 /* Some sanity checking. Note that we do not check for all possible
5946 missuses of the functions. I.e. if you ask to copy something weird,
5947 it will work, but the state of structures probably will not be
5948 correct. */
5949 for (i = 0; i < n_region; i++)
5951 /* We do not handle subloops, i.e. all the blocks must belong to the
5952 same loop. */
5953 if (region[i]->loop_father != loop)
5954 return false;
5956 if (region[i] != entry->dest
5957 && region[i] == loop->header)
5958 return false;
5961 /* In case the function is used for loop header copying (which is the primary
5962 use), ensure that EXIT and its copy will be new latch and entry edges. */
5963 if (loop->header == entry->dest)
5965 copying_header = true;
5967 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5968 return false;
5970 for (i = 0; i < n_region; i++)
5971 if (region[i] != exit->src
5972 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5973 return false;
5976 initialize_original_copy_tables ();
5978 if (copying_header)
5979 set_loop_copy (loop, loop_outer (loop));
5980 else
5981 set_loop_copy (loop, loop);
5983 if (!region_copy)
5985 region_copy = XNEWVEC (basic_block, n_region);
5986 free_region_copy = true;
5989 /* Record blocks outside the region that are dominated by something
5990 inside. */
5991 if (update_dominance)
5993 doms.create (0);
5994 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Scale profile: when real counts are available use them, otherwise
   fall back to frequencies.  */
5997 if (entry->dest->count)
5999 total_count = entry->dest->count;
6000 entry_count = entry->count;
6001 /* Fix up corner cases, to avoid division by zero or creation of negative
6002 frequencies. */
6003 if (entry_count > total_count)
6004 entry_count = total_count;
6006 else
6008 total_freq = entry->dest->frequency;
6009 entry_freq = EDGE_FREQUENCY (entry);
6010 /* Fix up corner cases, to avoid division by zero or creation of negative
6011 frequencies. */
6012 if (total_freq == 0)
6013 total_freq = 1;
6014 else if (entry_freq > total_freq)
6015 entry_freq = total_freq;
6018 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6019 split_edge_bb_loc (entry), update_dominance);
6020 if (total_count)
6022 scale_bbs_frequencies_gcov_type (region, n_region,
6023 total_count - entry_count,
6024 total_count);
6025 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6026 total_count);
6028 else
6030 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6031 total_freq);
6032 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* When copying the loop header, the old header's exit becomes the
   new loop entry and latch.  */
6035 if (copying_header)
6037 loop->header = exit->dest;
6038 loop->latch = exit->src;
6041 /* Redirect the entry and add the phi node arguments. */
6042 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6043 gcc_assert (redirected != NULL);
6044 flush_pending_stmts (entry);
6046 /* Concerning updating of dominators: We must recount dominators
6047 for entry block and its copy. Anything that is outside of the
6048 region, but was dominated by something inside needs recounting as
6049 well. */
6050 if (update_dominance)
6052 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6053 doms.safe_push (get_bb_original (entry->dest));
6054 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6055 doms.release ();
6058 /* Add the other PHI node arguments. */
6059 add_phi_args_after_copy (region_copy, n_region, NULL);
6061 if (free_region_copy)
6062 free (region_copy);
6064 free_original_copy_tables ();
6065 return true;
6068 /* Checks if BB is part of the region defined by N_REGION BBS. */
6069 static bool
6070 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6072 unsigned int n;
6074 for (n = 0; n < n_region; n++)
6076 if (bb == bbs[n])
6077 return true;
6079 return false;
6082 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6083 are stored to REGION_COPY in the same order in that they appear
6084 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6085 the region, EXIT an exit from it. The condition guarding EXIT
6086 is moved to ENTRY. Returns true if duplication succeeds, false
6087 otherwise.
6089 For example,
6091 some_code;
6092 if (cond)
6094 else
6097 is transformed to
6099 if (cond)
6101 some_code;
6104 else
6106 some_code;
6111 bool
6112 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6113 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6114 basic_block *region_copy ATTRIBUTE_UNUSED)
6116 unsigned i;
6117 bool free_region_copy = false;
6118 struct loop *loop = exit->dest->loop_father;
6119 struct loop *orig_loop = entry->dest->loop_father;
6120 basic_block switch_bb, entry_bb, nentry_bb;
6121 vec<basic_block> doms;
6122 int total_freq = 0, exit_freq = 0;
6123 gcov_type total_count = 0, exit_count = 0;
6124 edge exits[2], nexits[2], e;
6125 gimple_stmt_iterator gsi;
6126 gimple cond_stmt;
6127 edge sorig, snew;
6128 basic_block exit_bb;
6129 gimple_stmt_iterator psi;
6130 gimple phi;
6131 tree def;
6132 struct loop *target, *aloop, *cloop;
6134 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6135 exits[0] = exit;
6136 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6138 if (!can_copy_bbs_p (region, n_region))
6139 return false;
6141 initialize_original_copy_tables ();
6142 set_loop_copy (orig_loop, loop);
6144 target= loop;
6145 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6147 if (bb_part_of_region_p (aloop->header, region, n_region))
6149 cloop = duplicate_loop (aloop, target);
6150 duplicate_subloops (aloop, cloop);
6154 if (!region_copy)
6156 region_copy = XNEWVEC (basic_block, n_region);
6157 free_region_copy = true;
6160 gcc_assert (!need_ssa_update_p (cfun));
6162 /* Record blocks outside the region that are dominated by something
6163 inside. */
6164 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6166 if (exit->src->count)
6168 total_count = exit->src->count;
6169 exit_count = exit->count;
6170 /* Fix up corner cases, to avoid division by zero or creation of negative
6171 frequencies. */
6172 if (exit_count > total_count)
6173 exit_count = total_count;
6175 else
6177 total_freq = exit->src->frequency;
6178 exit_freq = EDGE_FREQUENCY (exit);
6179 /* Fix up corner cases, to avoid division by zero or creation of negative
6180 frequencies. */
6181 if (total_freq == 0)
6182 total_freq = 1;
6183 if (exit_freq > total_freq)
6184 exit_freq = total_freq;
6187 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6188 split_edge_bb_loc (exit), true);
6189 if (total_count)
6191 scale_bbs_frequencies_gcov_type (region, n_region,
6192 total_count - exit_count,
6193 total_count);
6194 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6195 total_count);
6197 else
6199 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6200 total_freq);
6201 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6204 /* Create the switch block, and put the exit condition to it. */
6205 entry_bb = entry->dest;
6206 nentry_bb = get_bb_copy (entry_bb);
6207 if (!last_stmt (entry->src)
6208 || !stmt_ends_bb_p (last_stmt (entry->src)))
6209 switch_bb = entry->src;
6210 else
6211 switch_bb = split_edge (entry);
6212 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6214 gsi = gsi_last_bb (switch_bb);
6215 cond_stmt = last_stmt (exit->src);
6216 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6217 cond_stmt = gimple_copy (cond_stmt);
6219 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6221 sorig = single_succ_edge (switch_bb);
6222 sorig->flags = exits[1]->flags;
6223 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6225 /* Register the new edge from SWITCH_BB in loop exit lists. */
6226 rescan_loop_exit (snew, true, false);
6228 /* Add the PHI node arguments. */
6229 add_phi_args_after_copy (region_copy, n_region, snew);
6231 /* Get rid of now superfluous conditions and associated edges (and phi node
6232 arguments). */
6233 exit_bb = exit->dest;
6235 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6236 PENDING_STMT (e) = NULL;
6238 /* The latch of ORIG_LOOP was copied, and so was the backedge
6239 to the original header. We redirect this backedge to EXIT_BB. */
6240 for (i = 0; i < n_region; i++)
6241 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6243 gcc_assert (single_succ_edge (region_copy[i]));
6244 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6245 PENDING_STMT (e) = NULL;
6246 for (psi = gsi_start_phis (exit_bb);
6247 !gsi_end_p (psi);
6248 gsi_next (&psi))
6250 phi = gsi_stmt (psi);
6251 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6252 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6255 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6256 PENDING_STMT (e) = NULL;
6258 /* Anything that is outside of the region, but was dominated by something
6259 inside needs to update dominance info. */
6260 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6261 doms.release ();
6262 /* Update the SSA web. */
6263 update_ssa (TODO_update_ssa);
6265 if (free_region_copy)
6266 free (region_copy);
6268 free_original_copy_tables ();
6269 return true;
6272 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6273 adding blocks when the dominator traversal reaches EXIT. This
6274 function silently assumes that ENTRY strictly dominates EXIT. */
6276 void
6277 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6278 vec<basic_block> *bbs_p)
6280 basic_block son;
6282 for (son = first_dom_son (CDI_DOMINATORS, entry);
6283 son;
6284 son = next_dom_son (CDI_DOMINATORS, son))
6286 bbs_p->safe_push (son);
6287 if (son != exit)
6288 gather_blocks_in_sese_region (son, exit, bbs_p);
6292 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6293 The duplicates are recorded in VARS_MAP. */
6295 static void
6296 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6297 tree to_context)
6299 tree t = *tp, new_t;
6300 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6302 if (DECL_CONTEXT (t) == to_context)
6303 return;
6305 bool existed;
6306 tree &loc = vars_map->get_or_insert (t, &existed);
6308 if (!existed)
6310 if (SSA_VAR_P (t))
6312 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6313 add_local_decl (f, new_t);
6315 else
6317 gcc_assert (TREE_CODE (t) == CONST_DECL);
6318 new_t = copy_node (t);
6320 DECL_CONTEXT (new_t) = to_context;
6322 loc = new_t;
6324 else
6325 new_t = loc;
6327 *tp = new_t;
6331 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6332 VARS_MAP maps old ssa names and var_decls to the new ones. */
6334 static tree
6335 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6336 tree to_context)
6338 tree new_name;
6340 gcc_assert (!virtual_operand_p (name));
6342 tree *loc = vars_map->get (name);
6344 if (!loc)
6346 tree decl = SSA_NAME_VAR (name);
6347 if (decl)
6349 replace_by_duplicate_decl (&decl, vars_map, to_context);
6350 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6351 decl, SSA_NAME_DEF_STMT (name));
6352 if (SSA_NAME_IS_DEFAULT_DEF (name))
6353 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6354 decl, new_name);
6356 else
6357 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6358 name, SSA_NAME_DEF_STMT (name));
6360 vars_map->put (name, new_name);
6362 else
6363 new_name = *loc;
6365 return new_name;
6368 struct move_stmt_d
6370 tree orig_block;
6371 tree new_block;
6372 tree from_context;
6373 tree to_context;
6374 hash_map<tree, tree> *vars_map;
6375 htab_t new_label_map;
6376 hash_map<void *, void *> *eh_map;
6377 bool remap_decls_p;
6380 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6381 contained in *TP if it has been ORIG_BLOCK previously and change the
6382 DECL_CONTEXT of every local variable referenced in *TP. */
6384 static tree
6385 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6387 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6388 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6389 tree t = *tp;
6391 if (EXPR_P (t))
6393 tree block = TREE_BLOCK (t);
6394 if (block == p->orig_block
6395 || (p->orig_block == NULL_TREE
6396 && block != NULL_TREE))
6397 TREE_SET_BLOCK (t, p->new_block);
6398 #ifdef ENABLE_CHECKING
6399 else if (block != NULL_TREE)
6401 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6402 block = BLOCK_SUPERCONTEXT (block);
6403 gcc_assert (block == p->orig_block);
6405 #endif
6407 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6409 if (TREE_CODE (t) == SSA_NAME)
6410 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6411 else if (TREE_CODE (t) == LABEL_DECL)
6413 if (p->new_label_map)
6415 struct tree_map in, *out;
6416 in.base.from = t;
6417 out = (struct tree_map *)
6418 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6419 if (out)
6420 *tp = t = out->to;
6423 DECL_CONTEXT (t) = p->to_context;
6425 else if (p->remap_decls_p)
6427 /* Replace T with its duplicate. T should no longer appear in the
6428 parent function, so this looks wasteful; however, it may appear
6429 in referenced_vars, and more importantly, as virtual operands of
6430 statements, and in alias lists of other variables. It would be
6431 quite difficult to expunge it from all those places. ??? It might
6432 suffice to do this for addressable variables. */
6433 if ((TREE_CODE (t) == VAR_DECL
6434 && !is_global_var (t))
6435 || TREE_CODE (t) == CONST_DECL)
6436 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6438 *walk_subtrees = 0;
6440 else if (TYPE_P (t))
6441 *walk_subtrees = 0;
6443 return NULL_TREE;
6446 /* Helper for move_stmt_r. Given an EH region number for the source
6447 function, map that to the duplicate EH regio number in the dest. */
6449 static int
6450 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6452 eh_region old_r, new_r;
6454 old_r = get_eh_region_from_number (old_nr);
6455 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6457 return new_r->index;
6460 /* Similar, but operate on INTEGER_CSTs. */
6462 static tree
6463 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6465 int old_nr, new_nr;
6467 old_nr = tree_to_shwi (old_t_nr);
6468 new_nr = move_stmt_eh_region_nr (old_nr, p);
6470 return build_int_cst (integer_type_node, new_nr);
6473 /* Like move_stmt_op, but for gimple statements.
6475 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6476 contained in the current statement in *GSI_P and change the
6477 DECL_CONTEXT of every local variable referenced in the current
6478 statement. */
6480 static tree
6481 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6482 struct walk_stmt_info *wi)
6484 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6485 gimple stmt = gsi_stmt (*gsi_p);
6486 tree block = gimple_block (stmt);
6488 if (block == p->orig_block
6489 || (p->orig_block == NULL_TREE
6490 && block != NULL_TREE))
6491 gimple_set_block (stmt, p->new_block);
6493 switch (gimple_code (stmt))
6495 case GIMPLE_CALL:
6496 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6498 tree r, fndecl = gimple_call_fndecl (stmt);
6499 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6500 switch (DECL_FUNCTION_CODE (fndecl))
6502 case BUILT_IN_EH_COPY_VALUES:
6503 r = gimple_call_arg (stmt, 1);
6504 r = move_stmt_eh_region_tree_nr (r, p);
6505 gimple_call_set_arg (stmt, 1, r);
6506 /* FALLTHRU */
6508 case BUILT_IN_EH_POINTER:
6509 case BUILT_IN_EH_FILTER:
6510 r = gimple_call_arg (stmt, 0);
6511 r = move_stmt_eh_region_tree_nr (r, p);
6512 gimple_call_set_arg (stmt, 0, r);
6513 break;
6515 default:
6516 break;
6519 break;
6521 case GIMPLE_RESX:
6523 int r = gimple_resx_region (stmt);
6524 r = move_stmt_eh_region_nr (r, p);
6525 gimple_resx_set_region (stmt, r);
6527 break;
6529 case GIMPLE_EH_DISPATCH:
6531 int r = gimple_eh_dispatch_region (stmt);
6532 r = move_stmt_eh_region_nr (r, p);
6533 gimple_eh_dispatch_set_region (stmt, r);
6535 break;
6537 case GIMPLE_OMP_RETURN:
6538 case GIMPLE_OMP_CONTINUE:
6539 break;
6540 default:
6541 if (is_gimple_omp (stmt))
6543 /* Do not remap variables inside OMP directives. Variables
6544 referenced in clauses and directive header belong to the
6545 parent function and should not be moved into the child
6546 function. */
6547 bool save_remap_decls_p = p->remap_decls_p;
6548 p->remap_decls_p = false;
6549 *handled_ops_p = true;
6551 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6552 move_stmt_op, wi);
6554 p->remap_decls_p = save_remap_decls_p;
6556 break;
6559 return NULL_TREE;
6562 /* Move basic block BB from function CFUN to function DEST_FN. The
6563 block is moved out of the original linked list and placed after
6564 block AFTER in the new list. Also, the block is removed from the
6565 original array of blocks and placed in DEST_FN's array of blocks.
6566 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6567 updated to reflect the moved edges.
6569 The local variables are remapped to new instances, VARS_MAP is used
6570 to record the mapping. */
6572 static void
6573 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6574 basic_block after, bool update_edge_count_p,
6575 struct move_stmt_d *d)
6577 struct control_flow_graph *cfg;
6578 edge_iterator ei;
6579 edge e;
6580 gimple_stmt_iterator si;
6581 unsigned old_len, new_len;
6583 /* Remove BB from dominance structures. */
6584 delete_from_dominance_info (CDI_DOMINATORS, bb);
6586 /* Move BB from its current loop to the copy in the new function. */
6587 if (current_loops)
6589 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6590 if (new_loop)
6591 bb->loop_father = new_loop;
6594 /* Link BB to the new linked list. */
6595 move_block_after (bb, after);
6597 /* Update the edge count in the corresponding flowgraphs. */
6598 if (update_edge_count_p)
6599 FOR_EACH_EDGE (e, ei, bb->succs)
6601 cfun->cfg->x_n_edges--;
6602 dest_cfun->cfg->x_n_edges++;
6605 /* Remove BB from the original basic block array. */
6606 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6607 cfun->cfg->x_n_basic_blocks--;
6609 /* Grow DEST_CFUN's basic block array if needed. */
6610 cfg = dest_cfun->cfg;
6611 cfg->x_n_basic_blocks++;
6612 if (bb->index >= cfg->x_last_basic_block)
6613 cfg->x_last_basic_block = bb->index + 1;
6615 old_len = vec_safe_length (cfg->x_basic_block_info);
6616 if ((unsigned) cfg->x_last_basic_block >= old_len)
6618 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6619 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6622 (*cfg->x_basic_block_info)[bb->index] = bb;
6624 /* Remap the variables in phi nodes. */
6625 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6627 gimple phi = gsi_stmt (si);
6628 use_operand_p use;
6629 tree op = PHI_RESULT (phi);
6630 ssa_op_iter oi;
6631 unsigned i;
6633 if (virtual_operand_p (op))
6635 /* Remove the phi nodes for virtual operands (alias analysis will be
6636 run for the new function, anyway). */
6637 remove_phi_node (&si, true);
6638 continue;
6641 SET_PHI_RESULT (phi,
6642 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6643 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6645 op = USE_FROM_PTR (use);
6646 if (TREE_CODE (op) == SSA_NAME)
6647 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6650 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6652 location_t locus = gimple_phi_arg_location (phi, i);
6653 tree block = LOCATION_BLOCK (locus);
6655 if (locus == UNKNOWN_LOCATION)
6656 continue;
6657 if (d->orig_block == NULL_TREE || block == d->orig_block)
6659 if (d->new_block == NULL_TREE)
6660 locus = LOCATION_LOCUS (locus);
6661 else
6662 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6663 gimple_phi_arg_set_location (phi, i, locus);
6667 gsi_next (&si);
6670 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6672 gimple stmt = gsi_stmt (si);
6673 struct walk_stmt_info wi;
6675 memset (&wi, 0, sizeof (wi));
6676 wi.info = d;
6677 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6679 if (gimple_code (stmt) == GIMPLE_LABEL)
6681 tree label = gimple_label_label (stmt);
6682 int uid = LABEL_DECL_UID (label);
6684 gcc_assert (uid > -1);
6686 old_len = vec_safe_length (cfg->x_label_to_block_map);
6687 if (old_len <= (unsigned) uid)
6689 new_len = 3 * uid / 2 + 1;
6690 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6693 (*cfg->x_label_to_block_map)[uid] = bb;
6694 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6696 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6698 if (uid >= dest_cfun->cfg->last_label_uid)
6699 dest_cfun->cfg->last_label_uid = uid + 1;
6702 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6703 remove_stmt_from_eh_lp_fn (cfun, stmt);
6705 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6706 gimple_remove_stmt_histograms (cfun, stmt);
6708 /* We cannot leave any operands allocated from the operand caches of
6709 the current function. */
6710 free_stmt_operands (cfun, stmt);
6711 push_cfun (dest_cfun);
6712 update_stmt (stmt);
6713 pop_cfun ();
6716 FOR_EACH_EDGE (e, ei, bb->succs)
6717 if (e->goto_locus != UNKNOWN_LOCATION)
6719 tree block = LOCATION_BLOCK (e->goto_locus);
6720 if (d->orig_block == NULL_TREE
6721 || block == d->orig_block)
6722 e->goto_locus = d->new_block ?
6723 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6724 LOCATION_LOCUS (e->goto_locus);
6728 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6729 the outermost EH region. Use REGION as the incoming base EH region. */
6731 static eh_region
6732 find_outermost_region_in_block (struct function *src_cfun,
6733 basic_block bb, eh_region region)
6735 gimple_stmt_iterator si;
6737 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6739 gimple stmt = gsi_stmt (si);
6740 eh_region stmt_region;
6741 int lp_nr;
6743 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6744 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6745 if (stmt_region)
6747 if (region == NULL)
6748 region = stmt_region;
6749 else if (stmt_region != region)
6751 region = eh_region_outermost (src_cfun, stmt_region, region);
6752 gcc_assert (region != NULL);
6757 return region;
6760 static tree
6761 new_label_mapper (tree decl, void *data)
6763 htab_t hash = (htab_t) data;
6764 struct tree_map *m;
6765 void **slot;
6767 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6769 m = XNEW (struct tree_map);
6770 m->hash = DECL_UID (decl);
6771 m->base.from = decl;
6772 m->to = create_artificial_label (UNKNOWN_LOCATION);
6773 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6774 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6775 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6777 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6778 gcc_assert (*slot == NULL);
6780 *slot = m;
6782 return m->to;
6785 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6786 subblocks. */
6788 static void
6789 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6790 tree to_context)
6792 tree *tp, t;
6794 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6796 t = *tp;
6797 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6798 continue;
6799 replace_by_duplicate_decl (&t, vars_map, to_context);
6800 if (t != *tp)
6802 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6804 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6805 DECL_HAS_VALUE_EXPR_P (t) = 1;
6807 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6808 *tp = t;
6812 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6813 replace_block_vars_by_duplicates (block, vars_map, to_context);
6816 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6817 from FN1 to FN2. */
6819 static void
6820 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6821 struct loop *loop)
6823 /* Discard it from the old loop array. */
6824 (*get_loops (fn1))[loop->num] = NULL;
6826 /* Place it in the new loop array, assigning it a new number. */
6827 loop->num = number_of_loops (fn2);
6828 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6830 /* Recurse to children. */
6831 for (loop = loop->inner; loop; loop = loop->next)
6832 fixup_loop_arrays_after_move (fn1, fn2, loop);
6835 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6836 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6837 single basic block in the original CFG and the new basic block is
6838 returned. DEST_CFUN must not have a CFG yet.
6840 Note that the region need not be a pure SESE region. Blocks inside
6841 the region may contain calls to abort/exit. The only restriction
6842 is that ENTRY_BB should be the only entry point and it must
6843 dominate EXIT_BB.
6845 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6846 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6847 to the new function.
6849 All local variables referenced in the region are assumed to be in
6850 the corresponding BLOCK_VARS and unexpanded variable lists
6851 associated with DEST_CFUN. */
6853 basic_block
6854 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6855 basic_block exit_bb, tree orig_block)
6857 vec<basic_block> bbs, dom_bbs;
6858 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6859 basic_block after, bb, *entry_pred, *exit_succ, abb;
6860 struct function *saved_cfun = cfun;
6861 int *entry_flag, *exit_flag;
6862 unsigned *entry_prob, *exit_prob;
6863 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6864 edge e;
6865 edge_iterator ei;
6866 htab_t new_label_map;
6867 hash_map<void *, void *> *eh_map;
6868 struct loop *loop = entry_bb->loop_father;
6869 struct loop *loop0 = get_loop (saved_cfun, 0);
6870 struct move_stmt_d d;
6872 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6873 region. */
6874 gcc_assert (entry_bb != exit_bb
6875 && (!exit_bb
6876 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6878 /* Collect all the blocks in the region. Manually add ENTRY_BB
6879 because it won't be added by dfs_enumerate_from. */
6880 bbs.create (0);
6881 bbs.safe_push (entry_bb);
6882 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6884 /* The blocks that used to be dominated by something in BBS will now be
6885 dominated by the new block. */
6886 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6887 bbs.address (),
6888 bbs.length ());
6890 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6891 the predecessor edges to ENTRY_BB and the successor edges to
6892 EXIT_BB so that we can re-attach them to the new basic block that
6893 will replace the region. */
6894 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6895 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6896 entry_flag = XNEWVEC (int, num_entry_edges);
6897 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6898 i = 0;
6899 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6901 entry_prob[i] = e->probability;
6902 entry_flag[i] = e->flags;
6903 entry_pred[i++] = e->src;
6904 remove_edge (e);
6907 if (exit_bb)
6909 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6910 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6911 exit_flag = XNEWVEC (int, num_exit_edges);
6912 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6913 i = 0;
6914 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6916 exit_prob[i] = e->probability;
6917 exit_flag[i] = e->flags;
6918 exit_succ[i++] = e->dest;
6919 remove_edge (e);
6922 else
6924 num_exit_edges = 0;
6925 exit_succ = NULL;
6926 exit_flag = NULL;
6927 exit_prob = NULL;
6930 /* Switch context to the child function to initialize DEST_FN's CFG. */
6931 gcc_assert (dest_cfun->cfg == NULL);
6932 push_cfun (dest_cfun);
6934 init_empty_tree_cfg ();
6936 /* Initialize EH information for the new function. */
6937 eh_map = NULL;
6938 new_label_map = NULL;
6939 if (saved_cfun->eh)
6941 eh_region region = NULL;
6943 FOR_EACH_VEC_ELT (bbs, i, bb)
6944 region = find_outermost_region_in_block (saved_cfun, bb, region);
6946 init_eh_for_function ();
6947 if (region != NULL)
6949 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6950 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6951 new_label_mapper, new_label_map);
6955 /* Initialize an empty loop tree. */
6956 struct loops *loops = ggc_cleared_alloc<struct loops> ();
6957 init_loops_structure (dest_cfun, loops, 1);
6958 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6959 set_loops_for_fn (dest_cfun, loops);
6961 /* Move the outlined loop tree part. */
6962 num_nodes = bbs.length ();
6963 FOR_EACH_VEC_ELT (bbs, i, bb)
6965 if (bb->loop_father->header == bb)
6967 struct loop *this_loop = bb->loop_father;
6968 struct loop *outer = loop_outer (this_loop);
6969 if (outer == loop
6970 /* If the SESE region contains some bbs ending with
6971 a noreturn call, those are considered to belong
6972 to the outermost loop in saved_cfun, rather than
6973 the entry_bb's loop_father. */
6974 || outer == loop0)
6976 if (outer != loop)
6977 num_nodes -= this_loop->num_nodes;
6978 flow_loop_tree_node_remove (bb->loop_father);
6979 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6980 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6983 else if (bb->loop_father == loop0 && loop0 != loop)
6984 num_nodes--;
6986 /* Remove loop exits from the outlined region. */
6987 if (loops_for_fn (saved_cfun)->exits)
6988 FOR_EACH_EDGE (e, ei, bb->succs)
6990 struct loops *l = loops_for_fn (saved_cfun);
6991 loop_exit **slot
6992 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
6993 NO_INSERT);
6994 if (slot)
6995 l->exits->clear_slot (slot);
7000 /* Adjust the number of blocks in the tree root of the outlined part. */
7001 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7003 /* Setup a mapping to be used by move_block_to_fn. */
7004 loop->aux = current_loops->tree_root;
7005 loop0->aux = current_loops->tree_root;
7007 pop_cfun ();
7009 /* Move blocks from BBS into DEST_CFUN. */
7010 gcc_assert (bbs.length () >= 2);
7011 after = dest_cfun->cfg->x_entry_block_ptr;
7012 hash_map<tree, tree> vars_map;
7014 memset (&d, 0, sizeof (d));
7015 d.orig_block = orig_block;
7016 d.new_block = DECL_INITIAL (dest_cfun->decl);
7017 d.from_context = cfun->decl;
7018 d.to_context = dest_cfun->decl;
7019 d.vars_map = &vars_map;
7020 d.new_label_map = new_label_map;
7021 d.eh_map = eh_map;
7022 d.remap_decls_p = true;
7024 FOR_EACH_VEC_ELT (bbs, i, bb)
7026 /* No need to update edge counts on the last block. It has
7027 already been updated earlier when we detached the region from
7028 the original CFG. */
7029 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7030 after = bb;
7033 loop->aux = NULL;
7034 loop0->aux = NULL;
7035 /* Loop sizes are no longer correct, fix them up. */
7036 loop->num_nodes -= num_nodes;
7037 for (struct loop *outer = loop_outer (loop);
7038 outer; outer = loop_outer (outer))
7039 outer->num_nodes -= num_nodes;
7040 loop0->num_nodes -= bbs.length () - num_nodes;
7042 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7044 struct loop *aloop;
7045 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7046 if (aloop != NULL)
7048 if (aloop->simduid)
7050 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7051 d.to_context);
7052 dest_cfun->has_simduid_loops = true;
7054 if (aloop->force_vectorize)
7055 dest_cfun->has_force_vectorize_loops = true;
7059 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7060 if (orig_block)
7062 tree block;
7063 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7064 == NULL_TREE);
7065 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7066 = BLOCK_SUBBLOCKS (orig_block);
7067 for (block = BLOCK_SUBBLOCKS (orig_block);
7068 block; block = BLOCK_CHAIN (block))
7069 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7070 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7073 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7074 &vars_map, dest_cfun->decl);
7076 if (new_label_map)
7077 htab_delete (new_label_map);
7078 if (eh_map)
7079 delete eh_map;
7081 /* Rewire the entry and exit blocks. The successor to the entry
7082 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7083 the child function. Similarly, the predecessor of DEST_FN's
7084 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7085 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7086 various CFG manipulation function get to the right CFG.
7088 FIXME, this is silly. The CFG ought to become a parameter to
7089 these helpers. */
7090 push_cfun (dest_cfun);
7091 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7092 if (exit_bb)
7093 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7094 pop_cfun ();
7096 /* Back in the original function, the SESE region has disappeared,
7097 create a new basic block in its place. */
7098 bb = create_empty_bb (entry_pred[0]);
7099 if (current_loops)
7100 add_bb_to_loop (bb, loop);
7101 for (i = 0; i < num_entry_edges; i++)
7103 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7104 e->probability = entry_prob[i];
7107 for (i = 0; i < num_exit_edges; i++)
7109 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7110 e->probability = exit_prob[i];
7113 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7114 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7115 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7116 dom_bbs.release ();
7118 if (exit_bb)
7120 free (exit_prob);
7121 free (exit_flag);
7122 free (exit_succ);
7124 free (entry_prob);
7125 free (entry_flag);
7126 free (entry_pred);
7127 bbs.release ();
7129 return bb;
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).

   Depending on how far FNDECL has progressed through the pipeline, this
   emits one of three forms: a CFG-based dump (one dump_bb per block), a
   flat GIMPLE-sequence dump (lowered but no CFG yet), or a GENERIC tree
   dump (body still in DECL_SAVED_TREE).  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Several of the printers below consult current_function_decl;
     restore it before every return.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump of a decl with no gimple body short-circuits everything.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Anonymous SSA names (no underlying VAR_DECL) are declared here
	 too, since no local decl covers them.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND prints its own braces.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
7291 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7293 DEBUG_FUNCTION void
7294 debug_function (tree fn, int flags)
7296 dump_function_to_file (fn, stderr, flags);
7300 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7302 static void
7303 print_pred_bbs (FILE *file, basic_block bb)
7305 edge e;
7306 edge_iterator ei;
7308 FOR_EACH_EDGE (e, ei, bb->preds)
7309 fprintf (file, "bb_%d ", e->src->index);
7313 /* Print on FILE the indexes for the successors of basic_block BB. */
7315 static void
7316 print_succ_bbs (FILE *file, basic_block bb)
7318 edge e;
7319 edge_iterator ei;
7321 FOR_EACH_EDGE (e, ei, bb->succs)
7322 fprintf (file, "bb_%d ", e->dest->index);
7325 /* Print to FILE the basic block BB following the VERBOSITY level. */
7327 void
7328 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7330 char *s_indent = (char *) alloca ((size_t) indent + 1);
7331 memset ((void *) s_indent, ' ', (size_t) indent);
7332 s_indent[indent] = '\0';
7334 /* Print basic_block's header. */
7335 if (verbosity >= 2)
7337 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7338 print_pred_bbs (file, bb);
7339 fprintf (file, "}, succs = {");
7340 print_succ_bbs (file, bb);
7341 fprintf (file, "})\n");
7344 /* Print basic_block's body. */
7345 if (verbosity >= 3)
7347 fprintf (file, "%s {\n", s_indent);
7348 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7349 fprintf (file, "%s }\n", s_indent);
7353 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* No header means the loop was deleted; nothing more to show.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body: its own blocks, then inner loops recursively.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Only blocks whose innermost containing loop is LOOP are printed
	 here; blocks of inner loops are handled by the recursion.  */
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7414 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7415 spaces. Following VERBOSITY level this outputs the contents of the
7416 loop, or just its structure. */
7418 static void
7419 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7420 int verbosity)
7422 if (loop == NULL)
7423 return;
7425 print_loop (file, loop, indent, verbosity);
7426 print_loop_and_siblings (file, loop->next, indent, verbosity);
7429 /* Follow a CFG edge from the entry point of the program, and on entry
7430 of a loop, pretty print the loop structure on FILE. */
7432 void
7433 print_loops (FILE *file, int verbosity)
7435 basic_block bb;
7437 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7438 if (bb && bb->loop_father)
7439 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7442 /* Dump a loop. */
7444 DEBUG_FUNCTION void
7445 debug (struct loop &ref)
7447 print_loop (stderr, &ref, 0, /*verbosity*/0);
7450 DEBUG_FUNCTION void
7451 debug (struct loop *ptr)
7453 if (ptr)
7454 debug (*ptr);
7455 else
7456 fprintf (stderr, "<nil>\n");
7459 /* Dump a loop verbosely. */
7461 DEBUG_FUNCTION void
7462 debug_verbose (struct loop &ref)
7464 print_loop (stderr, &ref, 0, /*verbosity*/3);
7467 DEBUG_FUNCTION void
7468 debug_verbose (struct loop *ptr)
7470 if (ptr)
7471 debug (*ptr);
7472 else
7473 fprintf (stderr, "<nil>\n");
7477 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7479 DEBUG_FUNCTION void
7480 debug_loops (int verbosity)
7482 print_loops (stderr, verbosity);
7485 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7487 DEBUG_FUNCTION void
7488 debug_loop (struct loop *loop, int verbosity)
7490 print_loop (stderr, loop, 0, verbosity);
7493 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7494 level. */
7496 DEBUG_FUNCTION void
7497 debug_loop_num (unsigned num, int verbosity)
7499 debug_loop (get_loop (cfun, num), verbosity);
7502 /* Return true if BB ends with a call, possibly followed by some
7503 instructions that must stay with the call. Return false,
7504 otherwise. */
7506 static bool
7507 gimple_block_ends_with_call_p (basic_block bb)
7509 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7510 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7514 /* Return true if BB ends with a conditional branch. Return false,
7515 otherwise. */
7517 static bool
7518 gimple_block_ends_with_condjump_p (const_basic_block bb)
7520 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7521 return (stmt && gimple_code (stmt) == GIMPLE_COND);
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Well-behaved builtins (nothrow, single return) never need a fake
     edge -- except for fork, see below.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any call that might return needs a fake edge.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call only needs one if it still has a real
	 (EH/abnormal) successor edge.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asms and asms with no outputs can have arbitrary effects.  */
  if (gimple_asm asm_stmt = dyn_cast <gimple_asm> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces a block split at commit time, keeping
		 the fallthru and fake edges distinct.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's statements backwards so block splits below do
	 not disturb the iteration.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects the successors of the removed blocks that survive
	 (i.e. the dominance frontier of the removed region).  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7830 /* Purge dead EH edges from basic block BB. */
7832 bool
7833 gimple_purge_dead_eh_edges (basic_block bb)
7835 bool changed = false;
7836 edge e;
7837 edge_iterator ei;
7838 gimple stmt = last_stmt (bb);
7840 if (stmt && stmt_can_throw_internal (stmt))
7841 return false;
7843 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7845 if (e->flags & EDGE_EH)
7847 remove_edge_and_dominated_blocks (e);
7848 changed = true;
7850 else
7851 ei_next (&ei);
7854 return changed;
7857 /* Purge dead EH edges from basic block listed in BLOCKS. */
7859 bool
7860 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7862 bool changed = false;
7863 unsigned i;
7864 bitmap_iterator bi;
7866 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7868 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7870 /* Earlier gimple_purge_dead_eh_edges could have removed
7871 this basic block already. */
7872 gcc_assert (bb || changed);
7873 if (bb != NULL)
7874 changed |= gimple_purge_dead_eh_edges (bb);
7877 return changed;
7880 /* Purge dead abnormal call edges from basic block BB. */
7882 bool
7883 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7885 bool changed = false;
7886 edge e;
7887 edge_iterator ei;
7888 gimple stmt = last_stmt (bb);
7890 if (!cfun->has_nonlocal_label
7891 && !cfun->calls_setjmp)
7892 return false;
7894 if (stmt && stmt_can_make_abnormal_goto (stmt))
7895 return false;
7897 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7899 if (e->flags & EDGE_ABNORMAL)
7901 if (e->flags & EDGE_FALLTHRU)
7902 e->flags &= ~EDGE_ABNORMAL;
7903 else
7904 remove_edge_and_dominated_blocks (e);
7905 changed = true;
7907 else
7908 ei_next (&ei);
7911 return changed;
7914 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7916 bool
7917 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7919 bool changed = false;
7920 unsigned i;
7921 bitmap_iterator bi;
7923 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7925 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7927 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7928 this basic block already. */
7929 gcc_assert (bb || changed);
7930 if (bb != NULL)
7931 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7934 return changed;
7937 /* This function is called whenever a new edge is created or
7938 redirected. */
7940 static void
7941 gimple_execute_on_growing_pred (edge e)
7943 basic_block bb = e->dest;
7945 if (!gimple_seq_empty_p (phi_nodes (bb)))
7946 reserve_phi_args_for_new_edge (bb);
7949 /* This function is called immediately before edge E is removed from
7950 the edge vector E->dest->preds. */
7952 static void
7953 gimple_execute_on_shrinking_pred (edge e)
7955 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7956 remove_phi_args (e);
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple_phi phi1, phi2;
  gimple_phi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two PHI chains are walked in lockstep; SECOND's argument on E2
     is copied to FIRST's PHI as the argument for E.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8000 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8001 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8002 the destination of the ELSE part. */
8004 static void
8005 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8006 basic_block second_head ATTRIBUTE_UNUSED,
8007 basic_block cond_bb, void *cond_e)
8009 gimple_stmt_iterator gsi;
8010 gimple new_cond_expr;
8011 tree cond_expr = (tree) cond_e;
8012 edge e0;
8014 /* Build new conditional expr */
8015 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8016 NULL_TREE, NULL_TREE);
8018 /* Add new cond in cond_bb. */
8019 gsi = gsi_last_bb (cond_bb);
8020 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8022 /* Adjust edges appropriately to connect new head with first head
8023 as well as second head. */
8024 e0 = single_succ_edge (cond_bb);
8025 e0->flags &= ~EDGE_FALLTHRU;
8026 e0->flags |= EDGE_FALSE_VALUE;
8030 /* Do book-keeping of basic block BB for the profile consistency checker.
8031 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8032 then do post-pass accounting. Store the counting in RECORD. */
8033 static void
8034 gimple_account_profile_record (basic_block bb, int after_pass,
8035 struct profile_record *record)
8037 gimple_stmt_iterator i;
8038 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8040 record->size[after_pass]
8041 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8042 if (profile_status_for_fn (cfun) == PROFILE_READ)
8043 record->time[after_pass]
8044 += estimate_num_insns (gsi_stmt (i),
8045 &eni_time_weights) * bb->count;
8046 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8047 record->time[after_pass]
8048 += estimate_num_insns (gsi_stmt (i),
8049 &eni_time_weights) * bb->frequency;
/* CFG hook table for the GIMPLE representation.  The generic CFG code
   in cfghooks.c dispatches through this table; the initializer is
   positional, so the slot order must match struct cfg_hooks exactly --
   each trailing comment names the hook the entry implements.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb, /* dump_bb */
  gimple_dump_bb_for_graph, /* dump_bb_for_graph */
  create_bb, /* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p, /* can_remove_branch_p */
  remove_bb, /* delete_basic_block */
  gimple_split_block, /* split_block */
  gimple_move_block_after, /* move_block_after */
  gimple_can_merge_blocks_p, /* can_merge_blocks_p */
  gimple_merge_blocks, /* merge_blocks */
  gimple_predict_edge, /* predict_edge */
  gimple_predicted_by_p, /* predicted_by_p */
  gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
  gimple_duplicate_bb, /* duplicate_block */
  gimple_split_edge, /* split_edge */
  gimple_make_forwarder_block, /* make_forward_block */
  NULL, /* tidy_fallthru_edge */
  NULL, /* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add, /* flow_call_edges_add */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, /* flush_pending_stmts */
  gimple_empty_block_p, /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,
};
/* Split all critical edges.  Returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split if the source block actually ends in a
		 control-flow statement other than a (builtin) return.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass metadata for the "crited" critical-edge-splitting pass.  */

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: delegates all work to split_critical_edges.  */

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Clonable so the pass can appear more than once in the pipeline.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory function used by the pass manager.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
8176 /* Build a ternary operation and gimplify it. Emit code before GSI.
8177 Return the gimple_val holding the result. */
8179 tree
8180 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8181 tree type, tree a, tree b, tree c)
8183 tree ret;
8184 location_t loc = gimple_location (gsi_stmt (*gsi));
8186 ret = fold_build3_loc (loc, code, type, a, b, c);
8187 STRIP_NOPS (ret);
8189 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8190 GSI_SAME_STMT);
8193 /* Build a binary operation and gimplify it. Emit code before GSI.
8194 Return the gimple_val holding the result. */
8196 tree
8197 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8198 tree type, tree a, tree b)
8200 tree ret;
8202 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8203 STRIP_NOPS (ret);
8205 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8206 GSI_SAME_STMT);
8209 /* Build a unary operation and gimplify it. Emit code before GSI.
8210 Return the gimple_val holding the result. */
8212 tree
8213 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8214 tree a)
8216 tree ret;
8218 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8219 STRIP_NOPS (ret);
8221 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8222 GSI_SAME_STMT);
8227 /* Given a basic block B which ends with a conditional and has
8228 precisely two successors, determine which of the edges is taken if
8229 the conditional is true and which is taken if the conditional is
8230 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8232 void
8233 extract_true_false_edges_from_block (basic_block b,
8234 edge *true_edge,
8235 edge *false_edge)
8237 edge e = EDGE_SUCC (b, 0);
8239 if (e->flags & EDGE_TRUE_VALUE)
8241 *true_edge = e;
8242 *false_edge = EDGE_SUCC (b, 1);
8244 else
8246 *false_edge = e;
8247 *true_edge = EDGE_SUCC (b, 1);
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass that diagnoses (a) noreturn functions that can return and
   (b) non-void functions whose control flow can reach the end
   without returning a value.  */

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE
     on the decl marks a noreturn function.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      /* Try to point the warning at an actual return statement.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace

/* Factory function used by the pass manager.  */

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8343 /* Walk a gimplified function and warn for functions whose return value is
8344 ignored and attribute((warn_unused_result)) is set. This is done before
8345 inlining, so we don't have to worry about that. */
8347 static void
8348 do_warn_unused_result (gimple_seq seq)
8350 tree fdecl, ftype;
8351 gimple_stmt_iterator i;
8353 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8355 gimple g = gsi_stmt (i);
8357 switch (gimple_code (g))
8359 case GIMPLE_BIND:
8360 do_warn_unused_result (gimple_bind_body (as_a <gimple_bind >(g)));
8361 break;
8362 case GIMPLE_TRY:
8363 do_warn_unused_result (gimple_try_eval (g));
8364 do_warn_unused_result (gimple_try_cleanup (g));
8365 break;
8366 case GIMPLE_CATCH:
8367 do_warn_unused_result (gimple_catch_handler (g));
8368 break;
8369 case GIMPLE_EH_FILTER:
8370 do_warn_unused_result (gimple_eh_filter_failure (g));
8371 break;
8373 case GIMPLE_CALL:
8374 if (gimple_call_lhs (g))
8375 break;
8376 if (gimple_call_internal_p (g))
8377 break;
8379 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8380 LHS. All calls whose value is ignored should be
8381 represented like this. Look for the attribute. */
8382 fdecl = gimple_call_fndecl (g);
8383 ftype = gimple_call_fntype (g);
8385 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8387 location_t loc = gimple_location (g);
8389 if (fdecl)
8390 warning_at (loc, OPT_Wunused_result,
8391 "ignoring return value of %qD, "
8392 "declared with attribute warn_unused_result",
8393 fdecl);
8394 else
8395 warning_at (loc, OPT_Wunused_result,
8396 "ignoring return value of function "
8397 "declared with attribute warn_unused_result");
8399 break;
8401 default:
8402 /* Not a container, not a call, or a call whose value is used. */
8403 break;
8408 namespace {
8410 const pass_data pass_data_warn_unused_result =
8412 GIMPLE_PASS, /* type */
8413 "*warn_unused_result", /* name */
8414 OPTGROUP_NONE, /* optinfo_flags */
8415 TV_NONE, /* tv_id */
8416 PROP_gimple_any, /* properties_required */
8417 0, /* properties_provided */
8418 0, /* properties_destroyed */
8419 0, /* todo_flags_start */
8420 0, /* todo_flags_finish */
8423 class pass_warn_unused_result : public gimple_opt_pass
8425 public:
8426 pass_warn_unused_result (gcc::context *ctxt)
8427 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8430 /* opt_pass methods: */
8431 virtual bool gate (function *) { return flag_warn_unused_result; }
8432 virtual unsigned int execute (function *)
8434 do_warn_unused_result (gimple_body (current_function_decl));
8435 return 0;
8438 }; // class pass_warn_unused_result
8440 } // anon namespace
8442 gimple_opt_pass *
8443 make_pass_warn_unused_result (gcc::context *ctxt)
8445 return new pass_warn_unused_result (ctxt);
8448 /* IPA passes, compilation of earlier functions or inlining
8449 might have changed some properties, such as marked functions nothrow,
8450 pure, const or noreturn.
8451 Remove redundant edges and basic blocks, and create new ones if necessary.
8453 This pass can't be executed as stand alone pass from pass manager, because
8454 in between inlining and this fixup the verify_flow_info would fail. */
8456 unsigned int
8457 execute_fixup_cfg (void)
8459 basic_block bb;
8460 gimple_stmt_iterator gsi;
8461 int todo = 0;
8462 gcov_type count_scale;
8463 edge e;
8464 edge_iterator ei;
8466 count_scale
8467 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8468 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8470 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8471 cgraph_node::get (current_function_decl)->count;
8472 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8473 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8474 count_scale);
8476 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8477 e->count = apply_scale (e->count, count_scale);
8479 FOR_EACH_BB_FN (bb, cfun)
8481 bb->count = apply_scale (bb->count, count_scale);
8482 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8484 gimple stmt = gsi_stmt (gsi);
8485 tree decl = is_gimple_call (stmt)
8486 ? gimple_call_fndecl (stmt)
8487 : NULL;
8488 if (decl)
8490 int flags = gimple_call_flags (stmt);
8491 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8493 if (gimple_purge_dead_abnormal_call_edges (bb))
8494 todo |= TODO_cleanup_cfg;
8496 if (gimple_in_ssa_p (cfun))
8498 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8499 update_stmt (stmt);
8503 if (flags & ECF_NORETURN
8504 && fixup_noreturn_call (stmt))
8505 todo |= TODO_cleanup_cfg;
8508 /* Remove stores to variables we marked write-only.
8509 Keep access when store has side effect, i.e. in case when source
8510 is volatile. */
8511 if (gimple_store_p (stmt)
8512 && !gimple_has_side_effects (stmt))
8514 tree lhs = get_base_address (gimple_get_lhs (stmt));
8516 if (TREE_CODE (lhs) == VAR_DECL
8517 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8518 && varpool_node::get (lhs)->writeonly)
8520 unlink_stmt_vdef (stmt);
8521 gsi_remove (&gsi, true);
8522 release_defs (stmt);
8523 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8524 continue;
8527 /* For calls we can simply remove LHS when it is known
8528 to be write-only. */
8529 if (is_gimple_call (stmt)
8530 && gimple_get_lhs (stmt))
8532 tree lhs = get_base_address (gimple_get_lhs (stmt));
8534 if (TREE_CODE (lhs) == VAR_DECL
8535 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8536 && varpool_node::get (lhs)->writeonly)
8538 gimple_call_set_lhs (stmt, NULL);
8539 update_stmt (stmt);
8540 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8544 if (maybe_clean_eh_stmt (stmt)
8545 && gimple_purge_dead_eh_edges (bb))
8546 todo |= TODO_cleanup_cfg;
8547 gsi_next (&gsi);
8550 FOR_EACH_EDGE (e, ei, bb->succs)
8551 e->count = apply_scale (e->count, count_scale);
8553 /* If we have a basic block with no successors that does not
8554 end with a control statement or a noreturn call end it with
8555 a call to __builtin_unreachable. This situation can occur
8556 when inlining a noreturn call that does in fact return. */
8557 if (EDGE_COUNT (bb->succs) == 0)
8559 gimple stmt = last_stmt (bb);
8560 if (!stmt
8561 || (!is_ctrl_stmt (stmt)
8562 && (!is_gimple_call (stmt)
8563 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8565 if (stmt && is_gimple_call (stmt))
8566 gimple_call_set_ctrl_altering (stmt, false);
8567 stmt = gimple_build_call
8568 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8569 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8570 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8574 if (count_scale != REG_BR_PROB_BASE)
8575 compute_function_frequency ();
8577 /* Dump a textual representation of the flowgraph. */
8578 if (dump_file)
8579 gimple_dump_cfg (dump_file, dump_flags);
8581 if (current_loops
8582 && (todo & TODO_cleanup_cfg))
8583 loops_state_set (LOOPS_NEED_FIXUP);
8585 return todo;
8588 namespace {
8590 const pass_data pass_data_fixup_cfg =
8592 GIMPLE_PASS, /* type */
8593 "*free_cfg_annotations", /* name */
8594 OPTGROUP_NONE, /* optinfo_flags */
8595 TV_NONE, /* tv_id */
8596 PROP_cfg, /* properties_required */
8597 0, /* properties_provided */
8598 0, /* properties_destroyed */
8599 0, /* todo_flags_start */
8600 0, /* todo_flags_finish */
8603 class pass_fixup_cfg : public gimple_opt_pass
8605 public:
8606 pass_fixup_cfg (gcc::context *ctxt)
8607 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8610 /* opt_pass methods: */
8611 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8612 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8614 }; // class pass_fixup_cfg
8616 } // anon namespace
8618 gimple_opt_pass *
8619 make_pass_fixup_cfg (gcc::context *ctxt)
8621 return new pass_fixup_cfg (ctxt);
8624 /* Garbage collection support for edge_def. */
8626 extern void gt_ggc_mx (tree&);
8627 extern void gt_ggc_mx (gimple&);
8628 extern void gt_ggc_mx (rtx&);
8629 extern void gt_ggc_mx (basic_block&);
8631 static void
8632 gt_ggc_mx (rtx_insn *& x)
8634 if (x)
8635 gt_ggc_mx_rtx_def ((void *) x);
8638 void
8639 gt_ggc_mx (edge_def *e)
8641 tree block = LOCATION_BLOCK (e->goto_locus);
8642 gt_ggc_mx (e->src);
8643 gt_ggc_mx (e->dest);
8644 if (current_ir_type () == IR_GIMPLE)
8645 gt_ggc_mx (e->insns.g);
8646 else
8647 gt_ggc_mx (e->insns.r);
8648 gt_ggc_mx (block);
8651 /* PCH support for edge_def. */
8653 extern void gt_pch_nx (tree&);
8654 extern void gt_pch_nx (gimple&);
8655 extern void gt_pch_nx (rtx&);
8656 extern void gt_pch_nx (basic_block&);
8658 static void
8659 gt_pch_nx (rtx_insn *& x)
8661 if (x)
8662 gt_pch_nx_rtx_def ((void *) x);
8665 void
8666 gt_pch_nx (edge_def *e)
8668 tree block = LOCATION_BLOCK (e->goto_locus);
8669 gt_pch_nx (e->src);
8670 gt_pch_nx (e->dest);
8671 if (current_ir_type () == IR_GIMPLE)
8672 gt_pch_nx (e->insns.g);
8673 else
8674 gt_pch_nx (e->insns.r);
8675 gt_pch_nx (block);
8678 void
8679 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8681 tree block = LOCATION_BLOCK (e->goto_locus);
8682 op (&(e->src), cookie);
8683 op (&(e->dest), cookie);
8684 if (current_ir_type () == IR_GIMPLE)
8685 op (&(e->insns.g), cookie);
8686 else
8687 op (&(e->insns.r), cookie);
8688 op (&(block), cookie);