2013-11-21 Edward Smith-Rowland <3dw4rd@verizon.net>
[official-gcc.git] / gcc / tree-cfg.c
blobf64fc52065650cae09feadf7ce3bb942657a9c8d
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "ggc.h"
35 #include "gimple-pretty-print.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
41 #include "cgraph.h"
42 #include "tree-cfg.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 #include "stringpool.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "tree-dump.h"
54 #include "tree-pass.h"
55 #include "diagnostic-core.h"
56 #include "except.h"
57 #include "cfgloop.h"
58 #include "tree-ssa-propagate.h"
59 #include "value-prof.h"
60 #include "pointer-set.h"
61 #include "tree-inline.h"
62 #include "target.h"
63 #include "tree-ssa-live.h"
64 #include "omp-low.h"
65 #include "tree-cfgcleanup.h"
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
70 /* Local declarations. */
72 /* Initial capacity for the basic block array. */
73 static const int initial_cfg_capacity = 20;
75 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
76 which use a particular edge. The CASE_LABEL_EXPRs are chained together
77 via their CASE_CHAIN field, which we clear after we're done with the
78 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
80 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
81 update the case vector in response to edge redirections.
83 Right now this table is set up and torn down at key points in the
84 compilation process. It would be nice if we could make the table
85 more persistent. The key is getting notification of changes to
86 the CFG (particularly edge removal, creation and redirection). */
88 static struct pointer_map_t *edge_to_cases;
90 /* If we record edge_to_cases, this bitmap will hold indexes
91 of basic blocks that end in a GIMPLE_SWITCH which we touched
92 due to edge manipulations. */
94 static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels discarded because they were merged into the
     leading label of their basic block.  */
  long num_merged_labels;
};

/* Accumulated statistics for the current CFG build; zeroed at the
   start of build_gimple_cfg.  */
static struct cfg_stats_d cfg_stats;
104 /* Nonzero if we found a computed goto while building basic blocks. */
105 static bool found_computed_goto;
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* The source location; hashed and compared by its line number only.  */
  location_t locus;
  /* The last discriminator value handed out for this line.  */
  int discriminator;
};
/* Hashtable helpers.  */

/* Hash traits for the locus -> discriminator map; entries are freed
   with free () on removal (typed_free_remove).  */
struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  /* Only the line matters: discriminators exist to distinguish
     different basic blocks that share one source line.  */
  return LOCATION_LINE (item->locus);
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Two entries are
   equal when they refer to the same source line, matching the hash
   function above.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
142 static hash_table <locus_discrim_hasher> discriminator_per_locus;
144 /* Basic blocks and flowgraphs. */
145 static void make_blocks (gimple_seq);
146 static void factor_computed_gotos (void);
148 /* Edges. */
149 static void make_edges (void);
150 static void assign_discriminators (void);
151 static void make_cond_expr_edges (basic_block);
152 static void make_gimple_switch_edges (basic_block);
153 static void make_goto_expr_edges (basic_block);
154 static void make_gimple_asm_edges (basic_block);
155 static edge gimple_redirect_edge_and_branch (edge, basic_block);
156 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
157 static unsigned int split_critical_edges (void);
159 /* Various helpers. */
160 static inline bool stmt_starts_bb_p (gimple, gimple);
161 static int gimple_verify_flow_info (void);
162 static void gimple_make_forwarder_block (edge);
163 static gimple first_non_label_stmt (basic_block);
164 static bool verify_gimple_transaction (gimple);
166 /* Flowgraph optimization and cleanup. */
167 static void gimple_merge_blocks (basic_block, basic_block);
168 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
169 static void remove_bb (basic_block);
170 static edge find_taken_edge_computed_goto (basic_block, tree);
171 static edge find_taken_edge_cond_expr (basic_block, tree);
172 static edge find_taken_edge_switch_expr (basic_block, tree);
173 static tree find_case_label_for_value (gimple, tree);
/* Build a minimal, empty CFG for function FN: just the fixed ENTRY and
   EXIT blocks linked to each other, with the basic-block and
   label-to-block maps pre-allocated to INITIAL_CFG_CAPACITY.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks into the block array ...  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FN (fn));

  /* ... and chain them so ENTRY falls straight through to EXIT.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Build a minimal, empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
209 /*---------------------------------------------------------------------------
210 Create basic blocks
211 ---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* make_blocks sets this flag when it sees a computed goto.  */
  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  /* Rerun label cleanup: edge creation may have orphaned more labels.  */
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}
/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  /* First pass: handle annotations sitting just before each loop's
     header condition; these really apply to the loop.  */
  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  /* Look at the statement immediately before the condition.  */
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi))
	    continue;
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;
	  if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	      != annot_expr_ivdep_kind)
	    continue;
	  /* Replace the annotation call with a plain copy of its first
	     argument into its LHS, and record the ivdep promise.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	  loop->safelen = INT_MAX;
	}
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
	continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	  != annot_expr_ivdep_kind)
	continue;
      /* Any annotation left over was not attached to a recognized
	 loop condition; warn and strip it the same way.  */
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
		  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Pass driver: build the CFG for the current function, discard the
   now-redundant gimple body, clean up the fresh CFG, and process
   loop annotations.  Always returns 0 (no extra TODOs).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the body pointer.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass metadata for the "cfg" pass; provides PROP_cfg and PROP_loops.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the CFG pass.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
382 /* Return true if T is a computed goto. */
384 static bool
385 computed_goto_p (gimple t)
387 return (gimple_code (t) == GIMPLE_GOTO
388 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Find the successor that is NOT E's destination.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* __builtin_unreachable () blocks have no outgoing edges.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  /* Skip over any debug statements preceding the call.  */
	  if (is_gimple_debug (stmt))
	    {
	      gsi_next_nondebug (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
/* Build a flowgraph for the sequence of stmts SEQ: walk SEQ once,
   splitting it into basic blocks at block-starting and block-ending
   statements, and assigning each statement to its block.  Also notes
   computed gotos in FOUND_COMPUTED_GOTO for later factoring.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Mark complex/vector temporaries as GIMPLE registers so
		 they are not forced into memory.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-null, is the gimple_seq the block will hold; E must be null
   (edge argument is unused by the gimple CFG hooks).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      /* Grow by 25% to amortize reallocation cost.  */
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}
619 /*---------------------------------------------------------------------------
620 Edge creation
621 ---------------------------------------------------------------------------*/
/* Fold COND_EXPR_COND of each COND_EXPR.  Conditions that fold to a
   constant are rewritten to an always-true or always-false
   GIMPLE_COND so CFG cleanup can remove the dead arm.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings: only emit them if the fold
	     actually decided the condition.  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
				  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
/* Join all the blocks in the flowgraph: walk every block and create
   its outgoing edges based on the kind of its last statement, then
   fold trivially-constant conditions.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0), fallthru =
		  false;
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region);
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  /* First request for this line: allocate and zero-init an entry.  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  /* Hand out the next value; discriminators for a line start at 1.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
801 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
803 static bool
804 same_line_p (location_t locus1, location_t locus2)
806 expanded_location from, to;
808 if (locus1 == locus2)
809 return true;
811 from = expand_location (locus1);
812 to = expand_location (locus2);
814 if (from.line != to.line)
815 return false;
816 if (from.file == to.file)
817 return true;
818 return (from.file != NULL
819 && to.file != NULL
820 && filename_cmp (from.file, to.file) == 0);
/* Assign discriminators to each basic block.  Successor blocks that
   share the source line of BB's last statement are given distinct
   discriminators for accurate sample-based profiling.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  /* Only act when the successor shares BB's source line.  */
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      /* If the destination already has a discriminator, give
		 one to BB instead so the pair stays distinguishable.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* The second make_edge may return NULL when both labels resolve to
     the same block and the edge already exists, hence the guard
     (absent on the first make_edge, which cannot collide).  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  Always returns true so traversal continues.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      /* Save the link before clearing it.  */
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}
/* Start recording information mapping edges to case labels.
   Allocates the edge_to_cases map and the bitmap of touched
   GIMPLE_SWITCH blocks; must be paired with
   end_recording_case_labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
/* Return nonzero if we are recording information for case labels,
   i.e. we are between start_recording_case_labels and
   end_recording_case_labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Also re-groups case
   labels of any GIMPLE_SWITCH whose edges were touched.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear CASE_CHAINs and free the map entries before dropping it.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* The block may have been removed since it was marked.  */
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB: one
   (possibly shared) edge to the block of each case label.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST in function IFUN, or NULL
   if the label is not mapped.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      /* Scan the block's leading labels for potential targets.  */
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple label_stmt = gsi_stmt (gsi);
	  tree target;

	  if (gimple_code (label_stmt) != GIMPLE_LABEL)
	    break;

	  target = gimple_label_label (label_stmt);

	  /* Make an edge to every label block that has been marked as a
	     potential target for a computed goto or a non-local goto.  */
	  if ((FORCED_LABEL (target) && !for_call)
	      || (DECL_NONLOCAL (target) && for_call))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
      /* GSI now rests on the first non-label statement (or where the
	 loop stopped); skip a debug stmt to reach the real one.  */
      if (!gsi_end_p (gsi)
	  && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	{
	  /* Make an edge to every setjmp-like call.  */
	  gimple call_stmt = gsi_stmt (gsi);
	  if (is_gimple_call (call_stmt)
	      && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	}
    }
}
/* Create edges for a goto statement at block BB.  A simple goto is
   turned into a fallthru edge and removed; a computed goto gets
   abnormal edges to every eligible label.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The edge now encodes the jump; the statement is redundant.  */
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
/* Create edges for an asm statement with labels at block BB: one edge
   per asm goto label operand.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
1122 /*---------------------------------------------------------------------------
1123 Flowgraph analysis
1124 ---------------------------------------------------------------------------*/
1126 /* Cleanup useless labels in basic blocks. This is something we wish
1127 to do early because it allows us to group case labels before creating
1128 the edges for the CFG, and it speeds up block statement iterators in
1129 all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */
/* A map from basic block index to the leading label of that block.
   Allocated with XCNEWVEC and freed at the end of cleanup_dead_labels;
   filled lazily by main_block_label for labels discovered late.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1144 /* Given LABEL return the first label in the same basic block. */
1146 static tree
1147 main_block_label (tree label)
1149 basic_block bb = label_to_block (label);
1150 tree main_label = label_for_bb[bb->index].label;
1152 /* label_to_block possibly inserted undefined label into the chain. */
1153 if (!main_label)
1155 label_for_bb[bb->index].label = label;
1156 main_label = label;
1159 label_for_bb[bb->index].used = true;
1160 return main_label;
/* Clean up redundant labels within the exception tree.  Redirects every
   label recorded in the EH data structures (landing pads, catch handlers,
   allowed-exceptions filters) to the leading label of its block, so the
   duplicates become removable by cleanup_dead_labels.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do for functions without EH constructs.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's label; keep the EH_LANDING_PAD_NR
     back-pointers consistent when the label changes.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels stored in each EH region, by region kind.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label of their own.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* Indexed by block index; zero-initialized so .label starts NULL and
     .used starts false.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Both arms of the conditional may reference a label.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  /* Computed gotos have no syntactic label to redirect.  */
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	    break;
	  }

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the leading label, user labels, non-local-goto targets,
	     and address-taken labels; delete everything else.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Index 0 is the default label, so start at 1.  Dropped labels are
     marked NULL_TREE and compacted away in the final loop below.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      /* CASE_HIGH is NULL for a single-valued case; treat it as the
	 degenerate range [low, low].  */
      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  /* bhp1 = base_high + 1: the only low value that would make
	     the next case's range adjacent to ours.  */
	  double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      /* Skip slots nulled out above; J always trails at the next
	 surviving label.  */
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1471 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1472 and scan the sorted vector of cases. Combine the ones jumping to the
1473 same label. */
1475 void
1476 group_case_labels (void)
1478 basic_block bb;
1480 FOR_EACH_BB (bb)
1482 gimple stmt = last_stmt (bb);
1483 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1484 group_case_labels_stmt (stmt);
/* Checks whether we can merge block B into block A.  Returns true only
   when A's single successor is B, B's single predecessor is A, and no
   label, PHI, EH, loop-structure, or debug-location constraint forbids
   the merge.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must fall through to exactly one successor, over a normal edge.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  /* B must be reached only from A, and must not be the exit block.  */
  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if an adjacent statement on either side
	 of the edge already carries the same location.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  NAME is an SSA name; on return it
   has zero uses.  Non-PHI statements whose operands changed are folded,
   have stale EH edges purged, and are updated; loop structures that
   mention NAME are updated as well.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; refetch it from the
	     iterator so the EH update below sees the new statement.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  The caller must have verified the merge
   is legal with gimple_can_merge_blocks_p; in particular A is B's single
   predecessor, which the PHI handling below relies on.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A deleted landing-pad label must be unhooked from its pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1781 /* Return the one of two successors of BB that is not reachable by a
1782 complex edge, if there is one. Else, return BB. We use
1783 this in optimizations that use post-dominators for their heuristics,
1784 to catch the cases in C++ where function calls are involved. */
1786 basic_block
1787 single_noncomplex_succ (basic_block bb)
1789 edge e0, e1;
1790 if (EDGE_COUNT (bb->succs) != 2)
1791 return bb;
1793 e0 = EDGE_SUCC (bb, 0);
1794 e1 = EDGE_SUCC (bb, 1);
1795 if (e0->flags & EDGE_COMPLEX)
1796 return e1->dest;
1797 if (e1->flags & EDGE_COMPLEX)
1798 return e0->dest;
1800 return bb;
1803 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1805 void
1806 notice_special_calls (gimple call)
1808 int flags = gimple_call_flags (call);
1810 if (flags & ECF_MAY_BE_ALLOCA)
1811 cfun->calls_alloca = true;
1812 if (flags & ECF_RETURNS_TWICE)
1813 cfun->calls_setjmp = true;
1817 /* Clear flags set by notice_special_calls. Used by dead code removal
1818 to update the flags. */
1820 void
1821 clear_special_calls (void)
1823 cfun->calls_alloca = false;
1824 cfun->calls_setjmp = false;
1827 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1829 static void
1830 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1832 /* Since this block is no longer reachable, we can just delete all
1833 of its PHI nodes. */
1834 remove_phi_nodes (bb);
1836 /* Remove edges to BB's successors. */
1837 while (EDGE_COUNT (bb->succs) > 0)
1838 remove_edge (EDGE_SUCC (bb, 0));
1842 /* Remove statements of basic block BB. */
1844 static void
1845 remove_bb (basic_block bb)
1847 gimple_stmt_iterator i;
1849 if (dump_file)
1851 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1852 if (dump_flags & TDF_DETAILS)
1854 dump_bb (dump_file, bb, 0, dump_flags);
1855 fprintf (dump_file, "\n");
1859 if (current_loops)
1861 struct loop *loop = bb->loop_father;
1863 /* If a loop gets removed, clean up the information associated
1864 with it. */
1865 if (loop->latch == bb
1866 || loop->header == bb)
1867 free_numbers_of_iterations_estimates_loop (loop);
1870 /* Remove all the instructions in the block. */
1871 if (bb_seq (bb) != NULL)
1873 /* Walk backwards so as to get a chance to substitute all
1874 released DEFs into debug stmts. See
1875 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1876 details. */
1877 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1879 gimple stmt = gsi_stmt (i);
1880 if (gimple_code (stmt) == GIMPLE_LABEL
1881 && (FORCED_LABEL (gimple_label_label (stmt))
1882 || DECL_NONLOCAL (gimple_label_label (stmt))))
1884 basic_block new_bb;
1885 gimple_stmt_iterator new_gsi;
1887 /* A non-reachable non-local label may still be referenced.
1888 But it no longer needs to carry the extra semantics of
1889 non-locality. */
1890 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1892 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1893 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1896 new_bb = bb->prev_bb;
1897 new_gsi = gsi_start_bb (new_bb);
1898 gsi_remove (&i, false);
1899 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1901 else
1903 /* Release SSA definitions if we are in SSA. Note that we
1904 may be called when not in SSA. For example,
1905 final_cleanup calls this function via
1906 cleanup_tree_cfg. */
1907 if (gimple_in_ssa_p (cfun))
1908 release_defs (stmt);
1910 gsi_remove (&i, true);
1913 if (gsi_end_p (i))
1914 i = gsi_last_bb (bb);
1915 else
1916 gsi_prev (&i);
1920 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1921 bb->il.gimple.seq = NULL;
1922 bb->il.gimple.phi_nodes = NULL;
1926 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1927 predicate VAL, return the edge that will be taken out of the block.
1928 If VAL does not match a unique edge, NULL is returned. */
1930 edge
1931 find_taken_edge (basic_block bb, tree val)
1933 gimple stmt;
1935 stmt = last_stmt (bb);
1937 gcc_assert (stmt);
1938 gcc_assert (is_ctrl_stmt (stmt));
1940 if (val == NULL)
1941 return NULL;
1943 if (!is_gimple_min_invariant (val))
1944 return NULL;
1946 if (gimple_code (stmt) == GIMPLE_COND)
1947 return find_taken_edge_cond_expr (bb, val);
1949 if (gimple_code (stmt) == GIMPLE_SWITCH)
1950 return find_taken_edge_switch_expr (bb, val);
1952 if (computed_goto_p (stmt))
1954 /* Only optimize if the argument is a label, if the argument is
1955 not a label then we can not construct a proper CFG.
1957 It may be the case that we only need to allow the LABEL_REF to
1958 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1959 appear inside a LABEL_EXPR just to be safe. */
1960 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1961 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1962 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1963 return NULL;
1966 gcc_unreachable ();
1969 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1970 statement, determine which of the outgoing edges will be taken out of the
1971 block. Return NULL if either edge may be taken. */
1973 static edge
1974 find_taken_edge_computed_goto (basic_block bb, tree val)
1976 basic_block dest;
1977 edge e = NULL;
1979 dest = label_to_block (val);
1980 if (dest)
1982 e = find_edge (bb, dest);
1983 gcc_assert (e != NULL);
1986 return e;
1989 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1990 statement, determine which of the two edges will be taken out of the
1991 block. Return NULL if either edge may be taken. */
1993 static edge
1994 find_taken_edge_cond_expr (basic_block bb, tree val)
1996 edge true_edge, false_edge;
1998 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2000 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2001 return (integer_zerop (val) ? false_edge : true_edge);
2004 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2005 statement, determine which edge will be taken out of the block. Return
2006 NULL if any edge may be taken. */
2008 static edge
2009 find_taken_edge_switch_expr (basic_block bb, tree val)
2011 basic_block dest_bb;
2012 edge e;
2013 gimple switch_stmt;
2014 tree taken_case;
2016 switch_stmt = last_stmt (bb);
2017 taken_case = find_case_label_for_value (switch_stmt, val);
2018 dest_bb = label_to_block (CASE_LABEL (taken_case));
2020 e = find_edge (bb, dest_bb);
2021 gcc_assert (e);
2022 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: the matching case, if any, has index in (low, high).
     Index 0 is the default label, which has no CASE_LOW, so LOW starts
     at 0 and is never dereferenced as a case.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2068 /* Dump a basic block on stderr. */
2070 void
2071 gimple_debug_bb (basic_block bb)
2073 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2077 /* Dump basic block with index N on stderr. */
2079 basic_block
2080 gimple_debug_bb_n (int n)
2082 gimple_debug_bb (BASIC_BLOCK (n));
2083 return BASIC_BLOCK (n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  /* Delegate to the FILE-based dumper with stderr as the sink.  */
  gimple_dump_cfg (stderr, flags);
}
2099 /* Dump the program showing basic block boundaries on the given FILE.
2101 FLAGS are the same used by the tree dumping functions (see TDF_* in
2102 tree.h). */
2104 void
2105 gimple_dump_cfg (FILE *file, int flags)
2107 if (flags & TDF_DETAILS)
2109 dump_function_header (file, current_function_decl, flags);
2110 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2111 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2112 last_basic_block);
2114 brief_dump_cfg (file, flags | TDF_COMMENT);
2115 fprintf (file, "\n");
2118 if (flags & TDF_STATS)
2119 dump_cfg_stats (file);
2121 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2125 /* Dump CFG statistics on FILE. */
2127 void
2128 dump_cfg_stats (FILE *file)
2130 static long max_num_merged_labels = 0;
2131 unsigned long size, total = 0;
2132 long num_edges;
2133 basic_block bb;
2134 const char * const fmt_str = "%-30s%-13s%12s\n";
2135 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2136 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2137 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2138 const char *funcname = current_function_name ();
2140 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2142 fprintf (file, "---------------------------------------------------------\n");
2143 fprintf (file, fmt_str, "", " Number of ", "Memory");
2144 fprintf (file, fmt_str, "", " instances ", "used ");
2145 fprintf (file, "---------------------------------------------------------\n");
2147 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2148 total += size;
2149 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2150 SCALE (size), LABEL (size));
2152 num_edges = 0;
2153 FOR_EACH_BB (bb)
2154 num_edges += EDGE_COUNT (bb->succs);
2155 size = num_edges * sizeof (struct edge_def);
2156 total += size;
2157 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2159 fprintf (file, "---------------------------------------------------------\n");
2160 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2161 LABEL (total));
2162 fprintf (file, "---------------------------------------------------------\n");
2163 fprintf (file, "\n");
2165 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2166 max_num_merged_labels = cfg_stats.num_merged_labels;
2168 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2169 cfg_stats.num_merged_labels, max_num_merged_labels);
2171 fprintf (file, "\n");
2175 /* Dump CFG statistics on stderr. Keep extern so that it's always
2176 linked in the final executable. */
2178 DEBUG_FUNCTION void
2179 debug_cfg_stats (void)
2181 dump_cfg_stats (stderr);
2184 /*---------------------------------------------------------------------------
2185 Miscellaneous helpers
2186 ---------------------------------------------------------------------------*/
2188 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2189 flow. Transfers of control flow associated with EH are excluded. */
2191 static bool
2192 call_can_make_abnormal_goto (gimple t)
2194 /* If the function has no non-local labels, then a call cannot make an
2195 abnormal transfer of control. */
2196 if (!cfun->has_nonlocal_label
2197 && !cfun->calls_setjmp)
2198 return false;
2200 /* Likewise if the call has no side effects. */
2201 if (!gimple_has_side_effects (t))
2202 return false;
2204 /* Likewise if the called function is leaf. */
2205 if (gimple_call_flags (t) & ECF_LEAF)
2206 return false;
2208 return true;
2212 /* Return true if T can make an abnormal transfer of control flow.
2213 Transfers of control flow associated with EH are excluded. */
2215 bool
2216 stmt_can_make_abnormal_goto (gimple t)
2218 if (computed_goto_p (t))
2219 return true;
2220 if (is_gimple_call (t))
2221 return call_can_make_abnormal_goto (t);
2222 return false;
2226 /* Return true if T represents a stmt that always transfers control. */
2228 bool
2229 is_ctrl_stmt (gimple t)
2231 switch (gimple_code (t))
2233 case GIMPLE_COND:
2234 case GIMPLE_SWITCH:
2235 case GIMPLE_GOTO:
2236 case GIMPLE_RETURN:
2237 case GIMPLE_RESX:
2238 return true;
2239 default:
2240 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A call alters control flow if it can make an abnormal goto.  */
	if (call_can_make_abnormal_goto (t))
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* TM ending statements have backedges out of the transaction.
	   Return true so we split the basic block containing them.
	   Note that the TM_BUILTIN test is merely an optimization.  */
	if ((flags & ECF_TM_BUILTIN)
	    && is_tm_ending_fndecl (gimple_call_fndecl (t)))
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      /* An ordinary call may still end the block if it can throw;
	 see the stmt_can_throw_internal check below.  */
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with labels may branch to any of them.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2308 /* Return true if T is a simple local goto. */
2310 bool
2311 simple_goto_p (gimple t)
2313 return (gimple_code (t) == GIMPLE_GOTO
2314 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label after a non-local label still starts a block, since
	     the non-local label's block ends after it.  */
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* Two adjacent ordinary labels share a block; count the
	     coalescing for the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2362 /* Return true if T should end a basic block. */
2364 bool
2365 stmt_ends_bb_p (gimple t)
2367 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Free the vector mapping label indexes to their basic blocks.  */
  vec_free (label_to_block_map);
}
2379 /* Return the first statement in basic block BB. */
2381 gimple
2382 first_stmt (basic_block bb)
2384 gimple_stmt_iterator i = gsi_start_bb (bb);
2385 gimple stmt = NULL;
2387 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2389 gsi_next (&i);
2390 stmt = NULL;
2392 return stmt;
2395 /* Return the first non-label statement in basic block BB. */
2397 static gimple
2398 first_non_label_stmt (basic_block bb)
2400 gimple_stmt_iterator i = gsi_start_bb (bb);
2401 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2402 gsi_next (&i);
2403 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2406 /* Return the last statement in basic block BB. */
2408 gimple
2409 last_stmt (basic_block bb)
2411 gimple_stmt_iterator i = gsi_last_bb (bb);
2412 gimple stmt = NULL;
2414 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2416 gsi_prev (&i);
2417 stmt = NULL;
2419 return stmt;
2422 /* Return the last statement of an otherwise empty block. Return NULL
2423 if the block is totally empty, or if it contains more than one
2424 statement. */
2426 gimple
2427 last_and_only_stmt (basic_block bb)
2429 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2430 gimple last, prev;
2432 if (gsi_end_p (i))
2433 return NULL;
2435 last = gsi_stmt (i);
2436 gsi_prev_nondebug (&i);
2437 if (gsi_end_p (i))
2438 return last;
2440 /* Empty statements should no longer appear in the instruction stream.
2441 Everything that might have appeared before should be deleted by
2442 remove_useless_stmts, and the optimizers should just gsi_remove
2443 instead of smashing with build_empty_stmt.
2445 Thus the only thing that should appear here in a block containing
2446 one executable statement is a label. */
2447 prev = gsi_stmt (i);
2448 if (gimple_code (prev) == GIMPLE_LABEL)
2449 return last;
2450 else
2451 return NULL;
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Nothing to do when no PHI arguments were saved for OLD_EDGE.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def, location) triples and the PHI nodes
     of the new destination in lockstep; each saved def becomes the
     argument of the corresponding PHI for NEW_EDGE.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The saved map entries must line up one-to-one with the PHIs.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued arguments have been consumed; drop the map.  */
  redirect_edge_var_map_clear (old_edge);
}
2484 /* Returns the basic block after which the new basic block created
2485 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2486 near its "logical" location. This is of most help to humans looking
2487 at debugging dumps. */
2489 static basic_block
2490 split_edge_bb_loc (edge edge_in)
2492 basic_block dest = edge_in->dest;
2493 basic_block dest_prev = dest->prev_bb;
2495 if (dest_prev)
2497 edge e = find_edge (dest_prev, dest);
2498 if (e && !(e->flags & EDGE_COMPLEX))
2499 return edge_in->src;
2501 return dest_prev;
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits its profile data from the edge being
     split: it will execute exactly as often as that edge is taken.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to the old
     destination.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Point EDGE_IN at the new block, then move the PHI arguments that
     redirect_edge_and_branch queued for EDGE_IN onto the fallthru
     edge, keeping the PHIs in DEST correct.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2535 /* Verify properties of the address expression T with base object BASE. */
2537 static tree
2538 verify_address (tree t, tree base)
2540 bool old_constant;
2541 bool old_side_effects;
2542 bool new_constant;
2543 bool new_side_effects;
2545 old_constant = TREE_CONSTANT (t);
2546 old_side_effects = TREE_SIDE_EFFECTS (t);
2548 recompute_tree_invariant_for_addr_expr (t);
2549 new_side_effects = TREE_SIDE_EFFECTS (t);
2550 new_constant = TREE_CONSTANT (t);
2552 if (old_constant != new_constant)
2554 error ("constant not recomputed when ADDR_EXPR changed");
2555 return t;
2557 if (old_side_effects != new_side_effects)
2559 error ("side effects not recomputed when ADDR_EXPR changed");
2560 return t;
2563 if (!(TREE_CODE (base) == VAR_DECL
2564 || TREE_CODE (base) == PARM_DECL
2565 || TREE_CODE (base) == RESULT_DECL))
2566 return NULL_TREE;
2568 if (DECL_GIMPLE_REG_P (base))
2570 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2571 return base;
2574 return NULL_TREE;
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns a non-NULL offending tree on error, NULL
   otherwise.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types themselves need no per-node verification here.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF was replaced by MEM_REF in the gimplifier.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      /* Operand 0 must be a valid address, operand 1 a pointer-typed
	 integer constant offset.  */
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	/* Only plain decls need the addressability check.  */
	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes are stripped before gimplification completes.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Operand 1 is the field size, operand 2 the bit position;
	     both must fit in an unsigned HOST_WIDE_INT.  */
	  if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
	      || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (TREE_OPERAND (t, 1))))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (TREE_OPERAND (t, 1))))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* T is now the base of the reference chain.  */
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no further element checks.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2865 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2866 Returns true if there is an error, otherwise false. */
2868 static bool
2869 verify_types_in_gimple_min_lval (tree expr)
2871 tree op;
2873 if (is_gimple_id (expr))
2874 return false;
2876 if (TREE_CODE (expr) != TARGET_MEM_REF
2877 && TREE_CODE (expr) != MEM_REF)
2879 error ("invalid expression for min lvalue");
2880 return true;
2883 /* TARGET_MEM_REFs are strange beasts. */
2884 if (TREE_CODE (expr) == TARGET_MEM_REF)
2885 return false;
2887 op = TREE_OPERAND (expr, 0);
2888 if (!is_gimple_val (op))
2890 error ("invalid operand in indirect reference");
2891 debug_generic_stmt (op);
2892 return true;
2894 /* Memory references now generally can involve a value conversion. */
2896 return false;
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Walk down the chain of component references, checking each level
     until we reach the base object.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* REALPART/IMAGPART must yield the element type of the complex
	 operand.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* COMPONENT_REF must yield the type of the FIELD_DECL in
	 operand 1.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference; check memory references.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3033 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3034 list of pointer-to types that is trivially convertible to DEST. */
3036 static bool
3037 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3039 tree src;
3041 if (!TYPE_POINTER_TO (src_obj))
3042 return true;
3044 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3045 if (useless_type_conversion_p (dest, src))
3046 return true;
3048 return false;
3051 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3052 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3054 static bool
3055 valid_fixed_convert_types_p (tree type1, tree type2)
3057 return (FIXED_POINT_TYPE_P (type1)
3058 && (INTEGRAL_TYPE_P (type2)
3059 || SCALAR_FLOAT_TYPE_P (type2)
3060 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry no fn tree; regular calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on a decl that is
     also pure or const.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-typed arguments must be GIMPLE values; aggregate
	 arguments must be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A scalar boolean result is not allowed for vector operands.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      /* Element count and element size must match the operands.  */
      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype. */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers, and the conversion must
	   actually change the address space.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* One side must be fixed-point; the other integral, float or
	   fixed-point — in either direction.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer-to-float, scalar or vector element-wise.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float-to-integer, scalar or vector element-wise.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3436 /* Verify a gimple assignment statement STMT with a binary rhs.
3437 Returns true if anything is wrong. */
3439 static bool
3440 verify_gimple_assign_binary (gimple stmt)
3442 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3443 tree lhs = gimple_assign_lhs (stmt);
3444 tree lhs_type = TREE_TYPE (lhs);
3445 tree rhs1 = gimple_assign_rhs1 (stmt);
3446 tree rhs1_type = TREE_TYPE (rhs1);
3447 tree rhs2 = gimple_assign_rhs2 (stmt);
3448 tree rhs2_type = TREE_TYPE (rhs2);
3450 if (!is_gimple_reg (lhs))
3452 error ("non-register as LHS of binary operation");
3453 return true;
3456 if (!is_gimple_val (rhs1)
3457 || !is_gimple_val (rhs2))
3459 error ("invalid operands in binary operation");
3460 return true;
3463 /* First handle operations that involve different types. */
3464 switch (rhs_code)
3466 case COMPLEX_EXPR:
3468 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3469 || !(INTEGRAL_TYPE_P (rhs1_type)
3470 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3471 || !(INTEGRAL_TYPE_P (rhs2_type)
3472 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3474 error ("type mismatch in complex expression");
3475 debug_generic_expr (lhs_type);
3476 debug_generic_expr (rhs1_type);
3477 debug_generic_expr (rhs2_type);
3478 return true;
3481 return false;
3484 case LSHIFT_EXPR:
3485 case RSHIFT_EXPR:
3486 case LROTATE_EXPR:
3487 case RROTATE_EXPR:
3489 /* Shifts and rotates are ok on integral types, fixed point
3490 types and integer vector types. */
3491 if ((!INTEGRAL_TYPE_P (rhs1_type)
3492 && !FIXED_POINT_TYPE_P (rhs1_type)
3493 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3494 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3495 || (!INTEGRAL_TYPE_P (rhs2_type)
3496 /* Vector shifts of vectors are also ok. */
3497 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3498 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3499 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3500 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3501 || !useless_type_conversion_p (lhs_type, rhs1_type))
3503 error ("type mismatch in shift expression");
3504 debug_generic_expr (lhs_type);
3505 debug_generic_expr (rhs1_type);
3506 debug_generic_expr (rhs2_type);
3507 return true;
3510 return false;
3513 case VEC_LSHIFT_EXPR:
3514 case VEC_RSHIFT_EXPR:
3516 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3517 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3518 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3519 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3520 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3521 || (!INTEGRAL_TYPE_P (rhs2_type)
3522 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3523 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3524 || !useless_type_conversion_p (lhs_type, rhs1_type))
3526 error ("type mismatch in vector shift expression");
3527 debug_generic_expr (lhs_type);
3528 debug_generic_expr (rhs1_type);
3529 debug_generic_expr (rhs2_type);
3530 return true;
3532 /* For shifting a vector of non-integral components we
3533 only allow shifting by a constant multiple of the element size. */
3534 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3535 && (TREE_CODE (rhs2) != INTEGER_CST
3536 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3537 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3539 error ("non-element sized vector shift of floating point vector");
3540 return true;
3543 return false;
3546 case WIDEN_LSHIFT_EXPR:
3548 if (!INTEGRAL_TYPE_P (lhs_type)
3549 || !INTEGRAL_TYPE_P (rhs1_type)
3550 || TREE_CODE (rhs2) != INTEGER_CST
3551 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3553 error ("type mismatch in widening vector shift expression");
3554 debug_generic_expr (lhs_type);
3555 debug_generic_expr (rhs1_type);
3556 debug_generic_expr (rhs2_type);
3557 return true;
3560 return false;
3563 case VEC_WIDEN_LSHIFT_HI_EXPR:
3564 case VEC_WIDEN_LSHIFT_LO_EXPR:
3566 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3567 || TREE_CODE (lhs_type) != VECTOR_TYPE
3568 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3569 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3570 || TREE_CODE (rhs2) != INTEGER_CST
3571 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3572 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3574 error ("type mismatch in widening vector shift expression");
3575 debug_generic_expr (lhs_type);
3576 debug_generic_expr (rhs1_type);
3577 debug_generic_expr (rhs2_type);
3578 return true;
3581 return false;
3584 case PLUS_EXPR:
3585 case MINUS_EXPR:
3587 tree lhs_etype = lhs_type;
3588 tree rhs1_etype = rhs1_type;
3589 tree rhs2_etype = rhs2_type;
3590 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3592 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3593 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3595 error ("invalid non-vector operands to vector valued plus");
3596 return true;
3598 lhs_etype = TREE_TYPE (lhs_type);
3599 rhs1_etype = TREE_TYPE (rhs1_type);
3600 rhs2_etype = TREE_TYPE (rhs2_type);
3602 if (POINTER_TYPE_P (lhs_etype)
3603 || POINTER_TYPE_P (rhs1_etype)
3604 || POINTER_TYPE_P (rhs2_etype))
3606 error ("invalid (pointer) operands to plus/minus");
3607 return true;
3610 /* Continue with generic binary expression handling. */
3611 break;
3614 case POINTER_PLUS_EXPR:
3616 if (!POINTER_TYPE_P (rhs1_type)
3617 || !useless_type_conversion_p (lhs_type, rhs1_type)
3618 || !ptrofftype_p (rhs2_type))
3620 error ("type mismatch in pointer plus expression");
3621 debug_generic_stmt (lhs_type);
3622 debug_generic_stmt (rhs1_type);
3623 debug_generic_stmt (rhs2_type);
3624 return true;
3627 return false;
3630 case TRUTH_ANDIF_EXPR:
3631 case TRUTH_ORIF_EXPR:
3632 case TRUTH_AND_EXPR:
3633 case TRUTH_OR_EXPR:
3634 case TRUTH_XOR_EXPR:
3636 gcc_unreachable ();
3638 case LT_EXPR:
3639 case LE_EXPR:
3640 case GT_EXPR:
3641 case GE_EXPR:
3642 case EQ_EXPR:
3643 case NE_EXPR:
3644 case UNORDERED_EXPR:
3645 case ORDERED_EXPR:
3646 case UNLT_EXPR:
3647 case UNLE_EXPR:
3648 case UNGT_EXPR:
3649 case UNGE_EXPR:
3650 case UNEQ_EXPR:
3651 case LTGT_EXPR:
3652 /* Comparisons are also binary, but the result type is not
3653 connected to the operand types. */
3654 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3656 case WIDEN_MULT_EXPR:
3657 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3658 return true;
3659 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3660 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3662 case WIDEN_SUM_EXPR:
3663 case VEC_WIDEN_MULT_HI_EXPR:
3664 case VEC_WIDEN_MULT_LO_EXPR:
3665 case VEC_WIDEN_MULT_EVEN_EXPR:
3666 case VEC_WIDEN_MULT_ODD_EXPR:
3667 case VEC_PACK_TRUNC_EXPR:
3668 case VEC_PACK_SAT_EXPR:
3669 case VEC_PACK_FIX_TRUNC_EXPR:
3670 /* FIXME. */
3671 return false;
3673 case MULT_EXPR:
3674 case MULT_HIGHPART_EXPR:
3675 case TRUNC_DIV_EXPR:
3676 case CEIL_DIV_EXPR:
3677 case FLOOR_DIV_EXPR:
3678 case ROUND_DIV_EXPR:
3679 case TRUNC_MOD_EXPR:
3680 case CEIL_MOD_EXPR:
3681 case FLOOR_MOD_EXPR:
3682 case ROUND_MOD_EXPR:
3683 case RDIV_EXPR:
3684 case EXACT_DIV_EXPR:
3685 case MIN_EXPR:
3686 case MAX_EXPR:
3687 case BIT_IOR_EXPR:
3688 case BIT_XOR_EXPR:
3689 case BIT_AND_EXPR:
3690 /* Continue with generic binary expression handling. */
3691 break;
3693 default:
3694 gcc_unreachable ();
3697 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3698 || !useless_type_conversion_p (lhs_type, rhs2_type))
3700 error ("type mismatch in binary expression");
3701 debug_generic_stmt (lhs_type);
3702 debug_generic_stmt (rhs1_type);
3703 debug_generic_stmt (rhs2_type);
3704 return true;
3707 return false;
3710 /* Verify a gimple assignment statement STMT with a ternary rhs.
3711 Returns true if anything is wrong. */
3713 static bool
3714 verify_gimple_assign_ternary (gimple stmt)
3716 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3717 tree lhs = gimple_assign_lhs (stmt);
3718 tree lhs_type = TREE_TYPE (lhs);
3719 tree rhs1 = gimple_assign_rhs1 (stmt);
3720 tree rhs1_type = TREE_TYPE (rhs1);
3721 tree rhs2 = gimple_assign_rhs2 (stmt);
3722 tree rhs2_type = TREE_TYPE (rhs2);
3723 tree rhs3 = gimple_assign_rhs3 (stmt);
3724 tree rhs3_type = TREE_TYPE (rhs3);
3726 if (!is_gimple_reg (lhs))
3728 error ("non-register as LHS of ternary operation");
3729 return true;
3732 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3733 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3734 || !is_gimple_val (rhs2)
3735 || !is_gimple_val (rhs3))
3737 error ("invalid operands in ternary operation");
3738 return true;
3741 /* First handle operations that involve different types. */
3742 switch (rhs_code)
3744 case WIDEN_MULT_PLUS_EXPR:
3745 case WIDEN_MULT_MINUS_EXPR:
3746 if ((!INTEGRAL_TYPE_P (rhs1_type)
3747 && !FIXED_POINT_TYPE_P (rhs1_type))
3748 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3749 || !useless_type_conversion_p (lhs_type, rhs3_type)
3750 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3751 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3753 error ("type mismatch in widening multiply-accumulate expression");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 debug_generic_expr (rhs2_type);
3757 debug_generic_expr (rhs3_type);
3758 return true;
3760 break;
3762 case FMA_EXPR:
3763 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3764 || !useless_type_conversion_p (lhs_type, rhs2_type)
3765 || !useless_type_conversion_p (lhs_type, rhs3_type))
3767 error ("type mismatch in fused multiply-add expression");
3768 debug_generic_expr (lhs_type);
3769 debug_generic_expr (rhs1_type);
3770 debug_generic_expr (rhs2_type);
3771 debug_generic_expr (rhs3_type);
3772 return true;
3774 break;
3776 case COND_EXPR:
3777 case VEC_COND_EXPR:
3778 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3779 || !useless_type_conversion_p (lhs_type, rhs3_type))
3781 error ("type mismatch in conditional expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs2_type);
3784 debug_generic_expr (rhs3_type);
3785 return true;
3787 break;
3789 case VEC_PERM_EXPR:
3790 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3791 || !useless_type_conversion_p (lhs_type, rhs2_type))
3793 error ("type mismatch in vector permute expression");
3794 debug_generic_expr (lhs_type);
3795 debug_generic_expr (rhs1_type);
3796 debug_generic_expr (rhs2_type);
3797 debug_generic_expr (rhs3_type);
3798 return true;
3801 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3802 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3803 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3805 error ("vector types expected in vector permute expression");
3806 debug_generic_expr (lhs_type);
3807 debug_generic_expr (rhs1_type);
3808 debug_generic_expr (rhs2_type);
3809 debug_generic_expr (rhs3_type);
3810 return true;
3813 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3814 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3815 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3816 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3817 != TYPE_VECTOR_SUBPARTS (lhs_type))
3819 error ("vectors with different element number found "
3820 "in vector permute expression");
3821 debug_generic_expr (lhs_type);
3822 debug_generic_expr (rhs1_type);
3823 debug_generic_expr (rhs2_type);
3824 debug_generic_expr (rhs3_type);
3825 return true;
3828 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3829 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3830 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3832 error ("invalid mask type in vector permute expression");
3833 debug_generic_expr (lhs_type);
3834 debug_generic_expr (rhs1_type);
3835 debug_generic_expr (rhs2_type);
3836 debug_generic_expr (rhs3_type);
3837 return true;
3840 return false;
3842 case DOT_PROD_EXPR:
3843 case REALIGN_LOAD_EXPR:
3844 /* FIXME. */
3845 return false;
3847 default:
3848 gcc_unreachable ();
3850 return false;
3853 /* Verify a gimple assignment statement STMT with a single rhs.
3854 Returns true if anything is wrong. */
3856 static bool
3857 verify_gimple_assign_single (gimple stmt)
3859 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3860 tree lhs = gimple_assign_lhs (stmt);
3861 tree lhs_type = TREE_TYPE (lhs);
3862 tree rhs1 = gimple_assign_rhs1 (stmt);
3863 tree rhs1_type = TREE_TYPE (rhs1);
3864 bool res = false;
3866 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3868 error ("non-trivial conversion at assignment");
3869 debug_generic_expr (lhs_type);
3870 debug_generic_expr (rhs1_type);
3871 return true;
3874 if (gimple_clobber_p (stmt)
3875 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3877 error ("non-decl/MEM_REF LHS in clobber statement");
3878 debug_generic_expr (lhs);
3879 return true;
3882 if (handled_component_p (lhs))
3883 res |= verify_types_in_gimple_reference (lhs, true);
3885 /* Special codes we cannot handle via their class. */
3886 switch (rhs_code)
3888 case ADDR_EXPR:
3890 tree op = TREE_OPERAND (rhs1, 0);
3891 if (!is_gimple_addressable (op))
3893 error ("invalid operand in unary expression");
3894 return true;
3897 /* Technically there is no longer a need for matching types, but
3898 gimple hygiene asks for this check. In LTO we can end up
3899 combining incompatible units and thus end up with addresses
3900 of globals that change their type to a common one. */
3901 if (!in_lto_p
3902 && !types_compatible_p (TREE_TYPE (op),
3903 TREE_TYPE (TREE_TYPE (rhs1)))
3904 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3905 TREE_TYPE (op)))
3907 error ("type mismatch in address expression");
3908 debug_generic_stmt (TREE_TYPE (rhs1));
3909 debug_generic_stmt (TREE_TYPE (op));
3910 return true;
3913 return verify_types_in_gimple_reference (op, true);
3916 /* tcc_reference */
3917 case INDIRECT_REF:
3918 error ("INDIRECT_REF in gimple IL");
3919 return true;
3921 case COMPONENT_REF:
3922 case BIT_FIELD_REF:
3923 case ARRAY_REF:
3924 case ARRAY_RANGE_REF:
3925 case VIEW_CONVERT_EXPR:
3926 case REALPART_EXPR:
3927 case IMAGPART_EXPR:
3928 case TARGET_MEM_REF:
3929 case MEM_REF:
3930 if (!is_gimple_reg (lhs)
3931 && is_gimple_reg_type (TREE_TYPE (lhs)))
3933 error ("invalid rhs for gimple memory store");
3934 debug_generic_stmt (lhs);
3935 debug_generic_stmt (rhs1);
3936 return true;
3938 return res || verify_types_in_gimple_reference (rhs1, false);
3940 /* tcc_constant */
3941 case SSA_NAME:
3942 case INTEGER_CST:
3943 case REAL_CST:
3944 case FIXED_CST:
3945 case COMPLEX_CST:
3946 case VECTOR_CST:
3947 case STRING_CST:
3948 return res;
3950 /* tcc_declaration */
3951 case CONST_DECL:
3952 return res;
3953 case VAR_DECL:
3954 case PARM_DECL:
3955 if (!is_gimple_reg (lhs)
3956 && !is_gimple_reg (rhs1)
3957 && is_gimple_reg_type (TREE_TYPE (lhs)))
3959 error ("invalid rhs for gimple memory store");
3960 debug_generic_stmt (lhs);
3961 debug_generic_stmt (rhs1);
3962 return true;
3964 return res;
3966 case CONSTRUCTOR:
3967 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3969 unsigned int i;
3970 tree elt_i, elt_v, elt_t = NULL_TREE;
3972 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3973 return res;
3974 /* For vector CONSTRUCTORs we require that either it is empty
3975 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3976 (then the element count must be correct to cover the whole
3977 outer vector and index must be NULL on all elements, or it is
3978 a CONSTRUCTOR of scalar elements, where we as an exception allow
3979 smaller number of elements (assuming zero filling) and
3980 consecutive indexes as compared to NULL indexes (such
3981 CONSTRUCTORs can appear in the IL from FEs). */
3982 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3984 if (elt_t == NULL_TREE)
3986 elt_t = TREE_TYPE (elt_v);
3987 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3989 tree elt_t = TREE_TYPE (elt_v);
3990 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3991 TREE_TYPE (elt_t)))
3993 error ("incorrect type of vector CONSTRUCTOR"
3994 " elements");
3995 debug_generic_stmt (rhs1);
3996 return true;
3998 else if (CONSTRUCTOR_NELTS (rhs1)
3999 * TYPE_VECTOR_SUBPARTS (elt_t)
4000 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4002 error ("incorrect number of vector CONSTRUCTOR"
4003 " elements");
4004 debug_generic_stmt (rhs1);
4005 return true;
4008 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4009 elt_t))
4011 error ("incorrect type of vector CONSTRUCTOR elements");
4012 debug_generic_stmt (rhs1);
4013 return true;
4015 else if (CONSTRUCTOR_NELTS (rhs1)
4016 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4018 error ("incorrect number of vector CONSTRUCTOR elements");
4019 debug_generic_stmt (rhs1);
4020 return true;
4023 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4025 error ("incorrect type of vector CONSTRUCTOR elements");
4026 debug_generic_stmt (rhs1);
4027 return true;
4029 if (elt_i != NULL_TREE
4030 && (TREE_CODE (elt_t) == VECTOR_TYPE
4031 || TREE_CODE (elt_i) != INTEGER_CST
4032 || compare_tree_int (elt_i, i) != 0))
4034 error ("vector CONSTRUCTOR with non-NULL element index");
4035 debug_generic_stmt (rhs1);
4036 return true;
4040 return res;
4041 case OBJ_TYPE_REF:
4042 case ASSERT_EXPR:
4043 case WITH_SIZE_EXPR:
4044 /* FIXME. */
4045 return res;
4047 default:;
4050 return res;
4053 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4054 is a problem, otherwise false. */
4056 static bool
4057 verify_gimple_assign (gimple stmt)
4059 switch (gimple_assign_rhs_class (stmt))
4061 case GIMPLE_SINGLE_RHS:
4062 return verify_gimple_assign_single (stmt);
4064 case GIMPLE_UNARY_RHS:
4065 return verify_gimple_assign_unary (stmt);
4067 case GIMPLE_BINARY_RHS:
4068 return verify_gimple_assign_binary (stmt);
4070 case GIMPLE_TERNARY_RHS:
4071 return verify_gimple_assign_ternary (stmt);
4073 default:
4074 gcc_unreachable ();
4078 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4079 is a problem, otherwise false. */
4081 static bool
4082 verify_gimple_return (gimple stmt)
4084 tree op = gimple_return_retval (stmt);
4085 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4087 /* We cannot test for present return values as we do not fix up missing
4088 return values from the original source. */
4089 if (op == NULL)
4090 return false;
4092 if (!is_gimple_val (op)
4093 && TREE_CODE (op) != RESULT_DECL)
4095 error ("invalid operand in return statement");
4096 debug_generic_stmt (op);
4097 return true;
4100 if ((TREE_CODE (op) == RESULT_DECL
4101 && DECL_BY_REFERENCE (op))
4102 || (TREE_CODE (op) == SSA_NAME
4103 && SSA_NAME_VAR (op)
4104 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4105 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4106 op = TREE_TYPE (op);
4108 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4110 error ("invalid conversion in return statement");
4111 debug_generic_stmt (restype);
4112 debug_generic_stmt (TREE_TYPE (op));
4113 return true;
4116 return false;
4120 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4121 is a problem, otherwise false. */
4123 static bool
4124 verify_gimple_goto (gimple stmt)
4126 tree dest = gimple_goto_dest (stmt);
4128 /* ??? We have two canonical forms of direct goto destinations, a
4129 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4130 if (TREE_CODE (dest) != LABEL_DECL
4131 && (!is_gimple_val (dest)
4132 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4134 error ("goto destination is neither a label nor a pointer");
4135 return true;
4138 return false;
4141 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4142 is a problem, otherwise false. */
4144 static bool
4145 verify_gimple_switch (gimple stmt)
4147 unsigned int i, n;
4148 tree elt, prev_upper_bound = NULL_TREE;
4149 tree index_type, elt_type = NULL_TREE;
4151 if (!is_gimple_val (gimple_switch_index (stmt)))
4153 error ("invalid operand to switch statement");
4154 debug_generic_stmt (gimple_switch_index (stmt));
4155 return true;
4158 index_type = TREE_TYPE (gimple_switch_index (stmt));
4159 if (! INTEGRAL_TYPE_P (index_type))
4161 error ("non-integral type switch statement");
4162 debug_generic_expr (index_type);
4163 return true;
4166 elt = gimple_switch_label (stmt, 0);
4167 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4169 error ("invalid default case label in switch statement");
4170 debug_generic_expr (elt);
4171 return true;
4174 n = gimple_switch_num_labels (stmt);
4175 for (i = 1; i < n; i++)
4177 elt = gimple_switch_label (stmt, i);
4179 if (! CASE_LOW (elt))
4181 error ("invalid case label in switch statement");
4182 debug_generic_expr (elt);
4183 return true;
4185 if (CASE_HIGH (elt)
4186 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4188 error ("invalid case range in switch statement");
4189 debug_generic_expr (elt);
4190 return true;
4193 if (elt_type)
4195 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4196 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4198 error ("type mismatch for case label in switch statement");
4199 debug_generic_expr (elt);
4200 return true;
4203 else
4205 elt_type = TREE_TYPE (CASE_LOW (elt));
4206 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4208 error ("type precision mismatch in switch statement");
4209 return true;
4213 if (prev_upper_bound)
4215 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4217 error ("case labels not sorted in switch statement");
4218 return true;
4222 prev_upper_bound = CASE_HIGH (elt);
4223 if (! prev_upper_bound)
4224 prev_upper_bound = CASE_LOW (elt);
4227 return false;
4230 /* Verify a gimple debug statement STMT.
4231 Returns true if anything is wrong. */
4233 static bool
4234 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4236 /* There isn't much that could be wrong in a gimple debug stmt. A
4237 gimple debug bind stmt, for example, maps a tree, that's usually
4238 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4239 component or member of an aggregate type, to another tree, that
4240 can be an arbitrary expression. These stmts expand into debug
4241 insns, and are converted to debug notes by var-tracking.c. */
4242 return false;
4245 /* Verify a gimple label statement STMT.
4246 Returns true if anything is wrong. */
4248 static bool
4249 verify_gimple_label (gimple stmt)
4251 tree decl = gimple_label_label (stmt);
4252 int uid;
4253 bool err = false;
4255 if (TREE_CODE (decl) != LABEL_DECL)
4256 return true;
4257 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4258 && DECL_CONTEXT (decl) != current_function_decl)
4260 error ("label's context is not the current function decl");
4261 err |= true;
4264 uid = LABEL_DECL_UID (decl);
4265 if (cfun->cfg
4266 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4268 error ("incorrect entry in label_to_block_map");
4269 err |= true;
4272 uid = EH_LANDING_PAD_NR (decl);
4273 if (uid)
4275 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4276 if (decl != lp->post_landing_pad)
4278 error ("incorrect setting of landing pad number");
4279 err |= true;
4283 return err;
4286 /* Verify the GIMPLE statement STMT. Returns true if there is an
4287 error, otherwise false. */
4289 static bool
4290 verify_gimple_stmt (gimple stmt)
4292 switch (gimple_code (stmt))
4294 case GIMPLE_ASSIGN:
4295 return verify_gimple_assign (stmt);
4297 case GIMPLE_LABEL:
4298 return verify_gimple_label (stmt);
4300 case GIMPLE_CALL:
4301 return verify_gimple_call (stmt);
4303 case GIMPLE_COND:
4304 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4306 error ("invalid comparison code in gimple cond");
4307 return true;
4309 if (!(!gimple_cond_true_label (stmt)
4310 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4311 || !(!gimple_cond_false_label (stmt)
4312 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4314 error ("invalid labels in gimple cond");
4315 return true;
4318 return verify_gimple_comparison (boolean_type_node,
4319 gimple_cond_lhs (stmt),
4320 gimple_cond_rhs (stmt));
4322 case GIMPLE_GOTO:
4323 return verify_gimple_goto (stmt);
4325 case GIMPLE_SWITCH:
4326 return verify_gimple_switch (stmt);
4328 case GIMPLE_RETURN:
4329 return verify_gimple_return (stmt);
4331 case GIMPLE_ASM:
4332 return false;
4334 case GIMPLE_TRANSACTION:
4335 return verify_gimple_transaction (stmt);
4337 /* Tuples that do not have tree operands. */
4338 case GIMPLE_NOP:
4339 case GIMPLE_PREDICT:
4340 case GIMPLE_RESX:
4341 case GIMPLE_EH_DISPATCH:
4342 case GIMPLE_EH_MUST_NOT_THROW:
4343 return false;
4345 CASE_GIMPLE_OMP:
4346 /* OpenMP directives are validated by the FE and never operated
4347 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4348 non-gimple expressions when the main index variable has had
4349 its address taken. This does not affect the loop itself
4350 because the header of an GIMPLE_OMP_FOR is merely used to determine
4351 how to setup the parallel iteration. */
4352 return false;
4354 case GIMPLE_DEBUG:
4355 return verify_gimple_debug (stmt);
4357 default:
4358 gcc_unreachable ();
4362 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4363 and false otherwise. */
4365 static bool
4366 verify_gimple_phi (gimple phi)
4368 bool err = false;
4369 unsigned i;
4370 tree phi_result = gimple_phi_result (phi);
4371 bool virtual_p;
4373 if (!phi_result)
4375 error ("invalid PHI result");
4376 return true;
4379 virtual_p = virtual_operand_p (phi_result);
4380 if (TREE_CODE (phi_result) != SSA_NAME
4381 || (virtual_p
4382 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4384 error ("invalid PHI result");
4385 err = true;
4388 for (i = 0; i < gimple_phi_num_args (phi); i++)
4390 tree t = gimple_phi_arg_def (phi, i);
4392 if (!t)
4394 error ("missing PHI def");
4395 err |= true;
4396 continue;
4398 /* Addressable variables do have SSA_NAMEs but they
4399 are not considered gimple values. */
4400 else if ((TREE_CODE (t) == SSA_NAME
4401 && virtual_p != virtual_operand_p (t))
4402 || (virtual_p
4403 && (TREE_CODE (t) != SSA_NAME
4404 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4405 || (!virtual_p
4406 && !is_gimple_val (t)))
4408 error ("invalid PHI argument");
4409 debug_generic_expr (t);
4410 err |= true;
4412 #ifdef ENABLE_TYPES_CHECKING
4413 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4415 error ("incompatible types in PHI argument %u", i);
4416 debug_generic_stmt (TREE_TYPE (phi_result));
4417 debug_generic_stmt (TREE_TYPE (t));
4418 err |= true;
4420 #endif
4423 return err;
4426 /* Verify the GIMPLE statements inside the sequence STMTS. */
4428 static bool
4429 verify_gimple_in_seq_2 (gimple_seq stmts)
4431 gimple_stmt_iterator ittr;
4432 bool err = false;
4434 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4436 gimple stmt = gsi_stmt (ittr);
4438 switch (gimple_code (stmt))
4440 case GIMPLE_BIND:
4441 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4442 break;
4444 case GIMPLE_TRY:
4445 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4446 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4447 break;
4449 case GIMPLE_EH_FILTER:
4450 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4451 break;
4453 case GIMPLE_EH_ELSE:
4454 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4455 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4456 break;
4458 case GIMPLE_CATCH:
4459 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4460 break;
4462 case GIMPLE_TRANSACTION:
4463 err |= verify_gimple_transaction (stmt);
4464 break;
4466 default:
4468 bool err2 = verify_gimple_stmt (stmt);
4469 if (err2)
4470 debug_gimple_stmt (stmt);
4471 err |= err2;
4476 return err;
4479 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4480 is a problem, otherwise false. */
4482 static bool
4483 verify_gimple_transaction (gimple stmt)
4485 tree lab = gimple_transaction_label (stmt);
4486 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4487 return true;
4488 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4492 /* Verify the GIMPLE statements inside the statement list STMTS. */
4494 DEBUG_FUNCTION void
4495 verify_gimple_in_seq (gimple_seq stmts)
4497 timevar_push (TV_TREE_STMT_VERIFY);
4498 if (verify_gimple_in_seq_2 (stmts))
4499 internal_error ("verify_gimple failed");
4500 timevar_pop (TV_TREE_STMT_VERIFY);
4503 /* Return true when the T can be shared. */
4505 static bool
4506 tree_node_can_be_shared (tree t)
4508 if (IS_TYPE_OR_DECL_P (t)
4509 || is_gimple_min_invariant (t)
4510 || TREE_CODE (t) == SSA_NAME
4511 || t == error_mark_node
4512 || TREE_CODE (t) == IDENTIFIER_NODE)
4513 return true;
4515 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4516 return true;
4518 if (DECL_P (t))
4519 return true;
4521 return false;
4524 /* Called via walk_tree. Verify tree sharing. */
4526 static tree
4527 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4529 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4531 if (tree_node_can_be_shared (*tp))
4533 *walk_subtrees = false;
4534 return NULL;
4537 if (pointer_set_insert (visited, *tp))
4538 return *tp;
4540 return NULL;
4543 /* Called via walk_gimple_stmt. Verify tree sharing. */
4545 static tree
4546 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4548 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4549 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4552 static bool eh_error_found;
4553 static int
4554 verify_eh_throw_stmt_node (void **slot, void *data)
4556 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4557 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4559 if (!pointer_set_contains (visited, node->stmt))
4561 error ("dead STMT in EH table");
4562 debug_gimple_stmt (node->stmt);
4563 eh_error_found = true;
4565 return 1;
4568 /* Verify if the location LOCs block is in BLOCKS. */
4570 static bool
4571 verify_location (pointer_set_t *blocks, location_t loc)
4573 tree block = LOCATION_BLOCK (loc);
4574 if (block != NULL_TREE
4575 && !pointer_set_contains (blocks, block))
4577 error ("location references block not in block tree");
4578 return true;
4580 if (block != NULL_TREE)
4581 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4582 return false;
4585 /* Called via walk_tree. Verify that expressions have no blocks. */
4587 static tree
4588 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4590 if (!EXPR_P (*tp))
4592 *walk_subtrees = false;
4593 return NULL;
4596 location_t loc = EXPR_LOCATION (*tp);
4597 if (LOCATION_BLOCK (loc) != NULL)
4598 return *tp;
4600 return NULL;
4603 /* Called via walk_tree. Verify locations of expressions. */
4605 static tree
4606 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4608 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4610 if (TREE_CODE (*tp) == VAR_DECL
4611 && DECL_HAS_DEBUG_EXPR_P (*tp))
4613 tree t = DECL_DEBUG_EXPR (*tp);
4614 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4615 if (addr)
4616 return addr;
4618 if ((TREE_CODE (*tp) == VAR_DECL
4619 || TREE_CODE (*tp) == PARM_DECL
4620 || TREE_CODE (*tp) == RESULT_DECL)
4621 && DECL_HAS_VALUE_EXPR_P (*tp))
4623 tree t = DECL_VALUE_EXPR (*tp);
4624 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4625 if (addr)
4626 return addr;
4629 if (!EXPR_P (*tp))
4631 *walk_subtrees = false;
4632 return NULL;
4635 location_t loc = EXPR_LOCATION (*tp);
4636 if (verify_location (blocks, loc))
4637 return *tp;
4639 return NULL;
4642 /* Called via walk_gimple_op. Verify locations of expressions. */
4644 static tree
4645 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4647 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4648 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4651 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4653 static void
4654 collect_subblocks (pointer_set_t *blocks, tree block)
4656 tree t;
4657 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4659 pointer_set_insert (blocks, t);
4660 collect_subblocks (blocks, t);
4664 /* Verify the GIMPLE statements in the CFG of FN. */
4666 DEBUG_FUNCTION void
4667 verify_gimple_in_cfg (struct function *fn)
4669 basic_block bb;
4670 bool err = false;
4671 struct pointer_set_t *visited, *visited_stmts, *blocks;
4673 timevar_push (TV_TREE_STMT_VERIFY);
4674 visited = pointer_set_create ();
4675 visited_stmts = pointer_set_create ();
4677 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4678 blocks = pointer_set_create ();
4679 if (DECL_INITIAL (fn->decl))
4681 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4682 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4685 FOR_EACH_BB_FN (bb, fn)
4687 gimple_stmt_iterator gsi;
4689 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4691 gimple phi = gsi_stmt (gsi);
4692 bool err2 = false;
4693 unsigned i;
4695 pointer_set_insert (visited_stmts, phi);
4697 if (gimple_bb (phi) != bb)
4699 error ("gimple_bb (phi) is set to a wrong basic block");
4700 err2 = true;
4703 err2 |= verify_gimple_phi (phi);
4705 /* Only PHI arguments have locations. */
4706 if (gimple_location (phi) != UNKNOWN_LOCATION)
4708 error ("PHI node with location");
4709 err2 = true;
4712 for (i = 0; i < gimple_phi_num_args (phi); i++)
4714 tree arg = gimple_phi_arg_def (phi, i);
4715 tree addr = walk_tree (&arg, verify_node_sharing_1,
4716 visited, NULL);
4717 if (addr)
4719 error ("incorrect sharing of tree nodes");
4720 debug_generic_expr (addr);
4721 err2 |= true;
4723 location_t loc = gimple_phi_arg_location (phi, i);
4724 if (virtual_operand_p (gimple_phi_result (phi))
4725 && loc != UNKNOWN_LOCATION)
4727 error ("virtual PHI with argument locations");
4728 err2 = true;
4730 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4731 if (addr)
4733 debug_generic_expr (addr);
4734 err2 = true;
4736 err2 |= verify_location (blocks, loc);
4739 if (err2)
4740 debug_gimple_stmt (phi);
4741 err |= err2;
4744 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4746 gimple stmt = gsi_stmt (gsi);
4747 bool err2 = false;
4748 struct walk_stmt_info wi;
4749 tree addr;
4750 int lp_nr;
4752 pointer_set_insert (visited_stmts, stmt);
4754 if (gimple_bb (stmt) != bb)
4756 error ("gimple_bb (stmt) is set to a wrong basic block");
4757 err2 = true;
4760 err2 |= verify_gimple_stmt (stmt);
4761 err2 |= verify_location (blocks, gimple_location (stmt));
4763 memset (&wi, 0, sizeof (wi));
4764 wi.info = (void *) visited;
4765 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4766 if (addr)
4768 error ("incorrect sharing of tree nodes");
4769 debug_generic_expr (addr);
4770 err2 |= true;
4773 memset (&wi, 0, sizeof (wi));
4774 wi.info = (void *) blocks;
4775 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4776 if (addr)
4778 debug_generic_expr (addr);
4779 err2 |= true;
4782 /* ??? Instead of not checking these stmts at all the walker
4783 should know its context via wi. */
4784 if (!is_gimple_debug (stmt)
4785 && !is_gimple_omp (stmt))
4787 memset (&wi, 0, sizeof (wi));
4788 addr = walk_gimple_op (stmt, verify_expr, &wi);
4789 if (addr)
4791 debug_generic_expr (addr);
4792 inform (gimple_location (stmt), "in statement");
4793 err2 |= true;
4797 /* If the statement is marked as part of an EH region, then it is
4798 expected that the statement could throw. Verify that when we
4799 have optimizations that simplify statements such that we prove
4800 that they cannot throw, that we update other data structures
4801 to match. */
4802 lp_nr = lookup_stmt_eh_lp (stmt);
4803 if (lp_nr != 0)
4805 if (!stmt_could_throw_p (stmt))
4807 error ("statement marked for throw, but doesn%'t");
4808 err2 |= true;
4810 else if (lp_nr > 0
4811 && !gsi_one_before_end_p (gsi)
4812 && stmt_can_throw_internal (stmt))
4814 error ("statement marked for throw in middle of block");
4815 err2 |= true;
4819 if (err2)
4820 debug_gimple_stmt (stmt);
4821 err |= err2;
4825 eh_error_found = false;
4826 if (get_eh_throw_stmt_table (cfun))
4827 htab_traverse (get_eh_throw_stmt_table (cfun),
4828 verify_eh_throw_stmt_node,
4829 visited_stmts);
4831 if (err || eh_error_found)
4832 internal_error ("verify_gimple failed");
4834 pointer_set_destroy (visited);
4835 pointer_set_destroy (visited_stmts);
4836 pointer_set_destroy (blocks);
4837 verify_histograms ();
4838 timevar_pop (TV_TREE_STMT_VERIFY);
4842 /* Verifies that the flow information is OK. */
4844 static int
4845 gimple_verify_flow_info (void)
4847 int err = 0;
4848 basic_block bb;
4849 gimple_stmt_iterator gsi;
4850 gimple stmt;
4851 edge e;
4852 edge_iterator ei;
/* The artificial ENTRY and EXIT blocks must stay free of statements
   and PHI nodes.  */
4854 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4855 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4857 error ("ENTRY_BLOCK has IL associated with it");
4858 err = 1;
4861 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4862 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4864 error ("EXIT_BLOCK has IL associated with it");
4865 err = 1;
/* Control must never simply fall through into the exit block.  */
4868 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4869 if (e->flags & EDGE_FALLTHRU)
4871 error ("fallthru to exit from bb %d", e->src->index);
4872 err = 1;
4875 FOR_EACH_BB (bb)
4877 bool found_ctrl_stmt = false;
4879 stmt = NULL;
4881 /* Skip labels on the start of basic block. */
4882 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4884 tree label;
4885 gimple prev_stmt = stmt;
4887 stmt = gsi_stmt (gsi);
4889 if (gimple_code (stmt) != GIMPLE_LABEL)
4890 break;
/* Nonlocal labels and EH landing pad labels must be the first label
   of their block; PREV_STMT being set means this one is not first.  */
4892 label = gimple_label_label (stmt);
4893 if (prev_stmt && DECL_NONLOCAL (label))
4895 error ("nonlocal label ");
4896 print_generic_expr (stderr, label, 0);
4897 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4898 bb->index);
4899 err = 1;
4902 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4904 error ("EH landing pad label ");
4905 print_generic_expr (stderr, label, 0);
4906 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4907 bb->index);
4908 err = 1;
/* Each label must map back to the block that contains it, and belong
   to the function being verified.  */
4911 if (label_to_block (label) != bb)
4913 error ("label ");
4914 print_generic_expr (stderr, label, 0);
4915 fprintf (stderr, " to block does not match in bb %d",
4916 bb->index);
4917 err = 1;
4920 if (decl_function_context (label) != current_function_decl)
4922 error ("label ");
4923 print_generic_expr (stderr, label, 0);
4924 fprintf (stderr, " has incorrect context in bb %d",
4925 bb->index);
4926 err = 1;
4930 /* Verify that body of basic block BB is free of control flow. */
4931 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4933 gimple stmt = gsi_stmt (gsi);
4935 if (found_ctrl_stmt)
4937 error ("control flow in the middle of basic block %d",
4938 bb->index);
4939 err = 1;
4942 if (stmt_ends_bb_p (stmt))
4943 found_ctrl_stmt = true;
4945 if (gimple_code (stmt) == GIMPLE_LABEL)
4947 error ("label ");
4948 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4949 fprintf (stderr, " in the middle of basic block %d", bb->index);
4950 err = 1;
/* Now check the outgoing edges against the statement that ends BB.  */
4954 gsi = gsi_last_bb (bb);
4955 if (gsi_end_p (gsi))
4956 continue;
4958 stmt = gsi_stmt (gsi);
4960 if (gimple_code (stmt) == GIMPLE_LABEL)
4961 continue;
4963 err |= verify_eh_edges (stmt);
4965 if (is_ctrl_stmt (stmt))
4967 FOR_EACH_EDGE (e, ei, bb->succs)
4968 if (e->flags & EDGE_FALLTHRU)
4970 error ("fallthru edge after a control statement in bb %d",
4971 bb->index);
4972 err = 1;
4976 if (gimple_code (stmt) != GIMPLE_COND)
4978 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4979 after anything else but if statement. */
4980 FOR_EACH_EDGE (e, ei, bb->succs)
4981 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4983 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4984 bb->index);
4985 err = 1;
/* Per-statement-kind constraints on the successor edges.  */
4989 switch (gimple_code (stmt))
4991 case GIMPLE_COND:
4993 edge true_edge;
4994 edge false_edge;
4996 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4998 if (!true_edge
4999 || !false_edge
5000 || !(true_edge->flags & EDGE_TRUE_VALUE)
5001 || !(false_edge->flags & EDGE_FALSE_VALUE)
5002 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5003 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5004 || EDGE_COUNT (bb->succs) >= 3)
5006 error ("wrong outgoing edge flags at end of bb %d",
5007 bb->index);
5008 err = 1;
5011 break;
5013 case GIMPLE_GOTO:
/* Simple gotos are represented implicitly, so an explicit one at
   the end of a block is an error; computed gotos must only have
   abnormal successor edges.  */
5014 if (simple_goto_p (stmt))
5016 error ("explicit goto at end of bb %d", bb->index);
5017 err = 1;
5019 else
5021 /* FIXME. We should double check that the labels in the
5022 destination blocks have their address taken. */
5023 FOR_EACH_EDGE (e, ei, bb->succs)
5024 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5025 | EDGE_FALSE_VALUE))
5026 || !(e->flags & EDGE_ABNORMAL))
5028 error ("wrong outgoing edge flags at end of bb %d",
5029 bb->index);
5030 err = 1;
5033 break;
5035 case GIMPLE_CALL:
5036 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5037 break;
5038 /* ... fallthru ... */
5039 case GIMPLE_RETURN:
/* A return (or __builtin_return) must have a single plain edge
   to the exit block.  */
5040 if (!single_succ_p (bb)
5041 || (single_succ_edge (bb)->flags
5042 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5043 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5045 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5046 err = 1;
5048 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5050 error ("return edge does not point to exit in bb %d",
5051 bb->index);
5052 err = 1;
5054 break;
5056 case GIMPLE_SWITCH:
5058 tree prev;
5059 edge e;
5060 size_t i, n;
5062 n = gimple_switch_num_labels (stmt);
/* AUX protocol used below: 1 = block is a case destination,
   2 = an outgoing edge to it has also been seen.  */
5064 /* Mark all the destination basic blocks. */
5065 for (i = 0; i < n; ++i)
5067 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5068 basic_block label_bb = label_to_block (lab);
5069 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5070 label_bb->aux = (void *)1;
5073 /* Verify that the case labels are sorted. */
5074 prev = gimple_switch_label (stmt, 0);
5075 for (i = 1; i < n; ++i)
5077 tree c = gimple_switch_label (stmt, i);
5078 if (!CASE_LOW (c))
5080 error ("found default case not at the start of "
5081 "case vector");
5082 err = 1;
5083 continue;
5085 if (CASE_LOW (prev)
5086 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5088 error ("case labels not sorted: ");
5089 print_generic_expr (stderr, prev, 0);
5090 fprintf (stderr," is greater than ");
5091 print_generic_expr (stderr, c, 0);
5092 fprintf (stderr," but comes before it.\n");
5093 err = 1;
5095 prev = c;
5097 /* VRP will remove the default case if it can prove it will
5098 never be executed. So do not verify there always exists
5099 a default case here. */
5101 FOR_EACH_EDGE (e, ei, bb->succs)
5103 if (!e->dest->aux)
5105 error ("extra outgoing edge %d->%d",
5106 bb->index, e->dest->index);
5107 err = 1;
5110 e->dest->aux = (void *)2;
5111 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5112 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5114 error ("wrong outgoing edge flags at end of bb %d",
5115 bb->index);
5116 err = 1;
5120 /* Check that we have all of them. */
5121 for (i = 0; i < n; ++i)
5123 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5124 basic_block label_bb = label_to_block (lab);
/* Any destination still marked 1 never got an edge.  */
5126 if (label_bb->aux != (void *)2)
5128 error ("missing edge %i->%i", bb->index, label_bb->index);
5129 err = 1;
/* Clear the aux marks so other verifier passes see clean state.  */
5133 FOR_EACH_EDGE (e, ei, bb->succs)
5134 e->dest->aux = (void *)0;
5136 break;
5138 case GIMPLE_EH_DISPATCH:
5139 err |= verify_eh_dispatch_edge (stmt);
5140 break;
5142 default:
5143 break;
/* Re-verify dominators only when fast queries claim to be valid.  */
5147 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5148 verify_dominators (CDI_DOMINATORS);
5150 return err;
5154 /* Updates phi nodes after creating a forwarder block joined
5155 by edge FALLTHRU. */
5157 static void
5158 gimple_make_forwarder_block (edge fallthru)
5160 edge e;
5161 edge_iterator ei;
5162 basic_block dummy, bb;
5163 tree var;
5164 gimple_stmt_iterator gsi;
5166 dummy = fallthru->src;
5167 bb = fallthru->dest;
/* With a single predecessor, BB needs no PHI adjustment.  */
5169 if (single_pred_p (bb))
5170 return;
5172 /* If we redirected a branch we must create new PHI nodes at the
5173 start of BB. */
5174 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5176 gimple phi, new_phi;
5178 phi = gsi_stmt (gsi);
5179 var = gimple_phi_result (phi);
/* Move the old result VAR to a new PHI in BB, give the PHI in DUMMY a
   fresh SSA name, and feed that name into the new PHI along the
   fallthru edge.  */
5180 new_phi = create_phi_node (var, bb);
5181 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5182 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5183 UNKNOWN_LOCATION);
5186 /* Add the arguments we have stored on edges. */
5187 FOR_EACH_EDGE (e, ei, bb->preds)
5189 if (e == fallthru)
5190 continue;
5192 flush_pending_stmts (e);
5197 /* Return a non-special label in the head of basic block BLOCK.
5198 Create one if it doesn't exist. */
5200 tree
5201 gimple_block_label (basic_block bb)
5203 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5204 bool first = true;
5205 tree label;
5206 gimple stmt;
5208 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5210 stmt = gsi_stmt (i);
5211 if (gimple_code (stmt) != GIMPLE_LABEL)
5212 break;
5213 label = gimple_label_label (stmt);
5214 if (!DECL_NONLOCAL (label))
5216 if (!first)
5217 gsi_move_before (&i, &s);
5218 return label;
5222 label = create_artificial_label (UNKNOWN_LOCATION);
5223 stmt = gimple_build_label (label);
5224 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5225 return label;
5229 /* Attempt to perform edge redirection by replacing a possibly complex
5230 jump instruction by a goto or by removing the jump completely.
5231 This can apply only if all edges now point to the same block. The
5232 parameters and return values are equivalent to
5233 redirect_edge_and_branch. */
5235 static edge
5236 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5238 basic_block src = e->src;
5239 gimple_stmt_iterator i;
5240 gimple stmt;
5242 /* We can replace or remove a complex jump only when we have exactly
5243 two edges. */
5244 if (EDGE_COUNT (src->succs) != 2
5245 /* Verify that all targets will be TARGET. Specifically, the
5246 edge that is not E must also go to TARGET. */
5247 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5248 return NULL;
5250 i = gsi_last_bb (src);
5251 if (gsi_end_p (i))
5252 return NULL;
5254 stmt = gsi_stmt (i);
5256 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5258 gsi_remove (&i, true);
5259 e = ssa_redirect_edge (e, target);
5260 e->flags = EDGE_FALLTHRU;
5261 return e;
5264 return NULL;
5268 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5269 edge representing the redirected branch. */
5271 static edge
5272 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5274 basic_block bb = e->src;
5275 gimple_stmt_iterator gsi;
5276 edge ret;
5277 gimple stmt;
/* Abnormal edges and already-correct edges cannot be redirected;
   EH edges are handled by the EH machinery.  */
5279 if (e->flags & EDGE_ABNORMAL)
5280 return NULL;
5282 if (e->dest == dest)
5283 return NULL;
5285 if (e->flags & EDGE_EH)
5286 return redirect_eh_edge (e, dest);
5288 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5290 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5291 if (ret)
5292 return ret;
/* Dispatch on the statement ending BB (GIMPLE_ERROR_MARK for an
   empty block) to fix up any statement-level references to the old
   destination before redirecting the CFG edge itself.  */
5295 gsi = gsi_last_bb (bb);
5296 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5298 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5300 case GIMPLE_COND:
5301 /* For COND_EXPR, we only need to redirect the edge. */
5302 break;
5304 case GIMPLE_GOTO:
5305 /* No non-abnormal edges should lead from a non-simple goto, and
5306 simple ones should be represented implicitly. */
5307 gcc_unreachable ();
5309 case GIMPLE_SWITCH:
5311 tree label = gimple_block_label (dest);
5312 tree cases = get_cases_for_edge (e, stmt);
5314 /* If we have a list of cases associated with E, then use it
5315 as it's a lot faster than walking the entire case vector. */
5316 if (cases)
5318 edge e2 = find_edge (e->src, dest);
5319 tree last, first;
/* Retarget every case on E's list at the label of DEST.  */
5321 first = cases;
5322 while (cases)
5324 last = cases;
5325 CASE_LABEL (cases) = label;
5326 cases = CASE_CHAIN (cases);
5329 /* If there was already an edge in the CFG, then we need
5330 to move all the cases associated with E to E2. */
5331 if (e2)
5333 tree cases2 = get_cases_for_edge (e2, stmt);
/* Splice E's case chain onto the front of E2's chain.  */
5335 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5336 CASE_CHAIN (cases2) = first;
5338 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5340 else
/* No edge-to-cases map: walk the whole case vector and retarget
   every case that led to the old destination.  */
5342 size_t i, n = gimple_switch_num_labels (stmt);
5344 for (i = 0; i < n; i++)
5346 tree elt = gimple_switch_label (stmt, i);
5347 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5348 CASE_LABEL (elt) = label;
5352 break;
5354 case GIMPLE_ASM:
5356 int i, n = gimple_asm_nlabels (stmt);
5357 tree label = NULL;
5359 for (i = 0; i < n; ++i)
5361 tree cons = gimple_asm_label_op (stmt, i);
5362 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5364 if (!label)
5365 label = gimple_block_label (dest);
5366 TREE_VALUE (cons) = label;
5370 /* If we didn't find any label matching the former edge in the
5371 asm labels, we must be redirecting the fallthrough
5372 edge. */
5373 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5375 break;
5377 case GIMPLE_RETURN:
/* Drop the return; E becomes a fallthru edge to the new dest.  */
5378 gsi_remove (&gsi, true);
5379 e->flags |= EDGE_FALLTHRU;
5380 break;
5382 case GIMPLE_OMP_RETURN:
5383 case GIMPLE_OMP_CONTINUE:
5384 case GIMPLE_OMP_SECTIONS_SWITCH:
5385 case GIMPLE_OMP_FOR:
5386 /* The edges from OMP constructs can be simply redirected. */
5387 break;
5389 case GIMPLE_EH_DISPATCH:
5390 if (!(e->flags & EDGE_FALLTHRU))
5391 redirect_eh_dispatch_edge (stmt, e, dest);
5392 break;
5394 case GIMPLE_TRANSACTION:
5395 /* The ABORT edge has a stored label associated with it, otherwise
5396 the edges are simply redirectable. */
5397 if (e->flags == 0)
5398 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5399 break;
5401 default:
5402 /* Otherwise it must be a fallthru edge, and we don't need to
5403 do anything besides redirecting it. */
5404 gcc_assert (e->flags & EDGE_FALLTHRU);
5405 break;
5408 /* Update/insert PHI nodes as necessary. */
5410 /* Now update the edges in the CFG. */
5411 e = ssa_redirect_edge (e, dest);
5413 return e;
5416 /* Returns true if it is possible to remove edge E by redirecting
5417 it to the destination of the other edge from E->src. */
5419 static bool
5420 gimple_can_remove_branch_p (const_edge e)
5422 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5423 return false;
5425 return true;
5428 /* Simple wrapper, as we can always redirect fallthru edges. */
5430 static basic_block
5431 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5433 e = gimple_redirect_edge_and_branch (e, dest);
5434 gcc_assert (e);
5436 return NULL;
5440 /* Splits basic block BB after statement STMT (but at least after the
5441 labels). If STMT is NULL, BB is split just after the labels. */
5443 static basic_block
5444 gimple_split_block (basic_block bb, void *stmt)
5446 gimple_stmt_iterator gsi;
5447 gimple_stmt_iterator gsi_tgt;
5448 gimple act;
5449 gimple_seq list;
5450 basic_block new_bb;
5451 edge e;
5452 edge_iterator ei;
5454 new_bb = create_empty_bb (bb);
5456 /* Redirect the outgoing edges. */
5457 new_bb->succs = bb->succs;
5458 bb->succs = NULL;
5459 FOR_EACH_EDGE (e, ei, new_bb->succs)
5460 e->src = new_bb;
/* A label split point is treated like NULL: split right after the
   leading labels.  */
5462 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5463 stmt = NULL;
5465 /* Move everything from GSI to the new basic block. */
5466 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5468 act = gsi_stmt (gsi);
5469 if (gimple_code (act) == GIMPLE_LABEL)
5470 continue;
5472 if (!stmt)
5473 break;
/* Stop just past STMT so STMT itself stays in BB.  */
5475 if (stmt == act)
5477 gsi_next (&gsi);
5478 break;
/* GSI at the end means everything stays in BB.  */
5482 if (gsi_end_p (gsi))
5483 return new_bb;
5485 /* Split the statement list - avoid re-creating new containers as this
5486 brings ugly quadratic memory consumption in the inliner.
5487 (We are still quadratic since we need to update stmt BB pointers,
5488 sadly.) */
5489 gsi_split_seq_before (&gsi, &list);
5490 set_bb_seq (new_bb, list);
5491 for (gsi_tgt = gsi_start (list);
5492 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5493 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5495 return new_bb;
5499 /* Moves basic block BB after block AFTER. */
5501 static bool
5502 gimple_move_block_after (basic_block bb, basic_block after)
5504 if (bb->prev_bb == after)
5505 return true;
5507 unlink_block (bb);
5508 link_block (bb, after);
5510 return true;
5514 /* Return TRUE if block BB has no executable statements, otherwise return
5515 FALSE. */
5517 static bool
5518 gimple_empty_block_p (basic_block bb)
5520 /* BB must have no executable statements. */
5521 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5522 if (phi_nodes (bb))
5523 return false;
5524 if (gsi_end_p (gsi))
5525 return true;
5526 if (is_gimple_debug (gsi_stmt (gsi)))
5527 gsi_next_nondebug (&gsi);
5528 return gsi_end_p (gsi);
5532 /* Split a basic block if it ends with a conditional branch and if the
5533 other part of the block is not empty. */
5535 static basic_block
5536 gimple_split_block_before_cond_jump (basic_block bb)
5538 gimple last, split_point;
5539 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
/* Nothing to do for an empty or debug-only block.  */
5540 if (gsi_end_p (gsi))
5541 return NULL;
5542 last = gsi_stmt (gsi);
/* Only conditional jumps and switches are split points here.  */
5543 if (gimple_code (last) != GIMPLE_COND
5544 && gimple_code (last) != GIMPLE_SWITCH)
5545 return NULL;
/* Split before the condition, i.e. after the statement that precedes it.
   NOTE(review): when the condition is the first non-debug statement of
   BB, gsi_prev_nondebug walks off the front and split_point is
   presumably NULL -- confirm split_block handles a NULL split point.  */
5546 gsi_prev_nondebug (&gsi);
5547 split_point = gsi_stmt (gsi);
/* Return the half that contains the conditional jump.  */
5548 return split_block (bb, split_point)->dest;
5552 /* Return true if basic_block can be duplicated. */
5554 static bool
5555 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5557 return true;
5560 /* Create a duplicate of the basic block BB. NOTE: This does not
5561 preserve SSA form. */
5563 static basic_block
5564 gimple_duplicate_bb (basic_block bb)
5566 basic_block new_bb;
5567 gimple_stmt_iterator gsi, gsi_tgt;
5568 gimple_seq phis = phi_nodes (bb);
5569 gimple phi, stmt, copy;
/* Place the copy just before the exit block.  */
5571 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5573 /* Copy the PHI nodes. We ignore PHI node arguments here because
5574 the incoming edges have not been setup yet. */
5575 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5577 phi = gsi_stmt (gsi);
5578 copy = create_phi_node (NULL_TREE, new_bb);
/* The copied PHI gets a fresh SSA name as its result.  */
5579 create_new_def_for (gimple_phi_result (phi), copy,
5580 gimple_phi_result_ptr (copy));
5581 gimple_set_uid (copy, gimple_uid (phi));
/* Copy the statements, skipping labels.  */
5584 gsi_tgt = gsi_start_bb (new_bb);
5585 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5587 def_operand_p def_p;
5588 ssa_op_iter op_iter;
5589 tree lhs;
5591 stmt = gsi_stmt (gsi);
5592 if (gimple_code (stmt) == GIMPLE_LABEL)
5593 continue;
5595 /* Don't duplicate label debug stmts. */
5596 if (gimple_debug_bind_p (stmt)
5597 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5598 == LABEL_DECL)
5599 continue;
5601 /* Create a new copy of STMT and duplicate STMT's virtual
5602 operands. */
5603 copy = gimple_copy (stmt);
5604 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
/* Keep EH region membership and profile histograms in sync.  */
5606 maybe_duplicate_eh_stmt (copy, stmt);
5607 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5609 /* When copying around a stmt writing into a local non-user
5610 aggregate, make sure it won't share stack slot with other
5611 vars. */
5612 lhs = gimple_get_lhs (stmt);
5613 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5615 tree base = get_base_address (lhs);
5616 if (base
5617 && (TREE_CODE (base) == VAR_DECL
5618 || TREE_CODE (base) == RESULT_DECL)
5619 && DECL_IGNORED_P (base)
5620 && !TREE_STATIC (base)
5621 && !DECL_EXTERNAL (base)
5622 && (TREE_CODE (base) != VAR_DECL
5623 || !DECL_HAS_VALUE_EXPR_P (base)))
5624 DECL_NONSHAREABLE (base) = 1;
5627 /* Create new names for all the definitions created by COPY and
5628 add replacement mappings for each new name. */
5629 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5630 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5633 return new_bb;
5636 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5638 static void
5639 add_phi_args_after_copy_edge (edge e_copy)
5641 basic_block bb, bb_copy = e_copy->src, dest;
5642 edge e;
5643 edge_iterator ei;
5644 gimple phi, phi_copy;
5645 tree def;
5646 gimple_stmt_iterator psi, psi_copy;
5648 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5649 return;
/* Map the copied source/destination back to their originals (blocks
   taking part in the duplication carry BB_DUPLICATED).  */
5651 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5653 if (e_copy->dest->flags & BB_DUPLICATED)
5654 dest = get_bb_original (e_copy->dest);
5655 else
5656 dest = e_copy->dest;
/* Find the original edge corresponding to E_COPY.  */
5658 e = find_edge (bb, dest);
5659 if (!e)
5661 /* During loop unrolling the target of the latch edge is copied.
5662 In this case we are not looking for edge to dest, but to
5663 duplicated block whose original was dest. */
5664 FOR_EACH_EDGE (e, ei, bb->succs)
5666 if ((e->dest->flags & BB_DUPLICATED)
5667 && get_bb_original (e->dest) == dest)
5668 break;
5671 gcc_assert (e != NULL);
/* PHIs in the copied block parallel those in the original, so walk
   both lists in lockstep and transfer each argument along E.  */
5674 for (psi = gsi_start_phis (e->dest),
5675 psi_copy = gsi_start_phis (e_copy->dest);
5676 !gsi_end_p (psi);
5677 gsi_next (&psi), gsi_next (&psi_copy))
5679 phi = gsi_stmt (psi);
5680 phi_copy = gsi_stmt (psi_copy);
5681 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5682 add_phi_arg (phi_copy, def, e_copy,
5683 gimple_phi_arg_location_from_edge (phi, e));
5688 /* Basic block BB_COPY was created by code duplication. Add phi node
5689 arguments for edges going out of BB_COPY. The blocks that were
5690 duplicated have BB_DUPLICATED set. */
5692 void
5693 add_phi_args_after_copy_bb (basic_block bb_copy)
5695 edge e_copy;
5696 edge_iterator ei;
5698 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5700 add_phi_args_after_copy_edge (e_copy);
5704 /* Blocks in REGION_COPY array of length N_REGION were created by
5705 duplication of basic blocks. Add phi node arguments for edges
5706 going from these blocks. If E_COPY is not NULL, also add
5707 phi node arguments for its destination.*/
5709 void
5710 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5711 edge e_copy)
5713 unsigned i;
5715 for (i = 0; i < n_region; i++)
5716 region_copy[i]->flags |= BB_DUPLICATED;
5718 for (i = 0; i < n_region; i++)
5719 add_phi_args_after_copy_bb (region_copy[i]);
5720 if (e_copy)
5721 add_phi_args_after_copy_edge (e_copy);
5723 for (i = 0; i < n_region; i++)
5724 region_copy[i]->flags &= ~BB_DUPLICATED;
5727 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5728 important exit edge EXIT. By important we mean that no SSA name defined
5729 inside region is live over the other exit edges of the region. All entry
5730 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5731 to the duplicate of the region. Dominance and loop information is
5732 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5733 UPDATE_DOMINANCE is false then we assume that the caller will update the
5734 dominance information after calling this function. The new basic
5735 blocks are stored to REGION_COPY in the same order as they had in REGION,
5736 provided that REGION_COPY is not NULL.
5737 The function returns false if it is unable to copy the region,
5738 true otherwise. */
5740 bool
5741 gimple_duplicate_sese_region (edge entry, edge exit,
5742 basic_block *region, unsigned n_region,
5743 basic_block *region_copy,
5744 bool update_dominance)
5746 unsigned i;
5747 bool free_region_copy = false, copying_header = false;
5748 struct loop *loop = entry->dest->loop_father;
5749 edge exit_copy;
5750 vec<basic_block> doms;
5751 edge redirected;
5752 int total_freq = 0, entry_freq = 0;
5753 gcov_type total_count = 0, entry_count = 0;
5755 if (!can_copy_bbs_p (region, n_region))
5756 return false;
5758 /* Some sanity checking. Note that we do not check for all possible
5759 missuses of the functions. I.e. if you ask to copy something weird,
5760 it will work, but the state of structures probably will not be
5761 correct. */
5762 for (i = 0; i < n_region; i++)
5764 /* We do not handle subloops, i.e. all the blocks must belong to the
5765 same loop. */
5766 if (region[i]->loop_father != loop)
5767 return false;
5769 if (region[i] != entry->dest
5770 && region[i] == loop->header)
5771 return false;
5774 set_loop_copy (loop, loop);
5776 /* In case the function is used for loop header copying (which is the primary
5777 use), ensure that EXIT and its copy will be new latch and entry edges. */
5778 if (loop->header == entry->dest)
5780 copying_header = true;
5781 set_loop_copy (loop, loop_outer (loop));
5783 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5784 return false;
5786 for (i = 0; i < n_region; i++)
5787 if (region[i] != exit->src
5788 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5789 return false;
/* Allocate a scratch array when the caller did not supply one.  */
5792 if (!region_copy)
5794 region_copy = XNEWVEC (basic_block, n_region);
5795 free_region_copy = true;
5798 initialize_original_copy_tables ();
5800 /* Record blocks outside the region that are dominated by something
5801 inside. */
5802 if (update_dominance)
5804 doms.create (0);
5805 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available, otherwise fall back to
   block frequencies, clamping to avoid negative scaling.  */
5808 if (entry->dest->count)
5810 total_count = entry->dest->count;
5811 entry_count = entry->count;
5812 /* Fix up corner cases, to avoid division by zero or creation of negative
5813 frequencies. */
5814 if (entry_count > total_count)
5815 entry_count = total_count;
5817 else
5819 total_freq = entry->dest->frequency;
5820 entry_freq = EDGE_FREQUENCY (entry);
5821 /* Fix up corner cases, to avoid division by zero or creation of negative
5822 frequencies. */
5823 if (total_freq == 0)
5824 total_freq = 1;
5825 else if (entry_freq > total_freq)
5826 entry_freq = total_freq;
5829 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5830 split_edge_bb_loc (entry), update_dominance);
5831 if (total_count)
5833 scale_bbs_frequencies_gcov_type (region, n_region,
5834 total_count - entry_count,
5835 total_count);
5836 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5837 total_count);
5839 else
5841 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5842 total_freq);
5843 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* When copying a loop header, EXIT's endpoints become the loop's new
   header and latch.  */
5846 if (copying_header)
5848 loop->header = exit->dest;
5849 loop->latch = exit->src;
5852 /* Redirect the entry and add the phi node arguments. */
5853 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5854 gcc_assert (redirected != NULL);
5855 flush_pending_stmts (entry);
5857 /* Concerning updating of dominators: We must recount dominators
5858 for entry block and its copy. Anything that is outside of the
5859 region, but was dominated by something inside needs recounting as
5860 well. */
5861 if (update_dominance)
5863 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5864 doms.safe_push (get_bb_original (entry->dest));
5865 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5866 doms.release ();
5869 /* Add the other PHI node arguments. */
5870 add_phi_args_after_copy (region_copy, n_region, NULL);
5872 if (free_region_copy)
5873 free (region_copy);
5875 free_original_copy_tables ();
5876 return true;
5879 /* Checks if BB is part of the region defined by N_REGION BBS. */
5880 static bool
5881 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5883 unsigned int n;
5885 for (n = 0; n < n_region; n++)
5887 if (bb == bbs[n])
5888 return true;
5890 return false;
5893 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5894 are stored to REGION_COPY in the same order in that they appear
5895 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5896 the region, EXIT an exit from it. The condition guarding EXIT
5897 is moved to ENTRY. Returns true if duplication succeeds, false
5898 otherwise.
5900 For example,
5902 some_code;
5903 if (cond)
5905 else
5908 is transformed to
5910 if (cond)
5912 some_code;
5915 else
5917 some_code;
/* NOTE(review): the ATTRIBUTE_UNUSED markers on all five parameters
   below are stale -- every parameter is used in the body.  They should
   be removed.  */
5922 bool
5923 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5924 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5925 basic_block *region_copy ATTRIBUTE_UNUSED)
5927 unsigned i;
5928 bool free_region_copy = false;
5929 struct loop *loop = exit->dest->loop_father;
5930 struct loop *orig_loop = entry->dest->loop_father;
5931 basic_block switch_bb, entry_bb, nentry_bb;
5932 vec<basic_block> doms;
5933 int total_freq = 0, exit_freq = 0;
5934 gcov_type total_count = 0, exit_count = 0;
5935 edge exits[2], nexits[2], e;
5936 gimple_stmt_iterator gsi;
5937 gimple cond_stmt;
5938 edge sorig, snew;
5939 basic_block exit_bb;
5940 gimple_stmt_iterator psi;
5941 gimple phi;
5942 tree def;
5943 struct loop *target, *aloop, *cloop;
/* EXIT->src must end in a two-way condition; EXITS holds both of its
   outgoing edges.  */
5945 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5946 exits[0] = exit;
5947 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5949 if (!can_copy_bbs_p (region, n_region))
5950 return false;
5952 initialize_original_copy_tables ();
5953 set_loop_copy (orig_loop, loop);
/* Duplicate the loop structures of any loop whose header lies inside
   the region.  */
5955 target= loop;
5956 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5958 if (bb_part_of_region_p (aloop->header, region, n_region))
5960 cloop = duplicate_loop (aloop, target);
5961 duplicate_subloops (aloop, cloop);
5965 if (!region_copy)
5967 region_copy = XNEWVEC (basic_block, n_region);
5968 free_region_copy = true;
5971 gcc_assert (!need_ssa_update_p (cfun));
5973 /* Record blocks outside the region that are dominated by something
5974 inside. */
5975 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available, otherwise use frequencies,
   clamping to avoid negative scaling.  */
5977 if (exit->src->count)
5979 total_count = exit->src->count;
5980 exit_count = exit->count;
5981 /* Fix up corner cases, to avoid division by zero or creation of negative
5982 frequencies. */
5983 if (exit_count > total_count)
5984 exit_count = total_count;
5986 else
5988 total_freq = exit->src->frequency;
5989 exit_freq = EDGE_FREQUENCY (exit);
5990 /* Fix up corner cases, to avoid division by zero or creation of negative
5991 frequencies. */
5992 if (total_freq == 0)
5993 total_freq = 1;
5994 if (exit_freq > total_freq)
5995 exit_freq = total_freq;
5998 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5999 split_edge_bb_loc (exit), true);
6000 if (total_count)
6002 scale_bbs_frequencies_gcov_type (region, n_region,
6003 total_count - exit_count,
6004 total_count);
6005 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6006 total_count);
6008 else
6010 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6011 total_freq);
6012 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6015 /* Create the switch block, and put the exit condition to it. */
6016 entry_bb = entry->dest;
6017 nentry_bb = get_bb_copy (entry_bb);
6018 if (!last_stmt (entry->src)
6019 || !stmt_ends_bb_p (last_stmt (entry->src)))
6020 switch_bb = entry->src;
6021 else
6022 switch_bb = split_edge (entry);
6023 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
/* Copy the guarding condition from EXIT->src into the switch block.  */
6025 gsi = gsi_last_bb (switch_bb);
6026 cond_stmt = last_stmt (exit->src);
6027 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6028 cond_stmt = gimple_copy (cond_stmt);
6030 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
/* The switch block's two successors inherit the flag bits of the
   original exit edges.  */
6032 sorig = single_succ_edge (switch_bb);
6033 sorig->flags = exits[1]->flags;
6034 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6036 /* Register the new edge from SWITCH_BB in loop exit lists. */
6037 rescan_loop_exit (snew, true, false);
6039 /* Add the PHI node arguments. */
6040 add_phi_args_after_copy (region_copy, n_region, snew);
6042 /* Get rid of now superfluous conditions and associated edges (and phi node
6043 arguments). */
6044 exit_bb = exit->dest;
6046 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6047 PENDING_STMT (e) = NULL;
6049 /* The latch of ORIG_LOOP was copied, and so was the backedge
6050 to the original header. We redirect this backedge to EXIT_BB. */
6051 for (i = 0; i < n_region; i++)
6052 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6054 gcc_assert (single_succ_edge (region_copy[i]));
6055 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6056 PENDING_STMT (e) = NULL;
6057 for (psi = gsi_start_phis (exit_bb);
6058 !gsi_end_p (psi);
6059 gsi_next (&psi))
6061 phi = gsi_stmt (psi);
6062 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6063 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6066 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6067 PENDING_STMT (e) = NULL;
6069 /* Anything that is outside of the region, but was dominated by something
6070 inside needs to update dominance info. */
6071 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6072 doms.release ();
6073 /* Update the SSA web. */
6074 update_ssa (TODO_update_ssa);
6076 if (free_region_copy)
6077 free (region_copy);
6079 free_original_copy_tables ();
6080 return true;
6083 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6084 adding blocks when the dominator traversal reaches EXIT. This
6085 function silently assumes that ENTRY strictly dominates EXIT. */
6087 void
6088 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6089 vec<basic_block> *bbs_p)
6091 basic_block son;
6093 for (son = first_dom_son (CDI_DOMINATORS, entry);
6094 son;
6095 son = next_dom_son (CDI_DOMINATORS, son))
6097 bbs_p->safe_push (son);
6098 if (son != exit)
6099 gather_blocks_in_sese_region (son, exit, bbs_p);
6103 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6104 The duplicates are recorded in VARS_MAP. */
6106 static void
6107 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6108 tree to_context)
6110 tree t = *tp, new_t;
6111 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6112 void **loc;
6114 if (DECL_CONTEXT (t) == to_context)
6115 return;
6117 loc = pointer_map_contains (vars_map, t);
6119 if (!loc)
6121 loc = pointer_map_insert (vars_map, t);
6123 if (SSA_VAR_P (t))
6125 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6126 add_local_decl (f, new_t);
6128 else
6130 gcc_assert (TREE_CODE (t) == CONST_DECL);
6131 new_t = copy_node (t);
6133 DECL_CONTEXT (new_t) = to_context;
6135 *loc = new_t;
6137 else
6138 new_t = (tree) *loc;
6140 *tp = new_t;
6144 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6145 VARS_MAP maps old ssa names and var_decls to the new ones. */
6147 static tree
6148 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6149 tree to_context)
6151 void **loc;
6152 tree new_name;
6154 gcc_assert (!virtual_operand_p (name));
6156 loc = pointer_map_contains (vars_map, name);
6158 if (!loc)
6160 tree decl = SSA_NAME_VAR (name);
6161 if (decl)
6163 replace_by_duplicate_decl (&decl, vars_map, to_context);
6164 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6165 decl, SSA_NAME_DEF_STMT (name));
6166 if (SSA_NAME_IS_DEFAULT_DEF (name))
6167 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6168 decl, new_name);
6170 else
6171 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6172 name, SSA_NAME_DEF_STMT (name));
6174 loc = pointer_map_insert (vars_map, name);
6175 *loc = new_name;
6177 else
6178 new_name = (tree) *loc;
6180 return new_name;
/* Bookkeeping shared by the move_stmt_* walkers while statements are
   being moved from one function to another (see move_block_to_fn).  */

struct move_stmt_d
{
  tree orig_block;		/* BLOCK the moved statements belonged to.  */
  tree new_block;		/* BLOCK to attach moved statements to.  */
  tree from_context;		/* FUNCTION_DECL statements come from.  */
  tree to_context;		/* FUNCTION_DECL receiving the statements.  */
  struct pointer_map_t *vars_map;  /* Old decls/SSA names -> duplicates.  */
  htab_t new_label_map;		/* tree_map table: old label -> new label.  */
  struct pointer_map_t *eh_map;	/* Old EH regions -> duplicated regions.  */
  bool remap_decls_p;		/* Whether local decls should be remapped
				   (cleared inside OMP directives).  */
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.

   Callback for walk_gimple_stmt / walk_gimple_seq_mod; DATA is a
   walk_stmt_info whose info field points to a move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      /* Re-parent the expression's BLOCK: either it was in ORIG_BLOCK,
	 or ORIG_BLOCK is NULL and every non-NULL block moves.  */
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Sanity check: any other block must be nested (transitively)
	     inside ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels duplicated by EH-region copying were recorded in
	     NEW_LABEL_MAP; substitute the duplicate if present.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees to walk.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6261 /* Helper for move_stmt_r. Given an EH region number for the source
6262 function, map that to the duplicate EH regio number in the dest. */
6264 static int
6265 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6267 eh_region old_r, new_r;
6268 void **slot;
6270 old_r = get_eh_region_from_number (old_nr);
6271 slot = pointer_map_contains (p->eh_map, old_r);
6272 new_r = (eh_region) *slot;
6274 return new_r->index;
6277 /* Similar, but operate on INTEGER_CSTs. */
6279 static tree
6280 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6282 int old_nr, new_nr;
6284 old_nr = tree_to_shwi (old_t_nr);
6285 new_nr = move_stmt_eh_region_nr (old_nr, p);
6287 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Re-parent the statement's block, mirroring move_stmt_op's rule for
     expressions.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is a region number too; remap it, then fall
		 through to handle argument 0.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.
     The caller stashed the destination loop in loop_father->aux.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the array must cover at least index bb->index.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by roughly 5/4 to amortize repeated insertions.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-parent the BLOCK part of each phi argument location.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label->block mapping to the destination CFG,
	     growing its map as needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move EH landing-pad and histogram data along with the stmt.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      /* update_stmt must run with DEST_CFUN as the current function so
	 operands are reallocated from its caches.  */
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-parent the BLOCK part of each outgoing edge's goto locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6545 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6546 the outermost EH region. Use REGION as the incoming base EH region. */
6548 static eh_region
6549 find_outermost_region_in_block (struct function *src_cfun,
6550 basic_block bb, eh_region region)
6552 gimple_stmt_iterator si;
6554 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6556 gimple stmt = gsi_stmt (si);
6557 eh_region stmt_region;
6558 int lp_nr;
6560 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6561 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6562 if (stmt_region)
6564 if (region == NULL)
6565 region = stmt_region;
6566 else if (stmt_region != region)
6568 region = eh_region_outermost (src_cfun, stmt_region, region);
6569 gcc_assert (region != NULL);
6574 return region;
6577 static tree
6578 new_label_mapper (tree decl, void *data)
6580 htab_t hash = (htab_t) data;
6581 struct tree_map *m;
6582 void **slot;
6584 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6586 m = XNEW (struct tree_map);
6587 m->hash = DECL_UID (decl);
6588 m->base.from = decl;
6589 m->to = create_artificial_label (UNKNOWN_LOCATION);
6590 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6591 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6592 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6594 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6595 gcc_assert (*slot == NULL);
6597 *slot = m;
6599 return m->to;
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  Each variable is replaced in-place in the chain by its
   duplicate from VARS_MAP (created on demand via
   replace_by_duplicate_decl).  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* Walk the BLOCK_VARS chain through the chain-link pointers so we can
     splice replacements in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry over a DECL_VALUE_EXPR to the duplicate.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the
	     original.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6633 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6634 from FN1 to FN2. */
6636 static void
6637 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6638 struct loop *loop)
6640 /* Discard it from the old loop array. */
6641 (*get_loops (fn1))[loop->num] = NULL;
6643 /* Place it in the new loop array, assigning it a new number. */
6644 loop->num = number_of_loops (fn2);
6645 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6647 /* Recurse to children. */
6648 for (loop = loop->inner; loop; loop = loop->next)
6649 fixup_loop_arrays_after_move (fn1, fn2, loop);
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map, *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      /* Find the single outermost EH region covering the moved blocks.  */
      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_alloc_cleared_loops ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  NUM_NODES ends up as the count
     of moved blocks attributed to LOOP (vs. LOOP0).  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    void **slot = htab_find_slot_with_hash
		(loops_for_fn (saved_cfun)->exits, e,
		 htab_hash_pointer (e), NO_INSERT);
	    if (slot)
	      htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.
     The +2 accounts for the entry and exit blocks of the new CFG.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Setup a mapping to be used by move_block_to_fn (it reads the
     destination loop from loop_father->aux).  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  /* Propagate simduid decls and the force_vect flag to the new
     function's loops.  */
  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vect)
	      dest_cfun->has_force_vect_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    pointer_map_destroy (eh_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump of a bodiless function is just the tree node.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Also list anonymous SSA names (those without an underlying
	 variable).  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, " ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Skip the outermost bind; its vars were printed above.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the function context we clobbered on entry.  */
  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_function_to_file (fn, stderr, flags);
}
7117 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7119 static void
7120 print_pred_bbs (FILE *file, basic_block bb)
7122 edge e;
7123 edge_iterator ei;
7125 FOR_EACH_EDGE (e, ei, bb->preds)
7126 fprintf (file, "bb_%d ", e->src->index);
7130 /* Print on FILE the indexes for the successors of basic_block BB. */
7132 static void
7133 print_succ_bbs (FILE *file, basic_block bb)
7135 edge e;
7136 edge_iterator ei;
7138 FOR_EACH_EDGE (e, ei, bb->succs)
7139 fprintf (file, "bb_%d ", e->dest->index);
/* Print to FILE the basic block BB following the VERBOSITY level.
   Verbosity >= 2 prints the block header with its predecessor and
   successor indexes; verbosity >= 3 additionally dumps the block
   body.  */

void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  /* Build an INDENT-wide space prefix on the stack.  */
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s {\n", s_indent);
      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
      fprintf (file, "%s }\n", s_indent);
    }
}
7170 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  /* Build an INDENT-wide space prefix on the stack.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop with no header has been deleted; nothing more to show.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body: its own blocks, then nested loops.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7231 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7232 spaces. Following VERBOSITY level this outputs the contents of the
7233 loop, or just its structure. */
7235 static void
7236 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7237 int verbosity)
7239 if (loop == NULL)
7240 return;
7242 print_loop (file, loop, indent, verbosity);
7243 print_loop_and_siblings (file, loop->next, indent, verbosity);
7246 /* Follow a CFG edge from the entry point of the program, and on entry
7247 of a loop, pretty print the loop structure on FILE. */
7249 void
7250 print_loops (FILE *file, int verbosity)
7252 basic_block bb;
7254 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7255 if (bb && bb->loop_father)
7256 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7259 /* Dump a loop. */
7261 DEBUG_FUNCTION void
7262 debug (struct loop &ref)
7264 print_loop (stderr, &ref, 0, /*verbosity*/0);
7267 DEBUG_FUNCTION void
7268 debug (struct loop *ptr)
7270 if (ptr)
7271 debug (*ptr);
7272 else
7273 fprintf (stderr, "<nil>\n");
7276 /* Dump a loop verbosely. */
7278 DEBUG_FUNCTION void
7279 debug_verbose (struct loop &ref)
7281 print_loop (stderr, &ref, 0, /*verbosity*/3);
7284 DEBUG_FUNCTION void
7285 debug_verbose (struct loop *ptr)
7287 if (ptr)
7288 debug (*ptr);
7289 else
7290 fprintf (stderr, "<nil>\n");
7294 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7296 DEBUG_FUNCTION void
7297 debug_loops (int verbosity)
7299 print_loops (stderr, verbosity);
7302 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7304 DEBUG_FUNCTION void
7305 debug_loop (struct loop *loop, int verbosity)
7307 print_loop (stderr, loop, 0, verbosity);
7310 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7311 level. */
7313 DEBUG_FUNCTION void
7314 debug_loop_num (unsigned num, int verbosity)
7316 debug_loop (get_loop (cfun, num), verbosity);
7319 /* Return true if BB ends with a call, possibly followed by some
7320 instructions that must stay with the call. Return false,
7321 otherwise. */
7323 static bool
7324 gimple_block_ends_with_call_p (basic_block bb)
7326 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7327 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7331 /* Return true if BB ends with a conditional branch. Return false,
7332 otherwise. */
7334 static bool
7335 gimple_block_ends_with_condjump_p (const_basic_block bb)
7337 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7338 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7342 /* Return true if we need to add fake edge to exit at statement T.
7343 Helper function for gimple_flow_call_edges_add. */
7345 static bool
7346 need_fake_edge_p (gimple t)
7348 tree fndecl = NULL_TREE;
7349 int call_flags = 0;
7351 /* NORETURN and LONGJMP calls already have an edge to exit.
7352 CONST and PURE calls do not need one.
7353 We don't currently check for CONST and PURE here, although
7354 it would be a good idea, because those attributes are
7355 figured out from the RTL in mark_constant_function, and
7356 the counter incrementation code from -fprofile-arcs
7357 leads to different results from -fbranch-probabilities. */
7358 if (is_gimple_call (t))
7360 fndecl = gimple_call_fndecl (t);
7361 call_flags = gimple_call_flags (t);
7364 if (is_gimple_call (t)
7365 && fndecl
7366 && DECL_BUILT_IN (fndecl)
7367 && (call_flags & ECF_NOTHROW)
7368 && !(call_flags & ECF_RETURNS_TWICE)
7369 /* fork() doesn't really return twice, but the effect of
7370 wrapping it in __gcov_fork() which calls __gcov_flush()
7371 and clears the counters before forking has the same
7372 effect as returning twice. Force a fake edge. */
7373 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7374 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7375 return false;
7377 if (is_gimple_call (t))
7379 edge_iterator ei;
7380 edge e;
7381 basic_block bb;
7383 if (!(call_flags & ECF_NORETURN))
7384 return true;
7386 bb = gimple_bb (t);
7387 FOR_EACH_EDGE (e, ei, bb->succs)
7388 if ((e->flags & EDGE_FAKE) == 0)
7389 return true;
7392 if (gimple_code (t) == GIMPLE_ASM
7393 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7394 return true;
7396 return false;
7400 /* Add fake edges to the function exit for any non constant and non
7401 noreturn calls (or noreturn calls with EH/abnormal edges),
7402 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7403 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7404 that were split.
7406 The goal is to expose cases in which entering a basic block does
7407 not imply that all subsequent instructions must be executed. */
7409 static int
7410 gimple_flow_call_edges_add (sbitmap blocks)
7412 int i;
7413 int blocks_split = 0;
7414 int last_bb = last_basic_block;
7415 bool check_last_block = false;
7417 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7418 return 0;
7420 if (! blocks)
7421 check_last_block = true;
7422 else
7423 check_last_block = bitmap_bit_p (blocks,
7424 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7426 /* In the last basic block, before epilogue generation, there will be
7427 a fallthru edge to EXIT. Special care is required if the last insn
7428 of the last basic block is a call because make_edge folds duplicate
7429 edges, which would result in the fallthru edge also being marked
7430 fake, which would result in the fallthru edge being removed by
7431 remove_fake_edges, which would result in an invalid CFG.
7433 Moreover, we can't elide the outgoing fake edge, since the block
7434 profiler needs to take this into account in order to solve the minimal
7435 spanning tree in the case that the call doesn't return.
7437 Handle this by adding a dummy instruction in a new last basic block. */
7438 if (check_last_block)
7440 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7441 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7442 gimple t = NULL;
7444 if (!gsi_end_p (gsi))
7445 t = gsi_stmt (gsi);
7447 if (t && need_fake_edge_p (t))
7449 edge e;
7451 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7452 if (e)
7454 gsi_insert_on_edge (e, gimple_build_nop ());
7455 gsi_commit_edge_inserts ();
7460 /* Now add fake edges to the function exit for any non constant
7461 calls since there is no way that we can determine if they will
7462 return or not... */
7463 for (i = 0; i < last_bb; i++)
7465 basic_block bb = BASIC_BLOCK (i);
7466 gimple_stmt_iterator gsi;
7467 gimple stmt, last_stmt;
7469 if (!bb)
7470 continue;
7472 if (blocks && !bitmap_bit_p (blocks, i))
7473 continue;
7475 gsi = gsi_last_nondebug_bb (bb);
7476 if (!gsi_end_p (gsi))
7478 last_stmt = gsi_stmt (gsi);
7481 stmt = gsi_stmt (gsi);
7482 if (need_fake_edge_p (stmt))
7484 edge e;
7486 /* The handling above of the final block before the
7487 epilogue should be enough to verify that there is
7488 no edge to the exit block in CFG already.
7489 Calling make_edge in such case would cause us to
7490 mark that edge as fake and remove it later. */
7491 #ifdef ENABLE_CHECKING
7492 if (stmt == last_stmt)
7494 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7495 gcc_assert (e == NULL);
7497 #endif
7499 /* Note that the following may create a new basic block
7500 and renumber the existing basic blocks. */
7501 if (stmt != last_stmt)
7503 e = split_block (bb, stmt);
7504 if (e)
7505 blocks_split++;
7507 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7509 gsi_prev (&gsi);
7511 while (!gsi_end_p (gsi));
7515 if (blocks_split)
7516 verify_flow_info ();
7518 return blocks_split;
7521 /* Removes edge E and all the blocks dominated by it, and updates dominance
7522 information. The IL in E->src needs to be updated separately.
7523 If dominance info is not available, only the edge E is removed.*/
7525 void
7526 remove_edge_and_dominated_blocks (edge e)
7528 vec<basic_block> bbs_to_remove = vNULL;
7529 vec<basic_block> bbs_to_fix_dom = vNULL;
7530 bitmap df, df_idom;
7531 edge f;
7532 edge_iterator ei;
7533 bool none_removed = false;
7534 unsigned i;
7535 basic_block bb, dbb;
7536 bitmap_iterator bi;
7538 if (!dom_info_available_p (CDI_DOMINATORS))
7540 remove_edge (e);
7541 return;
7544 /* No updating is needed for edges to exit. */
7545 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7547 if (cfgcleanup_altered_bbs)
7548 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7549 remove_edge (e);
7550 return;
7553 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7554 that is not dominated by E->dest, then this set is empty. Otherwise,
7555 all the basic blocks dominated by E->dest are removed.
7557 Also, to DF_IDOM we store the immediate dominators of the blocks in
7558 the dominance frontier of E (i.e., of the successors of the
7559 removed blocks, if there are any, and of E->dest otherwise). */
7560 FOR_EACH_EDGE (f, ei, e->dest->preds)
7562 if (f == e)
7563 continue;
7565 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7567 none_removed = true;
7568 break;
7572 df = BITMAP_ALLOC (NULL);
7573 df_idom = BITMAP_ALLOC (NULL);
7575 if (none_removed)
7576 bitmap_set_bit (df_idom,
7577 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7578 else
7580 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7581 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7583 FOR_EACH_EDGE (f, ei, bb->succs)
7585 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7586 bitmap_set_bit (df, f->dest->index);
7589 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7590 bitmap_clear_bit (df, bb->index);
7592 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7594 bb = BASIC_BLOCK (i);
7595 bitmap_set_bit (df_idom,
7596 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7600 if (cfgcleanup_altered_bbs)
7602 /* Record the set of the altered basic blocks. */
7603 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7604 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7607 /* Remove E and the cancelled blocks. */
7608 if (none_removed)
7609 remove_edge (e);
7610 else
7612 /* Walk backwards so as to get a chance to substitute all
7613 released DEFs into debug stmts. See
7614 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7615 details. */
7616 for (i = bbs_to_remove.length (); i-- > 0; )
7617 delete_basic_block (bbs_to_remove[i]);
7620 /* Update the dominance information. The immediate dominator may change only
7621 for blocks whose immediate dominator belongs to DF_IDOM:
7623 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7624 removal. Let Z the arbitrary block such that idom(Z) = Y and
7625 Z dominates X after the removal. Before removal, there exists a path P
7626 from Y to X that avoids Z. Let F be the last edge on P that is
7627 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7628 dominates W, and because of P, Z does not dominate W), and W belongs to
7629 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7630 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7632 bb = BASIC_BLOCK (i);
7633 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7634 dbb;
7635 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7636 bbs_to_fix_dom.safe_push (dbb);
7639 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7641 BITMAP_FREE (df);
7642 BITMAP_FREE (df_idom);
7643 bbs_to_remove.release ();
7644 bbs_to_fix_dom.release ();
7647 /* Purge dead EH edges from basic block BB. */
7649 bool
7650 gimple_purge_dead_eh_edges (basic_block bb)
7652 bool changed = false;
7653 edge e;
7654 edge_iterator ei;
7655 gimple stmt = last_stmt (bb);
7657 if (stmt && stmt_can_throw_internal (stmt))
7658 return false;
7660 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7662 if (e->flags & EDGE_EH)
7664 remove_edge_and_dominated_blocks (e);
7665 changed = true;
7667 else
7668 ei_next (&ei);
7671 return changed;
7674 /* Purge dead EH edges from basic block listed in BLOCKS. */
7676 bool
7677 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7679 bool changed = false;
7680 unsigned i;
7681 bitmap_iterator bi;
7683 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7685 basic_block bb = BASIC_BLOCK (i);
7687 /* Earlier gimple_purge_dead_eh_edges could have removed
7688 this basic block already. */
7689 gcc_assert (bb || changed);
7690 if (bb != NULL)
7691 changed |= gimple_purge_dead_eh_edges (bb);
7694 return changed;
7697 /* Purge dead abnormal call edges from basic block BB. */
7699 bool
7700 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7702 bool changed = false;
7703 edge e;
7704 edge_iterator ei;
7705 gimple stmt = last_stmt (bb);
7707 if (!cfun->has_nonlocal_label
7708 && !cfun->calls_setjmp)
7709 return false;
7711 if (stmt && stmt_can_make_abnormal_goto (stmt))
7712 return false;
7714 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7716 if (e->flags & EDGE_ABNORMAL)
7718 if (e->flags & EDGE_FALLTHRU)
7719 e->flags &= ~EDGE_ABNORMAL;
7720 else
7721 remove_edge_and_dominated_blocks (e);
7722 changed = true;
7724 else
7725 ei_next (&ei);
7728 return changed;
7731 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7733 bool
7734 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7736 bool changed = false;
7737 unsigned i;
7738 bitmap_iterator bi;
7740 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7742 basic_block bb = BASIC_BLOCK (i);
7744 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7745 this basic block already. */
7746 gcc_assert (bb || changed);
7747 if (bb != NULL)
7748 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7751 return changed;
7754 /* This function is called whenever a new edge is created or
7755 redirected. */
7757 static void
7758 gimple_execute_on_growing_pred (edge e)
7760 basic_block bb = e->dest;
7762 if (!gimple_seq_empty_p (phi_nodes (bb)))
7763 reserve_phi_args_for_new_edge (bb);
7766 /* This function is called immediately before edge E is removed from
7767 the edge vector E->dest->preds. */
7769 static void
7770 gimple_execute_on_shrinking_pred (edge e)
7772 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7773 remove_phi_args (e);
7776 /*---------------------------------------------------------------------------
7777 Helper functions for Loop versioning
7778 ---------------------------------------------------------------------------*/
7780 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7781 of 'first'. Both of them are dominated by 'new_head' basic block. When
7782 'new_head' was created by 'second's incoming edge it received phi arguments
7783 on the edge by split_edge(). Later, additional edge 'e' was created to
7784 connect 'new_head' and 'first'. Now this routine adds phi args on this
7785 additional edge 'e' that new_head to second edge received as part of edge
7786 splitting. */
7788 static void
7789 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7790 basic_block new_head, edge e)
7792 gimple phi1, phi2;
7793 gimple_stmt_iterator psi1, psi2;
7794 tree def;
7795 edge e2 = find_edge (new_head, second);
7797 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7798 edge, we should always have an edge from NEW_HEAD to SECOND. */
7799 gcc_assert (e2 != NULL);
7801 /* Browse all 'second' basic block phi nodes and add phi args to
7802 edge 'e' for 'first' head. PHI args are always in correct order. */
7804 for (psi2 = gsi_start_phis (second),
7805 psi1 = gsi_start_phis (first);
7806 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7807 gsi_next (&psi2), gsi_next (&psi1))
7809 phi1 = gsi_stmt (psi1);
7810 phi2 = gsi_stmt (psi2);
7811 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7812 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7817 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7818 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7819 the destination of the ELSE part. */
7821 static void
7822 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7823 basic_block second_head ATTRIBUTE_UNUSED,
7824 basic_block cond_bb, void *cond_e)
7826 gimple_stmt_iterator gsi;
7827 gimple new_cond_expr;
7828 tree cond_expr = (tree) cond_e;
7829 edge e0;
7831 /* Build new conditional expr */
7832 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7833 NULL_TREE, NULL_TREE);
7835 /* Add new cond in cond_bb. */
7836 gsi = gsi_last_bb (cond_bb);
7837 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7839 /* Adjust edges appropriately to connect new head with first head
7840 as well as second head. */
7841 e0 = single_succ_edge (cond_bb);
7842 e0->flags &= ~EDGE_FALLTHRU;
7843 e0->flags |= EDGE_FALSE_VALUE;
7847 /* Do book-keeping of basic block BB for the profile consistency checker.
7848 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7849 then do post-pass accounting. Store the counting in RECORD. */
7850 static void
7851 gimple_account_profile_record (basic_block bb, int after_pass,
7852 struct profile_record *record)
7854 gimple_stmt_iterator i;
7855 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7857 record->size[after_pass]
7858 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7859 if (profile_status == PROFILE_READ)
7860 record->time[after_pass]
7861 += estimate_num_insns (gsi_stmt (i),
7862 &eni_time_weights) * bb->count;
7863 else if (profile_status == PROFILE_GUESSED)
7864 record->time[after_pass]
7865 += estimate_num_insns (gsi_stmt (i),
7866 &eni_time_weights) * bb->frequency;
7870 struct cfg_hooks gimple_cfg_hooks = {
7871 "gimple",
7872 gimple_verify_flow_info,
7873 gimple_dump_bb, /* dump_bb */
7874 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7875 create_bb, /* create_basic_block */
7876 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7877 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7878 gimple_can_remove_branch_p, /* can_remove_branch_p */
7879 remove_bb, /* delete_basic_block */
7880 gimple_split_block, /* split_block */
7881 gimple_move_block_after, /* move_block_after */
7882 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7883 gimple_merge_blocks, /* merge_blocks */
7884 gimple_predict_edge, /* predict_edge */
7885 gimple_predicted_by_p, /* predicted_by_p */
7886 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7887 gimple_duplicate_bb, /* duplicate_block */
7888 gimple_split_edge, /* split_edge */
7889 gimple_make_forwarder_block, /* make_forward_block */
7890 NULL, /* tidy_fallthru_edge */
7891 NULL, /* force_nonfallthru */
7892 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7893 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7894 gimple_flow_call_edges_add, /* flow_call_edges_add */
7895 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7896 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7897 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7898 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7899 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7900 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7901 flush_pending_stmts, /* flush_pending_stmts */
7902 gimple_empty_block_p, /* block_empty_p */
7903 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7904 gimple_account_profile_record,
7908 /* Split all critical edges. */
7910 static unsigned int
7911 split_critical_edges (void)
7913 basic_block bb;
7914 edge e;
7915 edge_iterator ei;
7917 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7918 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7919 mappings around the calls to split_edge. */
7920 start_recording_case_labels ();
7921 FOR_ALL_BB (bb)
7923 FOR_EACH_EDGE (e, ei, bb->succs)
7925 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7926 split_edge (e);
7927 /* PRE inserts statements to edges and expects that
7928 since split_critical_edges was done beforehand, committing edge
7929 insertions will not split more edges. In addition to critical
7930 edges we must split edges that have multiple successors and
7931 end by control flow statements, such as RESX.
7932 Go ahead and split them too. This matches the logic in
7933 gimple_find_edge_insert_loc. */
7934 else if ((!single_pred_p (e->dest)
7935 || !gimple_seq_empty_p (phi_nodes (e->dest))
7936 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7937 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
7938 && !(e->flags & EDGE_ABNORMAL))
7940 gimple_stmt_iterator gsi;
7942 gsi = gsi_last_bb (e->src);
7943 if (!gsi_end_p (gsi)
7944 && stmt_ends_bb_p (gsi_stmt (gsi))
7945 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7946 && !gimple_call_builtin_p (gsi_stmt (gsi),
7947 BUILT_IN_RETURN)))
7948 split_edge (e);
7952 end_recording_case_labels ();
7953 return 0;
7956 namespace {
7958 const pass_data pass_data_split_crit_edges =
7960 GIMPLE_PASS, /* type */
7961 "crited", /* name */
7962 OPTGROUP_NONE, /* optinfo_flags */
7963 false, /* has_gate */
7964 true, /* has_execute */
7965 TV_TREE_SPLIT_EDGES, /* tv_id */
7966 PROP_cfg, /* properties_required */
7967 PROP_no_crit_edges, /* properties_provided */
7968 0, /* properties_destroyed */
7969 0, /* todo_flags_start */
7970 TODO_verify_flow, /* todo_flags_finish */
7973 class pass_split_crit_edges : public gimple_opt_pass
7975 public:
7976 pass_split_crit_edges (gcc::context *ctxt)
7977 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7980 /* opt_pass methods: */
7981 unsigned int execute () { return split_critical_edges (); }
7983 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7984 }; // class pass_split_crit_edges
7986 } // anon namespace
7988 gimple_opt_pass *
7989 make_pass_split_crit_edges (gcc::context *ctxt)
7991 return new pass_split_crit_edges (ctxt);
7995 /* Build a ternary operation and gimplify it. Emit code before GSI.
7996 Return the gimple_val holding the result. */
7998 tree
7999 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8000 tree type, tree a, tree b, tree c)
8002 tree ret;
8003 location_t loc = gimple_location (gsi_stmt (*gsi));
8005 ret = fold_build3_loc (loc, code, type, a, b, c);
8006 STRIP_NOPS (ret);
8008 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8009 GSI_SAME_STMT);
8012 /* Build a binary operation and gimplify it. Emit code before GSI.
8013 Return the gimple_val holding the result. */
8015 tree
8016 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8017 tree type, tree a, tree b)
8019 tree ret;
8021 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8022 STRIP_NOPS (ret);
8024 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8025 GSI_SAME_STMT);
8028 /* Build a unary operation and gimplify it. Emit code before GSI.
8029 Return the gimple_val holding the result. */
8031 tree
8032 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8033 tree a)
8035 tree ret;
8037 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8038 STRIP_NOPS (ret);
8040 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8041 GSI_SAME_STMT);
8046 /* Emit return warnings. */
8048 static unsigned int
8049 execute_warn_function_return (void)
8051 source_location location;
8052 gimple last;
8053 edge e;
8054 edge_iterator ei;
8056 if (!targetm.warn_func_return (cfun->decl))
8057 return 0;
8059 /* If we have a path to EXIT, then we do return. */
8060 if (TREE_THIS_VOLATILE (cfun->decl)
8061 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
8063 location = UNKNOWN_LOCATION;
8064 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8066 last = last_stmt (e->src);
8067 if ((gimple_code (last) == GIMPLE_RETURN
8068 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8069 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8070 break;
8072 if (location == UNKNOWN_LOCATION)
8073 location = cfun->function_end_locus;
8074 warning_at (location, 0, "%<noreturn%> function does return");
8077 /* If we see "return;" in some basic block, then we do reach the end
8078 without returning a value. */
8079 else if (warn_return_type
8080 && !TREE_NO_WARNING (cfun->decl)
8081 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
8082 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
8084 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8086 gimple last = last_stmt (e->src);
8087 if (gimple_code (last) == GIMPLE_RETURN
8088 && gimple_return_retval (last) == NULL
8089 && !gimple_no_warning_p (last))
8091 location = gimple_location (last);
8092 if (location == UNKNOWN_LOCATION)
8093 location = cfun->function_end_locus;
8094 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8095 TREE_NO_WARNING (cfun->decl) = 1;
8096 break;
8100 return 0;
8104 /* Given a basic block B which ends with a conditional and has
8105 precisely two successors, determine which of the edges is taken if
8106 the conditional is true and which is taken if the conditional is
8107 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8109 void
8110 extract_true_false_edges_from_block (basic_block b,
8111 edge *true_edge,
8112 edge *false_edge)
8114 edge e = EDGE_SUCC (b, 0);
8116 if (e->flags & EDGE_TRUE_VALUE)
8118 *true_edge = e;
8119 *false_edge = EDGE_SUCC (b, 1);
8121 else
8123 *false_edge = e;
8124 *true_edge = EDGE_SUCC (b, 1);
8128 namespace {
8130 const pass_data pass_data_warn_function_return =
8132 GIMPLE_PASS, /* type */
8133 "*warn_function_return", /* name */
8134 OPTGROUP_NONE, /* optinfo_flags */
8135 false, /* has_gate */
8136 true, /* has_execute */
8137 TV_NONE, /* tv_id */
8138 PROP_cfg, /* properties_required */
8139 0, /* properties_provided */
8140 0, /* properties_destroyed */
8141 0, /* todo_flags_start */
8142 0, /* todo_flags_finish */
8145 class pass_warn_function_return : public gimple_opt_pass
8147 public:
8148 pass_warn_function_return (gcc::context *ctxt)
8149 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8152 /* opt_pass methods: */
8153 unsigned int execute () { return execute_warn_function_return (); }
8155 }; // class pass_warn_function_return
8157 } // anon namespace
8159 gimple_opt_pass *
8160 make_pass_warn_function_return (gcc::context *ctxt)
8162 return new pass_warn_function_return (ctxt);
8165 /* Walk a gimplified function and warn for functions whose return value is
8166 ignored and attribute((warn_unused_result)) is set. This is done before
8167 inlining, so we don't have to worry about that. */
8169 static void
8170 do_warn_unused_result (gimple_seq seq)
8172 tree fdecl, ftype;
8173 gimple_stmt_iterator i;
8175 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8177 gimple g = gsi_stmt (i);
8179 switch (gimple_code (g))
8181 case GIMPLE_BIND:
8182 do_warn_unused_result (gimple_bind_body (g));
8183 break;
8184 case GIMPLE_TRY:
8185 do_warn_unused_result (gimple_try_eval (g));
8186 do_warn_unused_result (gimple_try_cleanup (g));
8187 break;
8188 case GIMPLE_CATCH:
8189 do_warn_unused_result (gimple_catch_handler (g));
8190 break;
8191 case GIMPLE_EH_FILTER:
8192 do_warn_unused_result (gimple_eh_filter_failure (g));
8193 break;
8195 case GIMPLE_CALL:
8196 if (gimple_call_lhs (g))
8197 break;
8198 if (gimple_call_internal_p (g))
8199 break;
8201 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8202 LHS. All calls whose value is ignored should be
8203 represented like this. Look for the attribute. */
8204 fdecl = gimple_call_fndecl (g);
8205 ftype = gimple_call_fntype (g);
8207 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8209 location_t loc = gimple_location (g);
8211 if (fdecl)
8212 warning_at (loc, OPT_Wunused_result,
8213 "ignoring return value of %qD, "
8214 "declared with attribute warn_unused_result",
8215 fdecl);
8216 else
8217 warning_at (loc, OPT_Wunused_result,
8218 "ignoring return value of function "
8219 "declared with attribute warn_unused_result");
8221 break;
8223 default:
8224 /* Not a container, not a call, or a call whose value is used. */
8225 break;
8230 static unsigned int
8231 run_warn_unused_result (void)
8233 do_warn_unused_result (gimple_body (current_function_decl));
8234 return 0;
8237 static bool
8238 gate_warn_unused_result (void)
8240 return flag_warn_unused_result;
8243 namespace {
8245 const pass_data pass_data_warn_unused_result =
8247 GIMPLE_PASS, /* type */
8248 "*warn_unused_result", /* name */
8249 OPTGROUP_NONE, /* optinfo_flags */
8250 true, /* has_gate */
8251 true, /* has_execute */
8252 TV_NONE, /* tv_id */
8253 PROP_gimple_any, /* properties_required */
8254 0, /* properties_provided */
8255 0, /* properties_destroyed */
8256 0, /* todo_flags_start */
8257 0, /* todo_flags_finish */
8260 class pass_warn_unused_result : public gimple_opt_pass
8262 public:
8263 pass_warn_unused_result (gcc::context *ctxt)
8264 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8267 /* opt_pass methods: */
8268 bool gate () { return gate_warn_unused_result (); }
8269 unsigned int execute () { return run_warn_unused_result (); }
8271 }; // class pass_warn_unused_result
8273 } // anon namespace
8275 gimple_opt_pass *
8276 make_pass_warn_unused_result (gcc::context *ctxt)
8278 return new pass_warn_unused_result (ctxt);
8281 /* IPA passes, compilation of earlier functions or inlining
8282 might have changed some properties, such as marked functions nothrow,
8283 pure, const or noreturn.
8284 Remove redundant edges and basic blocks, and create new ones if necessary.
8286 This pass can't be executed as stand alone pass from pass manager, because
8287 in between inlining and this fixup the verify_flow_info would fail. */
8289 unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  /* TODO flags handed back to the pass manager; request SSA verification
     whenever the function is in SSA form.  */
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Ratio between the execution count the cgraph node records for this
     function and the count currently on the CFG entry block; used below
     to rescale the whole CFG profile to the cgraph count.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
    cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
		 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* A call that became const/pure cannot make abnormal
		 control transfers anymore; purge stale abnormal edges.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* Calls discovered to be noreturn may need the block
		 fixed up after them; that in turn requires CFG cleanup.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Drop EH information that became stale for this statement and
	     remove the now-dead EH edges, if any.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }

  /* If the profile actually changed scale, the hot/cold classification
     of the function may have changed with it.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  /* CFG cleanup can invalidate the recorded loop structures.  */
  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

/* Pass descriptor for the CFG fixup pass.  The "*" prefix in the name
   marks the pass as not dumpable/switchable by name; it has no gate and
   always executes.  It requires a CFG (PROP_cfg) and sets no TODO flags
   statically — execute_fixup_cfg computes them dynamically.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper delegating to execute_fixup_cfg.  Clonable so the pass
   manager can instantiate it at several points in the pipeline.  */
class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace
8418 gimple_opt_pass *
8419 make_pass_fixup_cfg (gcc::context *ctxt)
8421 return new pass_fixup_cfg (ctxt);
8424 /* Garbage collection support for edge_def. */
8426 extern void gt_ggc_mx (tree&);
8427 extern void gt_ggc_mx (gimple&);
8428 extern void gt_ggc_mx (rtx&);
8429 extern void gt_ggc_mx (basic_block&);
8431 void
8432 gt_ggc_mx (edge_def *e)
8434 tree block = LOCATION_BLOCK (e->goto_locus);
8435 gt_ggc_mx (e->src);
8436 gt_ggc_mx (e->dest);
8437 if (current_ir_type () == IR_GIMPLE)
8438 gt_ggc_mx (e->insns.g);
8439 else
8440 gt_ggc_mx (e->insns.r);
8441 gt_ggc_mx (block);
8444 /* PCH support for edge_def. */
8446 extern void gt_pch_nx (tree&);
8447 extern void gt_pch_nx (gimple&);
8448 extern void gt_pch_nx (rtx&);
8449 extern void gt_pch_nx (basic_block&);
8451 void
8452 gt_pch_nx (edge_def *e)
8454 tree block = LOCATION_BLOCK (e->goto_locus);
8455 gt_pch_nx (e->src);
8456 gt_pch_nx (e->dest);
8457 if (current_ir_type () == IR_GIMPLE)
8458 gt_pch_nx (e->insns.g);
8459 else
8460 gt_pch_nx (e->insns.r);
8461 gt_pch_nx (block);
/* Apply the PCH pointer-relocation operator OP (with COOKIE) to the
   pointer fields of edge E.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* E->insns is a union; the live member depends on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  /* NOTE(review): OP is applied to the local copy BLOCK, so any update
     is not written back into e->goto_locus here — presumably the locus
     is relocated elsewhere; confirm against the GTY/PCH machinery.  */
  op (&(block), cookie);
}