Make gimple_goto_dest require a const ggoto *
[official-gcc.git] / gcc / tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
	continue;
      for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    break;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    break;
	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	      loop->safelen = INT_MAX;
	      break;
	    case annot_expr_no_vector_kind:
	      loop->dont_vectorize = true;
	      break;
	    case annot_expr_vector_kind:
	      loop->force_vectorize = true;
	      cfun->has_force_vectorize_loops = true;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    break;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    break;
	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  ggoto *goto_stmt = dyn_cast <ggoto *> (t);
  if (!goto_stmt)
    return false;
  return TREE_CODE (gimple_goto_dest (goto_stmt)) != LABEL_DECL;
}
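
/* For example (illustrative; the exact GIMPLE depends on the source),
   the C fragment

     void *p = &&out;
     goto *p;
   out:
     ...

   reaches this point as a GIMPLE_GOTO whose destination is the value
   of P rather than a LABEL_DECL, so computed_goto_p returns true for
   it, while a plain "goto out;" has a LABEL_DECL destination and is
   not a computed goto.  */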
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
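
/* As a rough illustration: a call to a noreturn function such as
   abort () gets the flag set here and therefore terminates its basic
   block, whereas an ordinary call like sqrt (x), which neither makes
   abnormal gotos nor ends a transaction, does not.  */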
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
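
/* A sketch of the effect (illustrative): for a lowered sequence such as

     a = b + c;
     if (a > 0) goto L1; else goto L2;   <-- stmt_ends_bb_p is true
     L1:                                 <-- a label starts a new block
     ...

   the statements up to and including the GIMPLE_COND land in one basic
   block, and the label opens the next one.  */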
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
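
/* E.g. (illustrative) a GIMPLE_COND whose operands fold to a constant,
   such as "if (0 != 0)", folds to integer_zerop and is rewritten by
   gimple_cond_make_false into the canonical always-false form, so later
   CFG cleanup can remove the dead arm.  */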
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      ggoto *last = as_a <ggoto *> (gsi_stmt (gsi));

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
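
/* After factoring, the computed gotos have the rough shape
   (illustrative):

     <bb 1>: gotovar = &dest1; goto <dispatcher>;
     <bb 2>: gotovar = &dest2; goto <dispatcher>;
     <dispatcher>: goto gotovar;    -- the one computed goto left

   so instead of every goto needing an edge to every potential target
   (N x M edges), only N edges into the dispatcher and M edges out of
   it are required.  */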
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru =
		make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label =
		  gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt =
		dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
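
/* For instance (a simplified description), if several distinct basic
   blocks all carry statements from source line 42, the first keeps
   discriminator 0 and successive calls here hand out 1, 2, ... for the
   others, so a sample-based profiler can attribute counts to the right
   block; see assign_discriminators below for how they are placed.  */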
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
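
/* Illustrative input: a GIMPLE_COND of the form

     if (a > b) goto <L1>; else goto <L2>;

   gets an EDGE_TRUE_VALUE edge to the block of L1 and an
   EDGE_FALSE_VALUE edge to the block of L2; the labels are then
   cleared because the edges now carry that information.  */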
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (as_a <ggoto *> (goto_t));
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
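
/* E.g. for "goto out;" a single EDGE_FALLTHRU to the block holding
   "out" is created and the goto statement itself is removed, while a
   computed "goto p;" returns true so the caller records the block
   for the factored abnormal-edge handling above.  */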
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
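
/* For example (illustrative), in

     switch (x) { case 1: case 2: case 3: f (); break; default: break; }

   cases 1, 2 and 3 all reach the same block with consecutive values,
   so they are merged into the single range entry "case 1 ... 3:", and
   any case that jumps where the default goes is simply dropped from
   the case vector.  */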
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && b->loop_father->header == a)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
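
/* For instance (illustrative), after a pass proves that x_2 is the
   constant 5, calling replace_uses_by on x_2 and that constant rewrites
   every use such as "y_3 = x_2 + 1" into "y_3 = 5 + 1", refolds the
   touched statements, and leaves x_2 with zero remaining uses.  */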
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels are kept around in the form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement.  Set current_function_calls_* flags.  */

void
notice_special_calls (gcall *call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2107 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2108 predicate VAL, return the edge that will be taken out of the block.
2109 If VAL does not match a unique edge, NULL is returned. */
2111 edge
2112 find_taken_edge (basic_block bb, tree val)
2114 gimple stmt;
2116 stmt = last_stmt (bb);
2118 gcc_assert (stmt);
2119 gcc_assert (is_ctrl_stmt (stmt));
2121 if (val == NULL)
2122 return NULL;
2124 if (!is_gimple_min_invariant (val))
2125 return NULL;
2127 if (gimple_code (stmt) == GIMPLE_COND)
2128 return find_taken_edge_cond_expr (bb, val);
2130 if (gimple_code (stmt) == GIMPLE_SWITCH)
2131 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2133 if (computed_goto_p (stmt))
2135 /* Only optimize if the argument is a label; if the argument is
2136 not a label, then we cannot construct a proper CFG.
2138 It may be the case that we only need to allow the LABEL_REF to
2139 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2140 appear inside a LABEL_EXPR just to be safe. */
2141 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2142 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2143 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2144 return NULL;
2147 gcc_unreachable ();
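/* Usage sketch (hedged): a pass that has folded the predicate of
   BB's final GIMPLE_COND to a constant can follow the statically
   known edge:

       edge taken = find_taken_edge (bb, integer_zero_node);
       if (taken)
         remove_other_outgoing_edges (bb, taken);

   where remove_other_outgoing_edges is a hypothetical helper, not
   part of this file.  Callers must handle a NULL result, which means
   VAL does not select a unique successor.  */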
2150 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2151 statement, determine which of the outgoing edges will be taken out of the
2152 block. Return NULL if any edge may be taken. */
2154 static edge
2155 find_taken_edge_computed_goto (basic_block bb, tree val)
2157 basic_block dest;
2158 edge e = NULL;
2160 dest = label_to_block (val);
2161 if (dest)
2163 e = find_edge (bb, dest);
2164 gcc_assert (e != NULL);
2167 return e;
2170 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2171 statement, determine which of the two edges will be taken out of the
2172 block. Return NULL if either edge may be taken. */
2174 static edge
2175 find_taken_edge_cond_expr (basic_block bb, tree val)
2177 edge true_edge, false_edge;
2179 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2181 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2182 return (integer_zerop (val) ? false_edge : true_edge);
2185 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2186 statement, determine which edge will be taken out of the block. Return
2187 NULL if any edge may be taken. */
2189 static edge
2190 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2191 tree val)
2193 basic_block dest_bb;
2194 edge e;
2195 tree taken_case;
2197 taken_case = find_case_label_for_value (switch_stmt, val);
2198 dest_bb = label_to_block (CASE_LABEL (taken_case));
2200 e = find_edge (bb, dest_bb);
2201 gcc_assert (e);
2202 return e;
2206 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2207 We can make optimal use here of the fact that the case labels are
2208 sorted: We can do a binary search for a case matching VAL. */
2210 static tree
2211 find_case_label_for_value (gswitch *switch_stmt, tree val)
2213 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2214 tree default_case = gimple_switch_default_label (switch_stmt);
2216 for (low = 0, high = n; high - low > 1; )
2218 size_t i = (high + low) / 2;
2219 tree t = gimple_switch_label (switch_stmt, i);
2220 int cmp;
2222 /* Cache the result of comparing CASE_LOW and val. */
2223 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2225 if (cmp > 0)
2226 high = i;
2227 else
2228 low = i;
2230 if (CASE_HIGH (t) == NULL)
2232 /* A single-valued case label. */
2233 if (cmp == 0)
2234 return t;
2236 else
2238 /* A case range. We can only handle integer ranges. */
2239 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2240 return t;
2244 return default_case;
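/* Worked example (illustrative): for a switch whose sorted labels are

       case 1:   case 5 ... 9:   case 20:   default:

   a lookup of VAL == 7 narrows the [low, high) range onto the
   "5 ... 9" entry: its CASE_LOW compares <= 7 and its CASE_HIGH
   compares >= 7, so that label is returned.  VAL == 12 matches no
   label and yields the default case.  */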
2248 /* Dump a basic block on stderr. */
2250 void
2251 gimple_debug_bb (basic_block bb)
2253 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2257 /* Dump basic block with index N on stderr. */
2259 basic_block
2260 gimple_debug_bb_n (int n)
2262 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2263 return BASIC_BLOCK_FOR_FN (cfun, n);
2267 /* Dump the CFG on stderr.
2269 FLAGS are the same as those used by the tree dumping functions
2270 (see TDF_* in dumpfile.h). */
2272 void
2273 gimple_debug_cfg (int flags)
2275 gimple_dump_cfg (stderr, flags);
2279 /* Dump the program showing basic block boundaries on the given FILE.
2281 FLAGS are the same as those used by the tree dumping functions
2282 (see TDF_* in dumpfile.h). */
2284 void
2285 gimple_dump_cfg (FILE *file, int flags)
2287 if (flags & TDF_DETAILS)
2289 dump_function_header (file, current_function_decl, flags);
2290 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2291 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2292 last_basic_block_for_fn (cfun));
2294 brief_dump_cfg (file, flags | TDF_COMMENT);
2295 fprintf (file, "\n");
2298 if (flags & TDF_STATS)
2299 dump_cfg_stats (file);
2301 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2305 /* Dump CFG statistics on FILE. */
2307 void
2308 dump_cfg_stats (FILE *file)
2310 static long max_num_merged_labels = 0;
2311 unsigned long size, total = 0;
2312 long num_edges;
2313 basic_block bb;
2314 const char * const fmt_str = "%-30s%-13s%12s\n";
2315 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2316 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2317 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2318 const char *funcname = current_function_name ();
2320 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2322 fprintf (file, "---------------------------------------------------------\n");
2323 fprintf (file, fmt_str, "", " Number of ", "Memory");
2324 fprintf (file, fmt_str, "", " instances ", "used ");
2325 fprintf (file, "---------------------------------------------------------\n");
2327 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2328 total += size;
2329 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2330 SCALE (size), LABEL (size));
2332 num_edges = 0;
2333 FOR_EACH_BB_FN (bb, cfun)
2334 num_edges += EDGE_COUNT (bb->succs);
2335 size = num_edges * sizeof (struct edge_def);
2336 total += size;
2337 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2339 fprintf (file, "---------------------------------------------------------\n");
2340 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2341 LABEL (total));
2342 fprintf (file, "---------------------------------------------------------\n");
2343 fprintf (file, "\n");
2345 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2346 max_num_merged_labels = cfg_stats.num_merged_labels;
2348 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2349 cfg_stats.num_merged_labels, max_num_merged_labels);
2351 fprintf (file, "\n");
2355 /* Dump CFG statistics on stderr. Keep extern so that it's always
2356 linked in the final executable. */
2358 DEBUG_FUNCTION void
2359 debug_cfg_stats (void)
2361 dump_cfg_stats (stderr);
2364 /*---------------------------------------------------------------------------
2365 Miscellaneous helpers
2366 ---------------------------------------------------------------------------*/
2368 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2369 flow. Transfers of control flow associated with EH are excluded. */
2371 static bool
2372 call_can_make_abnormal_goto (gimple t)
2374 /* If the function has no non-local labels, then a call cannot make an
2375 abnormal transfer of control. */
2376 if (!cfun->has_nonlocal_label
2377 && !cfun->calls_setjmp)
2378 return false;
2380 /* Likewise if the call has no side effects. */
2381 if (!gimple_has_side_effects (t))
2382 return false;
2384 /* Likewise if the called function is leaf. */
2385 if (gimple_call_flags (t) & ECF_LEAF)
2386 return false;
2388 return true;
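/* Illustration (hedged sketch of the source situation): given

       jmp_buf env;
       void f (void) { if (setjmp (env)) return; g (); }

   cfun->calls_setjmp is set for f, so the call to g must be assumed
   able to return abnormally via longjmp -- unless g is ECF_LEAF or
   free of side effects, in which case the checks above rule the
   transfer out.  */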
2392 /* Return true if T can make an abnormal transfer of control flow.
2393 Transfers of control flow associated with EH are excluded. */
2395 bool
2396 stmt_can_make_abnormal_goto (gimple t)
2398 if (computed_goto_p (t))
2399 return true;
2400 if (is_gimple_call (t))
2401 return call_can_make_abnormal_goto (t);
2402 return false;
2406 /* Return true if T represents a stmt that always transfers control. */
2408 bool
2409 is_ctrl_stmt (gimple t)
2411 switch (gimple_code (t))
2413 case GIMPLE_COND:
2414 case GIMPLE_SWITCH:
2415 case GIMPLE_GOTO:
2416 case GIMPLE_RETURN:
2417 case GIMPLE_RESX:
2418 return true;
2419 default:
2420 return false;
2425 /* Return true if T is a statement that may alter the flow of control
2426 (e.g., a call to a non-returning function). */
2428 bool
2429 is_ctrl_altering_stmt (gimple t)
2431 gcc_assert (t);
2433 switch (gimple_code (t))
2435 case GIMPLE_CALL:
2436 /* The per-statement call flag indicates whether the call could
2437 alter control flow. */
2438 if (gimple_call_ctrl_altering_p (t))
2439 return true;
2440 break;
2442 case GIMPLE_EH_DISPATCH:
2443 /* EH_DISPATCH branches to the individual catch handlers at
2444 this level of a try or allowed-exceptions region. It can
2445 fallthru to the next statement as well. */
2446 return true;
2448 case GIMPLE_ASM:
2449 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2450 return true;
2451 break;
2453 CASE_GIMPLE_OMP:
2454 /* OpenMP directives alter control flow. */
2455 return true;
2457 case GIMPLE_TRANSACTION:
2458 /* A transaction start alters control flow. */
2459 return true;
2461 default:
2462 break;
2465 /* If a statement can throw, it alters control flow. */
2466 return stmt_can_throw_internal (t);
2470 /* Return true if T is a simple local goto. */
2472 bool
2473 simple_goto_p (gimple t)
2475 ggoto *goto_stmt = dyn_cast <ggoto *> (t);
2476 if (!goto_stmt)
2477 return false;
2478 return TREE_CODE (gimple_goto_dest (goto_stmt)) == LABEL_DECL;
2482 /* Return true if STMT should start a new basic block. PREV_STMT is
2483 the statement preceding STMT. It is used when STMT is a label or a
2484 case label. Labels should only start a new basic block if their
2485 previous statement wasn't a label. Otherwise, sequences of labels
2486 would generate unnecessary basic blocks that only contain a single
2487 label. */
2489 static inline bool
2490 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2492 if (stmt == NULL)
2493 return false;
2495 /* Labels start a new basic block only if the preceding statement
2496 wasn't a label of the same type. This prevents the creation of
2497 consecutive blocks that have nothing but a single label. */
2498 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2500 /* Nonlocal and computed GOTO targets always start a new block. */
2501 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2502 || FORCED_LABEL (gimple_label_label (label_stmt)))
2503 return true;
2505 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2507 if (DECL_NONLOCAL (gimple_label_label (
2508 as_a <glabel *> (prev_stmt))))
2509 return true;
2511 cfg_stats.num_merged_labels++;
2512 return false;
2514 else
2515 return true;
2517 else if (gimple_code (stmt) == GIMPLE_CALL
2518 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2519 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2520 start a new block. */
2521 return true;
2523 return false;
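/* Example (illustrative): in the statement sequence

       L1:
       L2:
         x = 1;

   only L1 starts a new basic block; L2 is merged into the same block
   and counted in cfg_stats.num_merged_labels.  A nonlocal or forced
   label in L2's position would still start its own block.  */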
2527 /* Return true if T should end a basic block. */
2529 bool
2530 stmt_ends_bb_p (gimple t)
2532 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2535 /* Remove block annotations and other data structures. */
2537 void
2538 delete_tree_cfg_annotations (void)
2540 vec_free (label_to_block_map_for_fn (cfun));
2544 /* Return the first statement in basic block BB. */
2546 gimple
2547 first_stmt (basic_block bb)
2549 gimple_stmt_iterator i = gsi_start_bb (bb);
2550 gimple stmt = NULL;
2552 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2554 gsi_next (&i);
2555 stmt = NULL;
2557 return stmt;
2560 /* Return the first non-label statement in basic block BB. */
2562 static gimple
2563 first_non_label_stmt (basic_block bb)
2565 gimple_stmt_iterator i = gsi_start_bb (bb);
2566 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2567 gsi_next (&i);
2568 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2571 /* Return the last statement in basic block BB. */
2573 gimple
2574 last_stmt (basic_block bb)
2576 gimple_stmt_iterator i = gsi_last_bb (bb);
2577 gimple stmt = NULL;
2579 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2581 gsi_prev (&i);
2582 stmt = NULL;
2584 return stmt;
2587 /* Return the last statement of an otherwise empty block. Return NULL
2588 if the block is totally empty, or if it contains more than one
2589 statement. */
2591 gimple
2592 last_and_only_stmt (basic_block bb)
2594 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2595 gimple last, prev;
2597 if (gsi_end_p (i))
2598 return NULL;
2600 last = gsi_stmt (i);
2601 gsi_prev_nondebug (&i);
2602 if (gsi_end_p (i))
2603 return last;
2605 /* Empty statements should no longer appear in the instruction stream.
2606 Everything that might have appeared before should be deleted by
2607 remove_useless_stmts, and the optimizers should just gsi_remove
2608 instead of smashing with build_empty_stmt.
2610 Thus the only thing that should appear here in a block containing
2611 one executable statement is a label. */
2612 prev = gsi_stmt (i);
2613 if (gimple_code (prev) == GIMPLE_LABEL)
2614 return last;
2615 else
2616 return NULL;
2619 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2621 static void
2622 reinstall_phi_args (edge new_edge, edge old_edge)
2624 edge_var_map *vm;
2625 int i;
2626 gphi_iterator phis;
2628 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2629 if (!v)
2630 return;
2632 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2633 v->iterate (i, &vm) && !gsi_end_p (phis);
2634 i++, gsi_next (&phis))
2636 gphi *phi = phis.phi ();
2637 tree result = redirect_edge_var_map_result (vm);
2638 tree arg = redirect_edge_var_map_def (vm);
2640 gcc_assert (result == gimple_phi_result (phi));
2642 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2645 redirect_edge_var_map_clear (old_edge);
2648 /* Returns the basic block after which the new basic block created
2649 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2650 near its "logical" location. This is of most help to humans looking
2651 at debugging dumps. */
2653 static basic_block
2654 split_edge_bb_loc (edge edge_in)
2656 basic_block dest = edge_in->dest;
2657 basic_block dest_prev = dest->prev_bb;
2659 if (dest_prev)
2661 edge e = find_edge (dest_prev, dest);
2662 if (e && !(e->flags & EDGE_COMPLEX))
2663 return edge_in->src;
2665 return dest_prev;
2668 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2669 Abort on abnormal edges. */
2671 static basic_block
2672 gimple_split_edge (edge edge_in)
2674 basic_block new_bb, after_bb, dest;
2675 edge new_edge, e;
2677 /* Abnormal edges cannot be split. */
2678 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2680 dest = edge_in->dest;
2682 after_bb = split_edge_bb_loc (edge_in);
2684 new_bb = create_empty_bb (after_bb);
2685 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2686 new_bb->count = edge_in->count;
2687 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2688 new_edge->probability = REG_BR_PROB_BASE;
2689 new_edge->count = edge_in->count;
2691 e = redirect_edge_and_branch (edge_in, new_bb);
2692 gcc_assert (e == edge_in);
2693 reinstall_phi_args (new_edge, e);
2695 return new_bb;
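/* Sketch (illustrative): splitting the critical edge A->C below

       A   B               A    B
      / \ /               / \   |
     D   C        =>     D   N  |
                              \ |
                               C

   inserts the empty block N, redirects A's branch there, and gives N
   a single fallthru edge into C; PHI arguments queued on the old
   A->C edge are reinstalled on that fallthru edge.  */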
2699 /* Verify properties of the address expression T with base object BASE. */
2701 static tree
2702 verify_address (tree t, tree base)
2704 bool old_constant;
2705 bool old_side_effects;
2706 bool new_constant;
2707 bool new_side_effects;
2709 old_constant = TREE_CONSTANT (t);
2710 old_side_effects = TREE_SIDE_EFFECTS (t);
2712 recompute_tree_invariant_for_addr_expr (t);
2713 new_side_effects = TREE_SIDE_EFFECTS (t);
2714 new_constant = TREE_CONSTANT (t);
2716 if (old_constant != new_constant)
2718 error ("constant not recomputed when ADDR_EXPR changed");
2719 return t;
2721 if (old_side_effects != new_side_effects)
2723 error ("side effects not recomputed when ADDR_EXPR changed");
2724 return t;
2727 if (!(TREE_CODE (base) == VAR_DECL
2728 || TREE_CODE (base) == PARM_DECL
2729 || TREE_CODE (base) == RESULT_DECL))
2730 return NULL_TREE;
2732 if (DECL_GIMPLE_REG_P (base))
2734 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2735 return base;
2738 return NULL_TREE;
2741 /* Callback for walk_tree; check that all elements with address taken
2742 are properly noticed as such. DATA is currently unused. */
2745 static tree
2746 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2748 tree t = *tp, x;
2750 if (TYPE_P (t))
2751 *walk_subtrees = 0;
2753 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2754 #define CHECK_OP(N, MSG) \
2755 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2756 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2758 switch (TREE_CODE (t))
2760 case SSA_NAME:
2761 if (SSA_NAME_IN_FREE_LIST (t))
2763 error ("SSA name in freelist but still referenced");
2764 return *tp;
2766 break;
2768 case INDIRECT_REF:
2769 error ("INDIRECT_REF in gimple IL");
2770 return t;
2772 case MEM_REF:
2773 x = TREE_OPERAND (t, 0);
2774 if (!POINTER_TYPE_P (TREE_TYPE (x))
2775 || !is_gimple_mem_ref_addr (x))
2777 error ("invalid first operand of MEM_REF");
2778 return x;
2780 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2781 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2783 error ("invalid offset operand of MEM_REF");
2784 return TREE_OPERAND (t, 1);
2786 if (TREE_CODE (x) == ADDR_EXPR
2787 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2788 return x;
2789 *walk_subtrees = 0;
2790 break;
2792 case ASSERT_EXPR:
2793 x = fold (ASSERT_EXPR_COND (t));
2794 if (x == boolean_false_node)
2796 error ("ASSERT_EXPR with an always-false condition");
2797 return *tp;
2799 break;
2801 case MODIFY_EXPR:
2802 error ("MODIFY_EXPR not expected while having tuples");
2803 return *tp;
2805 case ADDR_EXPR:
2807 tree tem;
2809 gcc_assert (is_gimple_address (t));
2811 /* Skip any references (they will be checked when we recurse down the
2812 tree) and ensure that any variable used as a prefix is marked
2813 addressable. */
2814 for (x = TREE_OPERAND (t, 0);
2815 handled_component_p (x);
2816 x = TREE_OPERAND (x, 0))
2819 if ((tem = verify_address (t, x)))
2820 return tem;
2822 if (!(TREE_CODE (x) == VAR_DECL
2823 || TREE_CODE (x) == PARM_DECL
2824 || TREE_CODE (x) == RESULT_DECL))
2825 return NULL;
2827 if (!TREE_ADDRESSABLE (x))
2829 error ("address taken, but ADDRESSABLE bit not set");
2830 return x;
2833 break;
2836 case COND_EXPR:
2837 x = COND_EXPR_COND (t);
2838 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2840 error ("non-integral used in condition");
2841 return x;
2843 if (!is_gimple_condexpr (x))
2845 error ("invalid conditional operand");
2846 return x;
2848 break;
2850 case NON_LVALUE_EXPR:
2851 case TRUTH_NOT_EXPR:
2852 gcc_unreachable ();
2854 CASE_CONVERT:
2855 case FIX_TRUNC_EXPR:
2856 case FLOAT_EXPR:
2857 case NEGATE_EXPR:
2858 case ABS_EXPR:
2859 case BIT_NOT_EXPR:
2860 CHECK_OP (0, "invalid operand to unary operator");
2861 break;
2863 case REALPART_EXPR:
2864 case IMAGPART_EXPR:
2865 case BIT_FIELD_REF:
2866 if (!is_gimple_reg_type (TREE_TYPE (t)))
2868 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2869 return t;
2872 if (TREE_CODE (t) == BIT_FIELD_REF)
2874 tree t0 = TREE_OPERAND (t, 0);
2875 tree t1 = TREE_OPERAND (t, 1);
2876 tree t2 = TREE_OPERAND (t, 2);
2877 if (!tree_fits_uhwi_p (t1)
2878 || !tree_fits_uhwi_p (t2))
2880 error ("invalid position or size operand to BIT_FIELD_REF");
2881 return t;
2883 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2884 && (TYPE_PRECISION (TREE_TYPE (t))
2885 != tree_to_uhwi (t1)))
2887 error ("integral result type precision does not match "
2888 "field size of BIT_FIELD_REF");
2889 return t;
2891 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2892 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2893 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2894 != tree_to_uhwi (t1)))
2896 error ("mode precision of non-integral result does not "
2897 "match field size of BIT_FIELD_REF");
2898 return t;
2900 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2901 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2902 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2904 error ("position plus size exceeds size of referenced object in "
2905 "BIT_FIELD_REF");
2906 return t;
2909 t = TREE_OPERAND (t, 0);
2911 /* Fall-through. */
2912 case COMPONENT_REF:
2913 case ARRAY_REF:
2914 case ARRAY_RANGE_REF:
2915 case VIEW_CONVERT_EXPR:
2916 /* We have a nest of references. Verify that each of the operands
2917 that determine where to reference is either a constant or a variable,
2918 verify that the base is valid, and then show we've already checked
2919 the subtrees. */
2920 while (handled_component_p (t))
2922 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2923 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2924 else if (TREE_CODE (t) == ARRAY_REF
2925 || TREE_CODE (t) == ARRAY_RANGE_REF)
2927 CHECK_OP (1, "invalid array index");
2928 if (TREE_OPERAND (t, 2))
2929 CHECK_OP (2, "invalid array lower bound");
2930 if (TREE_OPERAND (t, 3))
2931 CHECK_OP (3, "invalid array stride");
2933 else if (TREE_CODE (t) == BIT_FIELD_REF
2934 || TREE_CODE (t) == REALPART_EXPR
2935 || TREE_CODE (t) == IMAGPART_EXPR)
2937 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2938 "REALPART_EXPR");
2939 return t;
2942 t = TREE_OPERAND (t, 0);
2945 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2947 error ("invalid reference prefix");
2948 return t;
2950 *walk_subtrees = 0;
2951 break;
2952 case PLUS_EXPR:
2953 case MINUS_EXPR:
2954 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2955 arithmetic should be done using POINTER_PLUS_EXPR. */
2956 if (POINTER_TYPE_P (TREE_TYPE (t)))
2958 error ("invalid operand to plus/minus, type is a pointer");
2959 return t;
2961 CHECK_OP (0, "invalid operand to binary operator");
2962 CHECK_OP (1, "invalid operand to binary operator");
2963 break;
2965 case POINTER_PLUS_EXPR:
2966 /* Check to make sure the first operand is a pointer or reference type. */
2967 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2969 error ("invalid operand to pointer plus, first operand is not a pointer");
2970 return t;
2972 /* Check to make sure the second operand is a ptrofftype. */
2973 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2975 error ("invalid operand to pointer plus, second operand is not an "
2976 "integer type of appropriate width");
2977 return t;
2979 /* FALLTHROUGH */
2980 case LT_EXPR:
2981 case LE_EXPR:
2982 case GT_EXPR:
2983 case GE_EXPR:
2984 case EQ_EXPR:
2985 case NE_EXPR:
2986 case UNORDERED_EXPR:
2987 case ORDERED_EXPR:
2988 case UNLT_EXPR:
2989 case UNLE_EXPR:
2990 case UNGT_EXPR:
2991 case UNGE_EXPR:
2992 case UNEQ_EXPR:
2993 case LTGT_EXPR:
2994 case MULT_EXPR:
2995 case TRUNC_DIV_EXPR:
2996 case CEIL_DIV_EXPR:
2997 case FLOOR_DIV_EXPR:
2998 case ROUND_DIV_EXPR:
2999 case TRUNC_MOD_EXPR:
3000 case CEIL_MOD_EXPR:
3001 case FLOOR_MOD_EXPR:
3002 case ROUND_MOD_EXPR:
3003 case RDIV_EXPR:
3004 case EXACT_DIV_EXPR:
3005 case MIN_EXPR:
3006 case MAX_EXPR:
3007 case LSHIFT_EXPR:
3008 case RSHIFT_EXPR:
3009 case LROTATE_EXPR:
3010 case RROTATE_EXPR:
3011 case BIT_IOR_EXPR:
3012 case BIT_XOR_EXPR:
3013 case BIT_AND_EXPR:
3014 CHECK_OP (0, "invalid operand to binary operator");
3015 CHECK_OP (1, "invalid operand to binary operator");
3016 break;
3018 case CONSTRUCTOR:
3019 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3020 *walk_subtrees = 0;
3021 break;
3023 case CASE_LABEL_EXPR:
3024 if (CASE_CHAIN (t))
3026 error ("invalid CASE_CHAIN");
3027 return t;
3029 break;
3031 default:
3032 break;
3034 return NULL;
3036 #undef CHECK_OP
3040 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3041 Returns true if there is an error, otherwise false. */
3043 static bool
3044 verify_types_in_gimple_min_lval (tree expr)
3046 tree op;
3048 if (is_gimple_id (expr))
3049 return false;
3051 if (TREE_CODE (expr) != TARGET_MEM_REF
3052 && TREE_CODE (expr) != MEM_REF)
3054 error ("invalid expression for min lvalue");
3055 return true;
3058 /* TARGET_MEM_REFs are strange beasts. */
3059 if (TREE_CODE (expr) == TARGET_MEM_REF)
3060 return false;
3062 op = TREE_OPERAND (expr, 0);
3063 if (!is_gimple_val (op))
3065 error ("invalid operand in indirect reference");
3066 debug_generic_stmt (op);
3067 return true;
3069 /* Memory references now generally can involve a value conversion. */
3071 return false;
3074 /* Verify if EXPR is a valid GIMPLE reference expression. If
3075 REQUIRE_LVALUE is true, verify that it is an lvalue. Returns true
3076 if there is an error, otherwise false. */
3078 static bool
3079 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3081 while (handled_component_p (expr))
3083 tree op = TREE_OPERAND (expr, 0);
3085 if (TREE_CODE (expr) == ARRAY_REF
3086 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3088 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3089 || (TREE_OPERAND (expr, 2)
3090 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3091 || (TREE_OPERAND (expr, 3)
3092 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3094 error ("invalid operands to array reference");
3095 debug_generic_stmt (expr);
3096 return true;
3100 /* Verify if the reference array element types are compatible. */
3101 if (TREE_CODE (expr) == ARRAY_REF
3102 && !useless_type_conversion_p (TREE_TYPE (expr),
3103 TREE_TYPE (TREE_TYPE (op))))
3105 error ("type mismatch in array reference");
3106 debug_generic_stmt (TREE_TYPE (expr));
3107 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3108 return true;
3110 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3111 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3112 TREE_TYPE (TREE_TYPE (op))))
3114 error ("type mismatch in array range reference");
3115 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3116 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3117 return true;
3120 if ((TREE_CODE (expr) == REALPART_EXPR
3121 || TREE_CODE (expr) == IMAGPART_EXPR)
3122 && !useless_type_conversion_p (TREE_TYPE (expr),
3123 TREE_TYPE (TREE_TYPE (op))))
3125 error ("type mismatch in real/imagpart reference");
3126 debug_generic_stmt (TREE_TYPE (expr));
3127 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3128 return true;
3131 if (TREE_CODE (expr) == COMPONENT_REF
3132 && !useless_type_conversion_p (TREE_TYPE (expr),
3133 TREE_TYPE (TREE_OPERAND (expr, 1))))
3135 error ("type mismatch in component reference");
3136 debug_generic_stmt (TREE_TYPE (expr));
3137 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3138 return true;
3141 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3143 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3144 that their operand is not an SSA name or an invariant when
3145 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3146 bug). Otherwise there is nothing to verify, gross mismatches at
3147 most invoke undefined behavior. */
3148 if (require_lvalue
3149 && (TREE_CODE (op) == SSA_NAME
3150 || is_gimple_min_invariant (op)))
3152 error ("conversion of an SSA_NAME on the left hand side");
3153 debug_generic_stmt (expr);
3154 return true;
3156 else if (TREE_CODE (op) == SSA_NAME
3157 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3159 error ("conversion of register to a different size");
3160 debug_generic_stmt (expr);
3161 return true;
3163 else if (!handled_component_p (op))
3164 return false;
3167 expr = op;
3170 if (TREE_CODE (expr) == MEM_REF)
3172 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3174 error ("invalid address operand in MEM_REF");
3175 debug_generic_stmt (expr);
3176 return true;
3178 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3179 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3181 error ("invalid offset operand in MEM_REF");
3182 debug_generic_stmt (expr);
3183 return true;
3186 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3188 if (!TMR_BASE (expr)
3189 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3191 error ("invalid address operand in TARGET_MEM_REF");
3192 return true;
3194 if (!TMR_OFFSET (expr)
3195 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3196 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3198 error ("invalid offset operand in TARGET_MEM_REF");
3199 debug_generic_stmt (expr);
3200 return true;
3204 return ((require_lvalue || !is_gimple_min_invariant (expr))
3205 && verify_types_in_gimple_min_lval (expr));
3208 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3209 list of pointer-to types that is trivially convertible to DEST. */
3211 static bool
3212 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3214 tree src;
3216 if (!TYPE_POINTER_TO (src_obj))
3217 return true;
3219 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3220 if (useless_type_conversion_p (dest, src))
3221 return true;
3223 return false;
3226 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3227 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3229 static bool
3230 valid_fixed_convert_types_p (tree type1, tree type2)
3232 return (FIXED_POINT_TYPE_P (type1)
3233 && (INTEGRAL_TYPE_P (type2)
3234 || SCALAR_FLOAT_TYPE_P (type2)
3235 || FIXED_POINT_TYPE_P (type2)));
3238 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3239 is a problem, otherwise false. */
3241 static bool
3242 verify_gimple_call (gcall *stmt)
3244 tree fn = gimple_call_fn (stmt);
3245 tree fntype, fndecl;
3246 unsigned i;
3248 if (gimple_call_internal_p (stmt))
3250 if (fn)
3252 error ("gimple call has two targets");
3253 debug_generic_stmt (fn);
3254 return true;
3257 else
3259 if (!fn)
3261 error ("gimple call has no target");
3262 return true;
3266 if (fn && !is_gimple_call_addr (fn))
3268 error ("invalid function in gimple call");
3269 debug_generic_stmt (fn);
3270 return true;
3273 if (fn
3274 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3275 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3276 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3278 error ("non-function in gimple call");
3279 return true;
3282 fndecl = gimple_call_fndecl (stmt);
3283 if (fndecl
3284 && TREE_CODE (fndecl) == FUNCTION_DECL
3285 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3286 && !DECL_PURE_P (fndecl)
3287 && !TREE_READONLY (fndecl))
3289 error ("invalid pure const state for function");
3290 return true;
3293 if (gimple_call_lhs (stmt)
3294 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3295 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3297 error ("invalid LHS in gimple call");
3298 return true;
3301 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3303 error ("LHS in noreturn call");
3304 return true;
3307 fntype = gimple_call_fntype (stmt);
3308 if (fntype
3309 && gimple_call_lhs (stmt)
3310 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3311 TREE_TYPE (fntype))
3312 /* ??? At least C++ misses conversions at assignments from
3313 void * call results.
3314 ??? Java is completely off. Especially with functions
3315 returning java.lang.Object.
3316 For now simply allow arbitrary pointer type conversions. */
3317 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3318 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3320 error ("invalid conversion in gimple call");
3321 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3322 debug_generic_stmt (TREE_TYPE (fntype));
3323 return true;
3326 if (gimple_call_chain (stmt)
3327 && !is_gimple_val (gimple_call_chain (stmt)))
3329 error ("invalid static chain in gimple call");
3330 debug_generic_stmt (gimple_call_chain (stmt));
3331 return true;
3334 /* If there is a static chain argument, this should not be an indirect
3335 call, and the decl should have DECL_STATIC_CHAIN set. */
3336 if (gimple_call_chain (stmt))
3338 if (!gimple_call_fndecl (stmt))
3340 error ("static chain in indirect gimple call");
3341 return true;
3343 fn = TREE_OPERAND (fn, 0);
3345 if (!DECL_STATIC_CHAIN (fn))
3347 error ("static chain with function that doesn%'t use one");
3348 return true;
3352 /* ??? The C frontend passes unpromoted arguments in case it
3353 didn't see a function declaration before the call. So for now
3354 leave the call arguments mostly unverified. Once we gimplify
3355 unit-at-a-time we have a chance to fix this. */
3357 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3359 tree arg = gimple_call_arg (stmt, i);
3360 if ((is_gimple_reg_type (TREE_TYPE (arg))
3361 && !is_gimple_val (arg))
3362 || (!is_gimple_reg_type (TREE_TYPE (arg))
3363 && !is_gimple_lvalue (arg)))
3365 error ("invalid argument to gimple call");
3366 debug_generic_expr (arg);
3367 return true;
3371 return false;
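/* Examples of GIMPLE that the checks above reject (illustrative):

       x_1 = abort ();       rejected: "LHS in noreturn call"
       foo (a_2 + 1);        rejected: a register-typed argument must
                             be a flattened GIMPLE value

   Internal calls must have no explicit target, while regular calls
   require exactly one.  */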
3374 /* Verifies the gimple comparison with the result type TYPE and
3375 the operands OP0 and OP1. */
3377 static bool
3378 verify_gimple_comparison (tree type, tree op0, tree op1)
3380 tree op0_type = TREE_TYPE (op0);
3381 tree op1_type = TREE_TYPE (op1);
3383 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3385 error ("invalid operands in gimple comparison");
3386 return true;
3389 /* For comparisons we do not have the operation's type as the
3390 effective type the comparison is carried out in. Instead
3391 we require that either the first operand is trivially
3392 convertible into the second, or the other way around.
3393 Because we special-case pointers to void we allow
3394 comparisons of pointers with the same mode as well. */
3395 if (!useless_type_conversion_p (op0_type, op1_type)
3396 && !useless_type_conversion_p (op1_type, op0_type)
3397 && (!POINTER_TYPE_P (op0_type)
3398 || !POINTER_TYPE_P (op1_type)
3399 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3401 error ("mismatching comparison operand types");
3402 debug_generic_expr (op0_type);
3403 debug_generic_expr (op1_type);
3404 return true;
3407 /* The resulting type of a comparison may be an effective boolean type. */
3408 if (INTEGRAL_TYPE_P (type)
3409 && (TREE_CODE (type) == BOOLEAN_TYPE
3410 || TYPE_PRECISION (type) == 1))
3412 if (TREE_CODE (op0_type) == VECTOR_TYPE
3413 || TREE_CODE (op1_type) == VECTOR_TYPE)
3415 error ("vector comparison returning a boolean");
3416 debug_generic_expr (op0_type);
3417 debug_generic_expr (op1_type);
3418 return true;
3421 /* Or an integer vector type with the same size and element count
3422 as the comparison operand types. */
3423 else if (TREE_CODE (type) == VECTOR_TYPE
3424 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3426 if (TREE_CODE (op0_type) != VECTOR_TYPE
3427 || TREE_CODE (op1_type) != VECTOR_TYPE)
3429 error ("non-vector operands in vector comparison");
3430 debug_generic_expr (op0_type);
3431 debug_generic_expr (op1_type);
3432 return true;
3435 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3436 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3437 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3438 /* The result of a vector comparison is of signed
3439 integral type. */
3440 || TYPE_UNSIGNED (TREE_TYPE (type)))
3442 error ("invalid vector comparison resulting type");
3443 debug_generic_expr (type);
3444 return true;
3447 else
3449 error ("bogus comparison result type");
3450 debug_generic_expr (type);
3451 return true;
3454 return false;
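/* Illustrative consequences of the rules above, with scalar ints
   a, b and vector operands va, vb:

       _1 = a_2 < b_3;      OK when _1 has boolean (or 1-bit) type
       _4 = va_5 == vb_6;   invalid if _4 is a boolean; a vector
                            comparison must instead produce a signed
                            integer vector with matching subparts and
                            element size.  */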
3457 /* Verify a gimple assignment statement STMT with an unary rhs.
3458 Returns true if anything is wrong. */
3460 static bool
3461 verify_gimple_assign_unary (gassign *stmt)
3463 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3464 tree lhs = gimple_assign_lhs (stmt);
3465 tree lhs_type = TREE_TYPE (lhs);
3466 tree rhs1 = gimple_assign_rhs1 (stmt);
3467 tree rhs1_type = TREE_TYPE (rhs1);
3469 if (!is_gimple_reg (lhs))
3471 error ("non-register as LHS of unary operation");
3472 return true;
3475 if (!is_gimple_val (rhs1))
3477 error ("invalid operand in unary operation");
3478 return true;
3481 /* First handle conversions. */
3482 switch (rhs_code)
3484 CASE_CONVERT:
3486 /* Allow conversions from pointer type to integral type only if
3487 there is no sign or zero extension involved.
3488 For targets where the precision of ptrofftype doesn't match that
3489 of pointers we need to allow arbitrary conversions to ptrofftype. */
3490 if ((POINTER_TYPE_P (lhs_type)
3491 && INTEGRAL_TYPE_P (rhs1_type))
3492 || (POINTER_TYPE_P (rhs1_type)
3493 && INTEGRAL_TYPE_P (lhs_type)
3494 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3495 || ptrofftype_p (sizetype))))
3496 return false;
3498 /* Allow conversion from integral to offset type and vice versa. */
3499 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3500 && INTEGRAL_TYPE_P (rhs1_type))
3501 || (INTEGRAL_TYPE_P (lhs_type)
3502 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3503 return false;
3505 /* Otherwise assert we are converting between types of the
3506 same kind. */
3507 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3509 error ("invalid types in nop conversion");
3510 debug_generic_expr (lhs_type);
3511 debug_generic_expr (rhs1_type);
3512 return true;
3515 return false;
3518 case ADDR_SPACE_CONVERT_EXPR:
3520 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3521 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3522 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3524 error ("invalid types in address space conversion");
3525 debug_generic_expr (lhs_type);
3526 debug_generic_expr (rhs1_type);
3527 return true;
3530 return false;
3533 case FIXED_CONVERT_EXPR:
3535 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3536 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3538 error ("invalid types in fixed-point conversion");
3539 debug_generic_expr (lhs_type);
3540 debug_generic_expr (rhs1_type);
3541 return true;
3544 return false;
3547 case FLOAT_EXPR:
3549 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3550 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3551 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3553 error ("invalid types in conversion to floating point");
3554 debug_generic_expr (lhs_type);
3555 debug_generic_expr (rhs1_type);
3556 return true;
3559 return false;
3562 case FIX_TRUNC_EXPR:
3564 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3565 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3566 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3568 error ("invalid types in conversion to integer");
3569 debug_generic_expr (lhs_type);
3570 debug_generic_expr (rhs1_type);
3571 return true;
3574 return false;
3576 case REDUC_MAX_EXPR:
3577 case REDUC_MIN_EXPR:
3578 case REDUC_PLUS_EXPR:
3579 if (!VECTOR_TYPE_P (rhs1_type)
3580 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3582 error ("reduction should convert from vector to element type");
3583 debug_generic_expr (lhs_type);
3584 debug_generic_expr (rhs1_type);
3585 return true;
3587 return false;
3589 case VEC_UNPACK_HI_EXPR:
3590 case VEC_UNPACK_LO_EXPR:
3591 case VEC_UNPACK_FLOAT_HI_EXPR:
3592 case VEC_UNPACK_FLOAT_LO_EXPR:
3593 /* FIXME. */
3594 return false;
3596 case NEGATE_EXPR:
3597 case ABS_EXPR:
3598 case BIT_NOT_EXPR:
3599 case PAREN_EXPR:
3600 case CONJ_EXPR:
3601 break;
3603 default:
3604 gcc_unreachable ();
3607 /* For the remaining codes assert there is no conversion involved. */
3608 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3610 error ("non-trivial conversion in unary operation");
3611 debug_generic_expr (lhs_type);
3612 debug_generic_expr (rhs1_type);
3613 return true;
3616 return false;
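/* Example (illustrative, assuming a target where pointers and long
   have the same width): the conversion checks accept

       _1 = (long) p_2;     pointer to integer, no extension needed

   but reject a widening pointer-to-integer conversion, since that
   would involve sign or zero extension -- unless sizetype qualifies
   as a ptrofftype on the target.  */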
3619 /* Verify a gimple assignment statement STMT with a binary rhs.
3620 Returns true if anything is wrong. */
3622 static bool
3623 verify_gimple_assign_binary (gassign *stmt)
3625 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3626 tree lhs = gimple_assign_lhs (stmt);
3627 tree lhs_type = TREE_TYPE (lhs);
3628 tree rhs1 = gimple_assign_rhs1 (stmt);
3629 tree rhs1_type = TREE_TYPE (rhs1);
3630 tree rhs2 = gimple_assign_rhs2 (stmt);
3631 tree rhs2_type = TREE_TYPE (rhs2);
3633 if (!is_gimple_reg (lhs))
3635 error ("non-register as LHS of binary operation");
3636 return true;
3639 if (!is_gimple_val (rhs1)
3640 || !is_gimple_val (rhs2))
3642 error ("invalid operands in binary operation");
3643 return true;
3646 /* First handle operations that involve different types. */
3647 switch (rhs_code)
3649 case COMPLEX_EXPR:
3651 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3652 || !(INTEGRAL_TYPE_P (rhs1_type)
3653 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3654 || !(INTEGRAL_TYPE_P (rhs2_type)
3655 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3657 error ("type mismatch in complex expression");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3660 debug_generic_expr (rhs2_type);
3661 return true;
3664 return false;
3667 case LSHIFT_EXPR:
3668 case RSHIFT_EXPR:
3669 case LROTATE_EXPR:
3670 case RROTATE_EXPR:
3672 /* Shifts and rotates are ok on integral types, fixed point
3673 types and integer vector types. */
3674 if ((!INTEGRAL_TYPE_P (rhs1_type)
3675 && !FIXED_POINT_TYPE_P (rhs1_type)
3676 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3677 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3678 || (!INTEGRAL_TYPE_P (rhs2_type)
3679 /* Vector shifts of vectors are also ok. */
3680 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3681 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3682 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3683 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3684 || !useless_type_conversion_p (lhs_type, rhs1_type))
3686 error ("type mismatch in shift expression");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 debug_generic_expr (rhs2_type);
3690 return true;
3693 return false;
3696 case VEC_LSHIFT_EXPR:
3697 case VEC_RSHIFT_EXPR:
3699 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3700 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3701 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3702 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3703 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3704 || (!INTEGRAL_TYPE_P (rhs2_type)
3705 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3706 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3707 || !useless_type_conversion_p (lhs_type, rhs1_type))
3709 error ("type mismatch in vector shift expression");
3710 debug_generic_expr (lhs_type);
3711 debug_generic_expr (rhs1_type);
3712 debug_generic_expr (rhs2_type);
3713 return true;
3715 /* For shifting a vector of non-integral components we
3716 only allow shifting by a constant multiple of the element size. */
3717 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3718 && (TREE_CODE (rhs2) != INTEGER_CST
3719 || !div_if_zero_remainder (rhs2,
3720 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3722 error ("non-element sized vector shift of floating point vector");
3723 return true;
3726 return false;
3729 case WIDEN_LSHIFT_EXPR:
3731 if (!INTEGRAL_TYPE_P (lhs_type)
3732 || !INTEGRAL_TYPE_P (rhs1_type)
3733 || TREE_CODE (rhs2) != INTEGER_CST
3734 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3736 error ("type mismatch in widening vector shift expression");
3737 debug_generic_expr (lhs_type);
3738 debug_generic_expr (rhs1_type);
3739 debug_generic_expr (rhs2_type);
3740 return true;
3743 return false;
3746 case VEC_WIDEN_LSHIFT_HI_EXPR:
3747 case VEC_WIDEN_LSHIFT_LO_EXPR:
3749 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3750 || TREE_CODE (lhs_type) != VECTOR_TYPE
3751 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3752 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3753 || TREE_CODE (rhs2) != INTEGER_CST
3754 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3755 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3757 error ("type mismatch in widening vector shift expression");
3758 debug_generic_expr (lhs_type);
3759 debug_generic_expr (rhs1_type);
3760 debug_generic_expr (rhs2_type);
3761 return true;
3764 return false;
3767 case PLUS_EXPR:
3768 case MINUS_EXPR:
3770 tree lhs_etype = lhs_type;
3771 tree rhs1_etype = rhs1_type;
3772 tree rhs2_etype = rhs2_type;
3773 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3775 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3776 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3778 error ("invalid non-vector operands to vector valued plus");
3779 return true;
3781 lhs_etype = TREE_TYPE (lhs_type);
3782 rhs1_etype = TREE_TYPE (rhs1_type);
3783 rhs2_etype = TREE_TYPE (rhs2_type);
3785 if (POINTER_TYPE_P (lhs_etype)
3786 || POINTER_TYPE_P (rhs1_etype)
3787 || POINTER_TYPE_P (rhs2_etype))
3789 error ("invalid (pointer) operands to plus/minus");
3790 return true;
3793 /* Continue with generic binary expression handling. */
3794 break;
3797 case POINTER_PLUS_EXPR:
3799 if (!POINTER_TYPE_P (rhs1_type)
3800 || !useless_type_conversion_p (lhs_type, rhs1_type)
3801 || !ptrofftype_p (rhs2_type))
3803 error ("type mismatch in pointer plus expression");
3804 debug_generic_stmt (lhs_type);
3805 debug_generic_stmt (rhs1_type);
3806 debug_generic_stmt (rhs2_type);
3807 return true;
3810 return false;
3813 case TRUTH_ANDIF_EXPR:
3814 case TRUTH_ORIF_EXPR:
3815 case TRUTH_AND_EXPR:
3816 case TRUTH_OR_EXPR:
3817 case TRUTH_XOR_EXPR:
3819 gcc_unreachable ();
3821 case LT_EXPR:
3822 case LE_EXPR:
3823 case GT_EXPR:
3824 case GE_EXPR:
3825 case EQ_EXPR:
3826 case NE_EXPR:
3827 case UNORDERED_EXPR:
3828 case ORDERED_EXPR:
3829 case UNLT_EXPR:
3830 case UNLE_EXPR:
3831 case UNGT_EXPR:
3832 case UNGE_EXPR:
3833 case UNEQ_EXPR:
3834 case LTGT_EXPR:
3835 /* Comparisons are also binary, but the result type is not
3836 connected to the operand types. */
3837 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3839 case WIDEN_MULT_EXPR:
3840 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3841 return true;
3842 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3843 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3845 case WIDEN_SUM_EXPR:
3846 case VEC_WIDEN_MULT_HI_EXPR:
3847 case VEC_WIDEN_MULT_LO_EXPR:
3848 case VEC_WIDEN_MULT_EVEN_EXPR:
3849 case VEC_WIDEN_MULT_ODD_EXPR:
3850 case VEC_PACK_TRUNC_EXPR:
3851 case VEC_PACK_SAT_EXPR:
3852 case VEC_PACK_FIX_TRUNC_EXPR:
3853 /* FIXME. */
3854 return false;
3856 case MULT_EXPR:
3857 case MULT_HIGHPART_EXPR:
3858 case TRUNC_DIV_EXPR:
3859 case CEIL_DIV_EXPR:
3860 case FLOOR_DIV_EXPR:
3861 case ROUND_DIV_EXPR:
3862 case TRUNC_MOD_EXPR:
3863 case CEIL_MOD_EXPR:
3864 case FLOOR_MOD_EXPR:
3865 case ROUND_MOD_EXPR:
3866 case RDIV_EXPR:
3867 case EXACT_DIV_EXPR:
3868 case MIN_EXPR:
3869 case MAX_EXPR:
3870 case BIT_IOR_EXPR:
3871 case BIT_XOR_EXPR:
3872 case BIT_AND_EXPR:
3873 /* Continue with generic binary expression handling. */
3874 break;
3876 default:
3877 gcc_unreachable ();
3880 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3881 || !useless_type_conversion_p (lhs_type, rhs2_type))
3883 error ("type mismatch in binary expression");
3884 debug_generic_stmt (lhs_type);
3885 debug_generic_stmt (rhs1_type);
3886 debug_generic_stmt (rhs2_type);
3887 return true;
3890 return false;
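/* Example (illustrative): pointer arithmetic must be expressed as
   POINTER_PLUS_EXPR with a ptrofftype offset; in dump notation:

       p_1 = p_2 + 4;       rejected: PLUS_EXPR on pointer operands
       p_1 = p_2 p+ 4;      OK: POINTER_PLUS_EXPR

   The generic binary codes that fall through to the final check
   require both operand types to be trivially convertible to the
   LHS type.  */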
3893 /* Verify a gimple assignment statement STMT with a ternary rhs.
3894 Returns true if anything is wrong. */
3896 static bool
3897 verify_gimple_assign_ternary (gassign *stmt)
3899 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3900 tree lhs = gimple_assign_lhs (stmt);
3901 tree lhs_type = TREE_TYPE (lhs);
3902 tree rhs1 = gimple_assign_rhs1 (stmt);
3903 tree rhs1_type = TREE_TYPE (rhs1);
3904 tree rhs2 = gimple_assign_rhs2 (stmt);
3905 tree rhs2_type = TREE_TYPE (rhs2);
3906 tree rhs3 = gimple_assign_rhs3 (stmt);
3907 tree rhs3_type = TREE_TYPE (rhs3);
3909 if (!is_gimple_reg (lhs))
3911 error ("non-register as LHS of ternary operation");
3912 return true;
3915 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3916 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3917 || !is_gimple_val (rhs2)
3918 || !is_gimple_val (rhs3))
3920 error ("invalid operands in ternary operation");
3921 return true;
3924 /* First handle operations that involve different types. */
3925 switch (rhs_code)
3927 case WIDEN_MULT_PLUS_EXPR:
3928 case WIDEN_MULT_MINUS_EXPR:
3929 if ((!INTEGRAL_TYPE_P (rhs1_type)
3930 && !FIXED_POINT_TYPE_P (rhs1_type))
3931 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3932 || !useless_type_conversion_p (lhs_type, rhs3_type)
3933 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3934 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3936 error ("type mismatch in widening multiply-accumulate expression");
3937 debug_generic_expr (lhs_type);
3938 debug_generic_expr (rhs1_type);
3939 debug_generic_expr (rhs2_type);
3940 debug_generic_expr (rhs3_type);
3941 return true;
3943 break;
3945 case FMA_EXPR:
3946 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3947 || !useless_type_conversion_p (lhs_type, rhs2_type)
3948 || !useless_type_conversion_p (lhs_type, rhs3_type))
3950 error ("type mismatch in fused multiply-add expression");
3951 debug_generic_expr (lhs_type);
3952 debug_generic_expr (rhs1_type);
3953 debug_generic_expr (rhs2_type);
3954 debug_generic_expr (rhs3_type);
3955 return true;
3957 break;
3959 case COND_EXPR:
3960 case VEC_COND_EXPR:
3961 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3962 || !useless_type_conversion_p (lhs_type, rhs3_type))
3964 error ("type mismatch in conditional expression");
3965 debug_generic_expr (lhs_type);
3966 debug_generic_expr (rhs2_type);
3967 debug_generic_expr (rhs3_type);
3968 return true;
3970 break;
3972 case VEC_PERM_EXPR:
3973 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3974 || !useless_type_conversion_p (lhs_type, rhs2_type))
3976 error ("type mismatch in vector permute expression");
3977 debug_generic_expr (lhs_type);
3978 debug_generic_expr (rhs1_type);
3979 debug_generic_expr (rhs2_type);
3980 debug_generic_expr (rhs3_type);
3981 return true;
3984 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3985 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3986 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3988 error ("vector types expected in vector permute expression");
3989 debug_generic_expr (lhs_type);
3990 debug_generic_expr (rhs1_type);
3991 debug_generic_expr (rhs2_type);
3992 debug_generic_expr (rhs3_type);
3993 return true;
3996 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3997 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3998 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3999 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4000 != TYPE_VECTOR_SUBPARTS (lhs_type))
4002 error ("vectors with different element number found "
4003 "in vector permute expression");
4004 debug_generic_expr (lhs_type);
4005 debug_generic_expr (rhs1_type);
4006 debug_generic_expr (rhs2_type);
4007 debug_generic_expr (rhs3_type);
4008 return true;
4011 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4012 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4013 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4015 error ("invalid mask type in vector permute expression");
4016 debug_generic_expr (lhs_type);
4017 debug_generic_expr (rhs1_type);
4018 debug_generic_expr (rhs2_type);
4019 debug_generic_expr (rhs3_type);
4020 return true;
4023 return false;
4025 case SAD_EXPR:
4026 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4027 || !useless_type_conversion_p (lhs_type, rhs3_type)
4028 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4029 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4030 > GET_MODE_BITSIZE (GET_MODE_INNER
4031 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4033 error ("type mismatch in sad expression");
4034 debug_generic_expr (lhs_type);
4035 debug_generic_expr (rhs1_type);
4036 debug_generic_expr (rhs2_type);
4037 debug_generic_expr (rhs3_type);
4038 return true;
4041 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4042 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4043 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4045 error ("vector types expected in sad expression");
4046 debug_generic_expr (lhs_type);
4047 debug_generic_expr (rhs1_type);
4048 debug_generic_expr (rhs2_type);
4049 debug_generic_expr (rhs3_type);
4050 return true;
4053 return false;
4055 case DOT_PROD_EXPR:
4056 case REALIGN_LOAD_EXPR:
4057 /* FIXME. */
4058 return false;
4060 default:
4061 gcc_unreachable ();
4063 return false;
4066 /* Verify a gimple assignment statement STMT with a single rhs.
4067 Returns true if anything is wrong. */
4069 static bool
4070 verify_gimple_assign_single (gassign *stmt)
4072 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4073 tree lhs = gimple_assign_lhs (stmt);
4074 tree lhs_type = TREE_TYPE (lhs);
4075 tree rhs1 = gimple_assign_rhs1 (stmt);
4076 tree rhs1_type = TREE_TYPE (rhs1);
4077 bool res = false;
4079 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4081 error ("non-trivial conversion at assignment");
4082 debug_generic_expr (lhs_type);
4083 debug_generic_expr (rhs1_type);
4084 return true;
4087 if (gimple_clobber_p (stmt)
4088 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4090 error ("non-decl/MEM_REF LHS in clobber statement");
4091 debug_generic_expr (lhs);
4092 return true;
4095 if (handled_component_p (lhs)
4096 || TREE_CODE (lhs) == MEM_REF
4097 || TREE_CODE (lhs) == TARGET_MEM_REF)
4098 res |= verify_types_in_gimple_reference (lhs, true);
4100 /* Special codes we cannot handle via their class. */
4101 switch (rhs_code)
4103 case ADDR_EXPR:
4105 tree op = TREE_OPERAND (rhs1, 0);
4106 if (!is_gimple_addressable (op))
4108 error ("invalid operand in unary expression");
4109 return true;
4112 /* Technically there is no longer a need for matching types, but
4113 gimple hygiene asks for this check. In LTO we can end up
4114 combining incompatible units and thus end up with addresses
4115 of globals that change their type to a common one. */
4116 if (!in_lto_p
4117 && !types_compatible_p (TREE_TYPE (op),
4118 TREE_TYPE (TREE_TYPE (rhs1)))
4119 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4120 TREE_TYPE (op)))
4122 error ("type mismatch in address expression");
4123 debug_generic_stmt (TREE_TYPE (rhs1));
4124 debug_generic_stmt (TREE_TYPE (op));
4125 return true;
4128 return verify_types_in_gimple_reference (op, true);
4131 /* tcc_reference */
4132 case INDIRECT_REF:
4133 error ("INDIRECT_REF in gimple IL");
4134 return true;
4136 case COMPONENT_REF:
4137 case BIT_FIELD_REF:
4138 case ARRAY_REF:
4139 case ARRAY_RANGE_REF:
4140 case VIEW_CONVERT_EXPR:
4141 case REALPART_EXPR:
4142 case IMAGPART_EXPR:
4143 case TARGET_MEM_REF:
4144 case MEM_REF:
4145 if (!is_gimple_reg (lhs)
4146 && is_gimple_reg_type (TREE_TYPE (lhs)))
4148 error ("invalid rhs for gimple memory store");
4149 debug_generic_stmt (lhs);
4150 debug_generic_stmt (rhs1);
4151 return true;
4153 return res || verify_types_in_gimple_reference (rhs1, false);
4155 /* tcc_constant */
4156 case SSA_NAME:
4157 case INTEGER_CST:
4158 case REAL_CST:
4159 case FIXED_CST:
4160 case COMPLEX_CST:
4161 case VECTOR_CST:
4162 case STRING_CST:
4163 return res;
4165 /* tcc_declaration */
4166 case CONST_DECL:
4167 return res;
4168 case VAR_DECL:
4169 case PARM_DECL:
4170 if (!is_gimple_reg (lhs)
4171 && !is_gimple_reg (rhs1)
4172 && is_gimple_reg_type (TREE_TYPE (lhs)))
4174 error ("invalid rhs for gimple memory store");
4175 debug_generic_stmt (lhs);
4176 debug_generic_stmt (rhs1);
4177 return true;
4179 return res;
4181 case CONSTRUCTOR:
4182 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4184 unsigned int i;
4185 tree elt_i, elt_v, elt_t = NULL_TREE;
4187 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4188 return res;
4189 /* For vector CONSTRUCTORs we require that either it is an empty
4190 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4191 (then the element count must be correct to cover the whole
4192 outer vector and the index must be NULL on all elements), or it
4193 is a CONSTRUCTOR of scalar elements, where we as an exception
4194 allow a smaller number of elements (assuming zero filling) and
4195 consecutive indexes as compared to NULL indexes (such
4196 CONSTRUCTORs can appear in the IL from FEs). */
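/* Illustrative sketch (GIMPLE pseudo-dumps, not taken from this file):
   for a V4SI lhs the checks below accept
     _5 = { _1, _2, _3, _4 };      scalar elements, NULL indexes
     _6 = { _1, _2 };              fewer scalars, zero-filled tail
     _7 = { v2si_1, v2si_2 };      two V2SI halves covering V4SI
   but reject { v2si_1 } alone, because vector-typed elements must
   cover the whole outer vector. */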
4197 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4199 if (elt_t == NULL_TREE)
4201 elt_t = TREE_TYPE (elt_v);
4202 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4204 tree elt_t = TREE_TYPE (elt_v);
4205 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4206 TREE_TYPE (elt_t)))
4208 error ("incorrect type of vector CONSTRUCTOR"
4209 " elements");
4210 debug_generic_stmt (rhs1);
4211 return true;
4213 else if (CONSTRUCTOR_NELTS (rhs1)
4214 * TYPE_VECTOR_SUBPARTS (elt_t)
4215 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4217 error ("incorrect number of vector CONSTRUCTOR"
4218 " elements");
4219 debug_generic_stmt (rhs1);
4220 return true;
4223 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4224 elt_t))
4226 error ("incorrect type of vector CONSTRUCTOR elements");
4227 debug_generic_stmt (rhs1);
4228 return true;
4230 else if (CONSTRUCTOR_NELTS (rhs1)
4231 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4233 error ("incorrect number of vector CONSTRUCTOR elements");
4234 debug_generic_stmt (rhs1);
4235 return true;
4238 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4240 error ("incorrect type of vector CONSTRUCTOR elements");
4241 debug_generic_stmt (rhs1);
4242 return true;
4244 if (elt_i != NULL_TREE
4245 && (TREE_CODE (elt_t) == VECTOR_TYPE
4246 || TREE_CODE (elt_i) != INTEGER_CST
4247 || compare_tree_int (elt_i, i) != 0))
4249 error ("vector CONSTRUCTOR with non-NULL element index");
4250 debug_generic_stmt (rhs1);
4251 return true;
4253 if (!is_gimple_val (elt_v))
4255 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4256 debug_generic_stmt (rhs1);
4257 return true;
4261 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4263 error ("non-vector CONSTRUCTOR with elements");
4264 debug_generic_stmt (rhs1);
4265 return true;
4267 return res;
4268 case OBJ_TYPE_REF:
4269 case ASSERT_EXPR:
4270 case WITH_SIZE_EXPR:
4271 /* FIXME. */
4272 return res;
4274 default:;
4277 return res;
4280 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4281 is a problem, otherwise false. */
4283 static bool
4284 verify_gimple_assign (gassign *stmt)
4286 switch (gimple_assign_rhs_class (stmt))
4288 case GIMPLE_SINGLE_RHS:
4289 return verify_gimple_assign_single (stmt);
4291 case GIMPLE_UNARY_RHS:
4292 return verify_gimple_assign_unary (stmt);
4294 case GIMPLE_BINARY_RHS:
4295 return verify_gimple_assign_binary (stmt);
4297 case GIMPLE_TERNARY_RHS:
4298 return verify_gimple_assign_ternary (stmt);
4300 default:
4301 gcc_unreachable ();
4305 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4306 is a problem, otherwise false. */
4308 static bool
4309 verify_gimple_return (greturn *stmt)
4311 tree op = gimple_return_retval (stmt);
4312 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4314 /* We cannot test for present return values as we do not fix up missing
4315 return values from the original source. */
4316 if (op == NULL)
4317 return false;
4319 if (!is_gimple_val (op)
4320 && TREE_CODE (op) != RESULT_DECL)
4322 error ("invalid operand in return statement");
4323 debug_generic_stmt (op);
4324 return true;
4327 if ((TREE_CODE (op) == RESULT_DECL
4328 && DECL_BY_REFERENCE (op))
4329 || (TREE_CODE (op) == SSA_NAME
4330 && SSA_NAME_VAR (op)
4331 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4332 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4333 op = TREE_TYPE (op);
4335 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4337 error ("invalid conversion in return statement");
4338 debug_generic_stmt (restype);
4339 debug_generic_stmt (TREE_TYPE (op));
4340 return true;
4343 return false;
4347 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4348 is a problem, otherwise false. */
4350 static bool
4351 verify_gimple_goto (ggoto *stmt)
4353 tree dest = gimple_goto_dest (stmt);
4355 /* ??? We have two canonical forms of direct goto destinations, a
4356 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
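/* E.g. a GNU C computed goto "goto *labels[i];" arrives here with DEST
   being a pointer-valued SSA name (illustrative; simple direct gotos
   are normally represented implicitly as CFG edges). */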
4357 if (TREE_CODE (dest) != LABEL_DECL
4358 && (!is_gimple_val (dest)
4359 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4361 error ("goto destination is neither a label nor a pointer");
4362 return true;
4365 return false;
4368 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4369 is a problem, otherwise false. */
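/* For illustration, a switch the checks below accept looks like the
   pseudo-dump

     switch (i_1) <default: <L3>, case 1: <L0>, case 4 ... 6: <L1>,
                   case 9: <L2>>

   i.e. the default label (NULL CASE_LOW and CASE_HIGH) comes first,
   every other label has CASE_LOW, ranges have CASE_LOW < CASE_HIGH,
   all labels share one type, and the cases are sorted ascending. */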
4371 static bool
4372 verify_gimple_switch (gswitch *stmt)
4374 unsigned int i, n;
4375 tree elt, prev_upper_bound = NULL_TREE;
4376 tree index_type, elt_type = NULL_TREE;
4378 if (!is_gimple_val (gimple_switch_index (stmt)))
4380 error ("invalid operand to switch statement");
4381 debug_generic_stmt (gimple_switch_index (stmt));
4382 return true;
4385 index_type = TREE_TYPE (gimple_switch_index (stmt));
4386 if (! INTEGRAL_TYPE_P (index_type))
4388 error ("non-integral type switch statement");
4389 debug_generic_expr (index_type);
4390 return true;
4393 elt = gimple_switch_label (stmt, 0);
4394 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4396 error ("invalid default case label in switch statement");
4397 debug_generic_expr (elt);
4398 return true;
4401 n = gimple_switch_num_labels (stmt);
4402 for (i = 1; i < n; i++)
4404 elt = gimple_switch_label (stmt, i);
4406 if (! CASE_LOW (elt))
4408 error ("invalid case label in switch statement");
4409 debug_generic_expr (elt);
4410 return true;
4412 if (CASE_HIGH (elt)
4413 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4415 error ("invalid case range in switch statement");
4416 debug_generic_expr (elt);
4417 return true;
4420 if (elt_type)
4422 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4423 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4425 error ("type mismatch for case label in switch statement");
4426 debug_generic_expr (elt);
4427 return true;
4430 else
4432 elt_type = TREE_TYPE (CASE_LOW (elt));
4433 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4435 error ("type precision mismatch in switch statement");
4436 return true;
4440 if (prev_upper_bound)
4442 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4444 error ("case labels not sorted in switch statement");
4445 return true;
4449 prev_upper_bound = CASE_HIGH (elt);
4450 if (! prev_upper_bound)
4451 prev_upper_bound = CASE_LOW (elt);
4454 return false;
4457 /* Verify a gimple debug statement STMT.
4458 Returns true if anything is wrong. */
4460 static bool
4461 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4463 /* There isn't much that could be wrong in a gimple debug stmt. A
4464 gimple debug bind stmt, for example, maps a tree, usually a
4465 VAR_DECL or a PARM_DECL but possibly some scalarized component
4466 or member of an aggregate type, to another tree that can be an
4467 arbitrary expression. These stmts expand into debug insns, and
4468 are converted to debug notes by var-tracking.c. */
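/* E.g. a bind appears in dumps as "# DEBUG x => y_1 + 1"
   (illustrative). */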
4469 return false;
4472 /* Verify a gimple label statement STMT.
4473 Returns true if anything is wrong. */
4475 static bool
4476 verify_gimple_label (glabel *stmt)
4478 tree decl = gimple_label_label (stmt);
4479 int uid;
4480 bool err = false;
4482 if (TREE_CODE (decl) != LABEL_DECL)
4483 return true;
4484 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4485 && DECL_CONTEXT (decl) != current_function_decl)
4487 error ("label's context is not the current function decl");
4488 err |= true;
4491 uid = LABEL_DECL_UID (decl);
4492 if (cfun->cfg
4493 && (uid == -1
4494 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4496 error ("incorrect entry in label_to_block_map");
4497 err |= true;
4500 uid = EH_LANDING_PAD_NR (decl);
4501 if (uid)
4503 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4504 if (decl != lp->post_landing_pad)
4506 error ("incorrect setting of landing pad number");
4507 err |= true;
4511 return err;
4514 /* Verify a gimple cond statement STMT.
4515 Returns true if anything is wrong. */
4517 static bool
4518 verify_gimple_cond (gcond *stmt)
4520 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4522 error ("invalid comparison code in gimple cond");
4523 return true;
4525 if (!(!gimple_cond_true_label (stmt)
4526 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4527 || !(!gimple_cond_false_label (stmt)
4528 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4530 error ("invalid labels in gimple cond");
4531 return true;
4534 return verify_gimple_comparison (boolean_type_node,
4535 gimple_cond_lhs (stmt),
4536 gimple_cond_rhs (stmt));
4539 /* Verify the GIMPLE statement STMT. Returns true if there is an
4540 error, otherwise false. */
4542 static bool
4543 verify_gimple_stmt (gimple stmt)
4545 switch (gimple_code (stmt))
4547 case GIMPLE_ASSIGN:
4548 return verify_gimple_assign (as_a <gassign *> (stmt));
4550 case GIMPLE_LABEL:
4551 return verify_gimple_label (as_a <glabel *> (stmt));
4553 case GIMPLE_CALL:
4554 return verify_gimple_call (as_a <gcall *> (stmt));
4556 case GIMPLE_COND:
4557 return verify_gimple_cond (as_a <gcond *> (stmt));
4559 case GIMPLE_GOTO:
4560 return verify_gimple_goto (as_a <ggoto *> (stmt));
4562 case GIMPLE_SWITCH:
4563 return verify_gimple_switch (as_a <gswitch *> (stmt));
4565 case GIMPLE_RETURN:
4566 return verify_gimple_return (as_a <greturn *> (stmt));
4568 case GIMPLE_ASM:
4569 return false;
4571 case GIMPLE_TRANSACTION:
4572 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4574 /* Tuples that do not have tree operands. */
4575 case GIMPLE_NOP:
4576 case GIMPLE_PREDICT:
4577 case GIMPLE_RESX:
4578 case GIMPLE_EH_DISPATCH:
4579 case GIMPLE_EH_MUST_NOT_THROW:
4580 return false;
4582 CASE_GIMPLE_OMP:
4583 /* OpenMP directives are validated by the FE and never operated
4584 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4585 non-gimple expressions when the main index variable has had
4586 its address taken. This does not affect the loop itself
4587 because the header of a GIMPLE_OMP_FOR is merely used to determine
4588 how to set up the parallel iteration. */
4589 return false;
4591 case GIMPLE_DEBUG:
4592 return verify_gimple_debug (stmt);
4594 default:
4595 gcc_unreachable ();
4599 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4600 and false otherwise. */
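/* For illustration, a well-formed PHI appears in dumps as

     x_3 = PHI <x_1(2), x_2(4)>

   with one argument per incoming edge; a virtual PHI instead has the
   function's single virtual operand (.MEM in dumps) as its result and
   arguments. */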
4602 static bool
4603 verify_gimple_phi (gimple phi)
4605 bool err = false;
4606 unsigned i;
4607 tree phi_result = gimple_phi_result (phi);
4608 bool virtual_p;
4610 if (!phi_result)
4612 error ("invalid PHI result");
4613 return true;
4616 virtual_p = virtual_operand_p (phi_result);
4617 if (TREE_CODE (phi_result) != SSA_NAME
4618 || (virtual_p
4619 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4621 error ("invalid PHI result");
4622 err = true;
4625 for (i = 0; i < gimple_phi_num_args (phi); i++)
4627 tree t = gimple_phi_arg_def (phi, i);
4629 if (!t)
4631 error ("missing PHI def");
4632 err |= true;
4633 continue;
4635 /* Addressable variables do have SSA_NAMEs but they
4636 are not considered gimple values. */
4637 else if ((TREE_CODE (t) == SSA_NAME
4638 && virtual_p != virtual_operand_p (t))
4639 || (virtual_p
4640 && (TREE_CODE (t) != SSA_NAME
4641 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4642 || (!virtual_p
4643 && !is_gimple_val (t)))
4645 error ("invalid PHI argument");
4646 debug_generic_expr (t);
4647 err |= true;
4649 #ifdef ENABLE_TYPES_CHECKING
4650 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4652 error ("incompatible types in PHI argument %u", i);
4653 debug_generic_stmt (TREE_TYPE (phi_result));
4654 debug_generic_stmt (TREE_TYPE (t));
4655 err |= true;
4657 #endif
4660 return err;
4663 /* Verify the GIMPLE statements inside the sequence STMTS. */
4665 static bool
4666 verify_gimple_in_seq_2 (gimple_seq stmts)
4668 gimple_stmt_iterator ittr;
4669 bool err = false;
4671 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4673 gimple stmt = gsi_stmt (ittr);
4675 switch (gimple_code (stmt))
4677 case GIMPLE_BIND:
4678 err |= verify_gimple_in_seq_2 (
4679 gimple_bind_body (as_a <gbind *> (stmt)));
4680 break;
4682 case GIMPLE_TRY:
4683 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4684 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4685 break;
4687 case GIMPLE_EH_FILTER:
4688 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4689 break;
4691 case GIMPLE_EH_ELSE:
4693 geh_else *eh_else = as_a <geh_else *> (stmt);
4694 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4695 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4697 break;
4699 case GIMPLE_CATCH:
4700 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4701 as_a <gcatch *> (stmt)));
4702 break;
4704 case GIMPLE_TRANSACTION:
4705 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4706 break;
4708 default:
4710 bool err2 = verify_gimple_stmt (stmt);
4711 if (err2)
4712 debug_gimple_stmt (stmt);
4713 err |= err2;
4718 return err;
4721 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4722 is a problem, otherwise false. */
4724 static bool
4725 verify_gimple_transaction (gtransaction *stmt)
4727 tree lab = gimple_transaction_label (stmt);
4728 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4729 return true;
4730 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4734 /* Verify the GIMPLE statements inside the statement list STMTS. */
4736 DEBUG_FUNCTION void
4737 verify_gimple_in_seq (gimple_seq stmts)
4739 timevar_push (TV_TREE_STMT_VERIFY);
4740 if (verify_gimple_in_seq_2 (stmts))
4741 internal_error ("verify_gimple failed");
4742 timevar_pop (TV_TREE_STMT_VERIFY);
4745 /* Return true when T can be shared. */
4747 static bool
4748 tree_node_can_be_shared (tree t)
4750 if (IS_TYPE_OR_DECL_P (t)
4751 || is_gimple_min_invariant (t)
4752 || TREE_CODE (t) == SSA_NAME
4753 || t == error_mark_node
4754 || TREE_CODE (t) == IDENTIFIER_NODE)
4755 return true;
4757 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4758 return true;
4760 if (DECL_P (t))
4761 return true;
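  /* Everything else, e.g. an ARRAY_REF or other expression tree, must
     be unshared per statement (illustrative). */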
4763 return false;
4766 /* Called via walk_tree. Verify tree sharing. */
4768 static tree
4769 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4771 hash_set<void *> *visited = (hash_set<void *> *) data;
4773 if (tree_node_can_be_shared (*tp))
4775 *walk_subtrees = false;
4776 return NULL;
4779 if (visited->add (*tp))
4780 return *tp;
4782 return NULL;
4785 /* Called via walk_gimple_stmt. Verify tree sharing. */
4787 static tree
4788 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4790 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4791 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4794 static bool eh_error_found;
4795 bool
4796 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4797 hash_set<gimple> *visited)
4799 if (!visited->contains (stmt))
4801 error ("dead STMT in EH table");
4802 debug_gimple_stmt (stmt);
4803 eh_error_found = true;
4805 return true;
4808 /* Verify that the block of location LOC is in BLOCKS. */
4810 static bool
4811 verify_location (hash_set<tree> *blocks, location_t loc)
4813 tree block = LOCATION_BLOCK (loc);
4814 if (block != NULL_TREE
4815 && !blocks->contains (block))
4817 error ("location references block not in block tree");
4818 return true;
4820 if (block != NULL_TREE)
4821 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4822 return false;
4825 /* Called via walk_tree. Verify that expressions have no blocks. */
4827 static tree
4828 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4830 if (!EXPR_P (*tp))
4832 *walk_subtrees = false;
4833 return NULL;
4836 location_t loc = EXPR_LOCATION (*tp);
4837 if (LOCATION_BLOCK (loc) != NULL)
4838 return *tp;
4840 return NULL;
4843 /* Called via walk_tree. Verify locations of expressions. */
4845 static tree
4846 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4848 hash_set<tree> *blocks = (hash_set<tree> *) data;
4850 if (TREE_CODE (*tp) == VAR_DECL
4851 && DECL_HAS_DEBUG_EXPR_P (*tp))
4853 tree t = DECL_DEBUG_EXPR (*tp);
4854 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4855 if (addr)
4856 return addr;
4858 if ((TREE_CODE (*tp) == VAR_DECL
4859 || TREE_CODE (*tp) == PARM_DECL
4860 || TREE_CODE (*tp) == RESULT_DECL)
4861 && DECL_HAS_VALUE_EXPR_P (*tp))
4863 tree t = DECL_VALUE_EXPR (*tp);
4864 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4865 if (addr)
4866 return addr;
4869 if (!EXPR_P (*tp))
4871 *walk_subtrees = false;
4872 return NULL;
4875 location_t loc = EXPR_LOCATION (*tp);
4876 if (verify_location (blocks, loc))
4877 return *tp;
4879 return NULL;
4882 /* Called via walk_gimple_op. Verify locations of expressions. */
4884 static tree
4885 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4887 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4888 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4891 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4893 static void
4894 collect_subblocks (hash_set<tree> *blocks, tree block)
4896 tree t;
4897 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4899 blocks->add (t);
4900 collect_subblocks (blocks, t);
4904 /* Verify the GIMPLE statements in the CFG of FN. */
4906 DEBUG_FUNCTION void
4907 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4909 basic_block bb;
4910 bool err = false;
4912 timevar_push (TV_TREE_STMT_VERIFY);
4913 hash_set<void *> visited;
4914 hash_set<gimple> visited_stmts;
4916 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4917 hash_set<tree> blocks;
4918 if (DECL_INITIAL (fn->decl))
4920 blocks.add (DECL_INITIAL (fn->decl));
4921 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4924 FOR_EACH_BB_FN (bb, fn)
4926 gimple_stmt_iterator gsi;
4928 for (gphi_iterator gpi = gsi_start_phis (bb);
4929 !gsi_end_p (gpi);
4930 gsi_next (&gpi))
4932 gphi *phi = gpi.phi ();
4933 bool err2 = false;
4934 unsigned i;
4936 visited_stmts.add (phi);
4938 if (gimple_bb (phi) != bb)
4940 error ("gimple_bb (phi) is set to a wrong basic block");
4941 err2 = true;
4944 err2 |= verify_gimple_phi (phi);
4946 /* Only PHI arguments have locations. */
4947 if (gimple_location (phi) != UNKNOWN_LOCATION)
4949 error ("PHI node with location");
4950 err2 = true;
4953 for (i = 0; i < gimple_phi_num_args (phi); i++)
4955 tree arg = gimple_phi_arg_def (phi, i);
4956 tree addr = walk_tree (&arg, verify_node_sharing_1,
4957 &visited, NULL);
4958 if (addr)
4960 error ("incorrect sharing of tree nodes");
4961 debug_generic_expr (addr);
4962 err2 |= true;
4964 location_t loc = gimple_phi_arg_location (phi, i);
4965 if (virtual_operand_p (gimple_phi_result (phi))
4966 && loc != UNKNOWN_LOCATION)
4968 error ("virtual PHI with argument locations");
4969 err2 = true;
4971 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4972 if (addr)
4974 debug_generic_expr (addr);
4975 err2 = true;
4977 err2 |= verify_location (&blocks, loc);
4980 if (err2)
4981 debug_gimple_stmt (phi);
4982 err |= err2;
4985 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4987 gimple stmt = gsi_stmt (gsi);
4988 bool err2 = false;
4989 struct walk_stmt_info wi;
4990 tree addr;
4991 int lp_nr;
4993 visited_stmts.add (stmt);
4995 if (gimple_bb (stmt) != bb)
4997 error ("gimple_bb (stmt) is set to a wrong basic block");
4998 err2 = true;
5001 err2 |= verify_gimple_stmt (stmt);
5002 err2 |= verify_location (&blocks, gimple_location (stmt));
5004 memset (&wi, 0, sizeof (wi));
5005 wi.info = (void *) &visited;
5006 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5007 if (addr)
5009 error ("incorrect sharing of tree nodes");
5010 debug_generic_expr (addr);
5011 err2 |= true;
5014 memset (&wi, 0, sizeof (wi));
5015 wi.info = (void *) &blocks;
5016 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5017 if (addr)
5019 debug_generic_expr (addr);
5020 err2 |= true;
5023 /* ??? Instead of not checking these stmts at all, the walker
5024 should know its context via wi. */
5025 if (!is_gimple_debug (stmt)
5026 && !is_gimple_omp (stmt))
5028 memset (&wi, 0, sizeof (wi));
5029 addr = walk_gimple_op (stmt, verify_expr, &wi);
5030 if (addr)
5032 debug_generic_expr (addr);
5033 inform (gimple_location (stmt), "in statement");
5034 err2 |= true;
5038 /* If the statement is marked as part of an EH region, then it is
5039 expected that the statement could throw. Verify that when we
5040 have optimizations that simplify statements such that we prove
5041 that they cannot throw, that we update other data structures
5042 to match. */
5043 lp_nr = lookup_stmt_eh_lp (stmt);
5044 if (lp_nr > 0)
5046 if (!stmt_could_throw_p (stmt))
5048 if (verify_nothrow)
5050 error ("statement marked for throw, but doesn%'t");
5051 err2 |= true;
5054 else if (!gsi_one_before_end_p (gsi))
5056 error ("statement marked for throw in middle of block");
5057 err2 |= true;
5061 if (err2)
5062 debug_gimple_stmt (stmt);
5063 err |= err2;
5067 eh_error_found = false;
5068 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5069 if (eh_table)
5070 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5071 (&visited_stmts);
5073 if (err || eh_error_found)
5074 internal_error ("verify_gimple failed");
5076 verify_histograms ();
5077 timevar_pop (TV_TREE_STMT_VERIFY);
5081 /* Verifies that the flow information is OK. */
5083 static int
5084 gimple_verify_flow_info (void)
5086 int err = 0;
5087 basic_block bb;
5088 gimple_stmt_iterator gsi;
5089 gimple stmt;
5090 edge e;
5091 edge_iterator ei;
5093 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5094 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5096 error ("ENTRY_BLOCK has IL associated with it");
5097 err = 1;
5100 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5101 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5103 error ("EXIT_BLOCK has IL associated with it");
5104 err = 1;
5107 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5108 if (e->flags & EDGE_FALLTHRU)
5110 error ("fallthru to exit from bb %d", e->src->index);
5111 err = 1;
5114 FOR_EACH_BB_FN (bb, cfun)
5116 bool found_ctrl_stmt = false;
5118 stmt = NULL;
5120 /* Skip labels on the start of basic block. */
5121 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5123 tree label;
5124 gimple prev_stmt = stmt;
5126 stmt = gsi_stmt (gsi);
5128 if (gimple_code (stmt) != GIMPLE_LABEL)
5129 break;
5131 label = gimple_label_label (as_a <glabel *> (stmt));
5132 if (prev_stmt && DECL_NONLOCAL (label))
5134 error ("nonlocal label ");
5135 print_generic_expr (stderr, label, 0);
5136 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5137 bb->index);
5138 err = 1;
5141 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5143 error ("EH landing pad label ");
5144 print_generic_expr (stderr, label, 0);
5145 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5146 bb->index);
5147 err = 1;
5150 if (label_to_block (label) != bb)
5152 error ("label ");
5153 print_generic_expr (stderr, label, 0);
5154 fprintf (stderr, " to block does not match in bb %d",
5155 bb->index);
5156 err = 1;
5159 if (decl_function_context (label) != current_function_decl)
5161 error ("label ");
5162 print_generic_expr (stderr, label, 0);
5163 fprintf (stderr, " has incorrect context in bb %d",
5164 bb->index);
5165 err = 1;
5169 /* Verify that body of basic block BB is free of control flow. */
5170 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5172 gimple stmt = gsi_stmt (gsi);
5174 if (found_ctrl_stmt)
5176 error ("control flow in the middle of basic block %d",
5177 bb->index);
5178 err = 1;
5181 if (stmt_ends_bb_p (stmt))
5182 found_ctrl_stmt = true;
5184 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5186 error ("label ");
5187 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5188 fprintf (stderr, " in the middle of basic block %d", bb->index);
5189 err = 1;
5193 gsi = gsi_last_bb (bb);
5194 if (gsi_end_p (gsi))
5195 continue;
5197 stmt = gsi_stmt (gsi);
5199 if (gimple_code (stmt) == GIMPLE_LABEL)
5200 continue;
5202 err |= verify_eh_edges (stmt);
5204 if (is_ctrl_stmt (stmt))
5206 FOR_EACH_EDGE (e, ei, bb->succs)
5207 if (e->flags & EDGE_FALLTHRU)
5209 error ("fallthru edge after a control statement in bb %d",
5210 bb->index);
5211 err = 1;
5215 if (gimple_code (stmt) != GIMPLE_COND)
5217 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5218 after anything other than an if statement. */
5219 FOR_EACH_EDGE (e, ei, bb->succs)
5220 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5222 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5223 bb->index);
5224 err = 1;
5228 switch (gimple_code (stmt))
5230 case GIMPLE_COND:
5232 edge true_edge;
5233 edge false_edge;
5235 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5237 if (!true_edge
5238 || !false_edge
5239 || !(true_edge->flags & EDGE_TRUE_VALUE)
5240 || !(false_edge->flags & EDGE_FALSE_VALUE)
5241 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5242 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5243 || EDGE_COUNT (bb->succs) >= 3)
5245 error ("wrong outgoing edge flags at end of bb %d",
5246 bb->index);
5247 err = 1;
5250 break;
5252 case GIMPLE_GOTO:
5253 if (simple_goto_p (stmt))
5255 error ("explicit goto at end of bb %d", bb->index);
5256 err = 1;
5258 else
5260 /* FIXME. We should double check that the labels in the
5261 destination blocks have their address taken. */
5262 FOR_EACH_EDGE (e, ei, bb->succs)
5263 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5264 | EDGE_FALSE_VALUE))
5265 || !(e->flags & EDGE_ABNORMAL))
5267 error ("wrong outgoing edge flags at end of bb %d",
5268 bb->index);
5269 err = 1;
5272 break;
5274 case GIMPLE_CALL:
5275 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5276 break;
5277 /* ... fallthru ... */
5278 case GIMPLE_RETURN:
5279 if (!single_succ_p (bb)
5280 || (single_succ_edge (bb)->flags
5281 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5282 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5284 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5285 err = 1;
5287 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5289 error ("return edge does not point to exit in bb %d",
5290 bb->index);
5291 err = 1;
5293 break;
5295 case GIMPLE_SWITCH:
5297 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5298 tree prev;
5299 edge e;
5300 size_t i, n;
5302 n = gimple_switch_num_labels (switch_stmt);
5304 /* Mark all the destination basic blocks. */
5305 for (i = 0; i < n; ++i)
5307 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5308 basic_block label_bb = label_to_block (lab);
5309 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5310 label_bb->aux = (void *)1;
5313 /* Verify that the case labels are sorted. */
5314 prev = gimple_switch_label (switch_stmt, 0);
5315 for (i = 1; i < n; ++i)
5317 tree c = gimple_switch_label (switch_stmt, i);
5318 if (!CASE_LOW (c))
5320 error ("found default case not at the start of "
5321 "case vector");
5322 err = 1;
5323 continue;
5325 if (CASE_LOW (prev)
5326 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5328 error ("case labels not sorted: ");
5329 print_generic_expr (stderr, prev, 0);
5330 fprintf (stderr," is greater than ");
5331 print_generic_expr (stderr, c, 0);
5332 fprintf (stderr," but comes before it.\n");
5333 err = 1;
5335 prev = c;
5337 /* VRP will remove the default case if it can prove it will
5338 never be executed. So do not verify there always exists
5339 a default case here. */
5341 FOR_EACH_EDGE (e, ei, bb->succs)
5343 if (!e->dest->aux)
5345 error ("extra outgoing edge %d->%d",
5346 bb->index, e->dest->index);
5347 err = 1;
5350 e->dest->aux = (void *)2;
5351 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5352 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5354 error ("wrong outgoing edge flags at end of bb %d",
5355 bb->index);
5356 err = 1;
5360 /* Check that we have all of them. */
5361 for (i = 0; i < n; ++i)
5363 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5364 basic_block label_bb = label_to_block (lab);
5366 if (label_bb->aux != (void *)2)
5368 error ("missing edge %i->%i", bb->index, label_bb->index);
5369 err = 1;
5373 FOR_EACH_EDGE (e, ei, bb->succs)
5374 e->dest->aux = (void *)0;
5376 break;
5378 case GIMPLE_EH_DISPATCH:
5379 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5380 break;
5382 default:
5383 break;
5387 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5388 verify_dominators (CDI_DOMINATORS);
5390 return err;
5394 /* Updates phi nodes after creating a forwarder block joined
5395 by edge FALLTHRU. */
5397 static void
5398 gimple_make_forwarder_block (edge fallthru)
5400 edge e;
5401 edge_iterator ei;
5402 basic_block dummy, bb;
5403 tree var;
5404 gphi_iterator gsi;
5406 dummy = fallthru->src;
5407 bb = fallthru->dest;
5409 if (single_pred_p (bb))
5410 return;
5412 /* If we redirected a branch we must create new PHI nodes at the
5413 start of BB. */
5414 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5416 gphi *phi, *new_phi;
5418 phi = gsi.phi ();
5419 var = gimple_phi_result (phi);
5420 new_phi = create_phi_node (var, bb);
5421 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5422 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5423 UNKNOWN_LOCATION);
5426 /* Add the arguments we have stored on edges. */
5427 FOR_EACH_EDGE (e, ei, bb->preds)
5429 if (e == fallthru)
5430 continue;
5432 flush_pending_stmts (e);
5437 /* Return a non-special label in the head of basic block BLOCK.
5438 Create one if it doesn't exist. */
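/* For illustration: the edge-redirection code later in this file calls
   this to obtain a label it can point a switch case or an asm goto
   label at; DECL_NONLOCAL labels are deliberately skipped here so they
   are never reused for that purpose. */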
5440 tree
5441 gimple_block_label (basic_block bb)
5443 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5444 bool first = true;
5445 tree label;
5446 glabel *stmt;
5448 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5450 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5451 if (!stmt)
5452 break;
5453 label = gimple_label_label (stmt);
5454 if (!DECL_NONLOCAL (label))
5456 if (!first)
5457 gsi_move_before (&i, &s);
5458 return label;
5462 label = create_artificial_label (UNKNOWN_LOCATION);
5463 stmt = gimple_build_label (label);
5464 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5465 return label;
5469 /* Attempt to perform edge redirection by replacing a possibly complex
5470 jump instruction by a goto or by removing the jump completely.
5471 This can apply only if all edges now point to the same block. The
5472 parameters and return values are equivalent to
5473 redirect_edge_and_branch. */
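/* E.g. if SRC ends in "if (a_1 > 0) goto <L0>; else goto <L0>;", both
   successors already reach TARGET, so the condition is deleted and the
   single surviving edge becomes a plain fallthru (illustrative). */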
5475 static edge
5476 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5478 basic_block src = e->src;
5479 gimple_stmt_iterator i;
5480 gimple stmt;
5482 /* We can replace or remove a complex jump only when we have exactly
5483 two edges. */
5484 if (EDGE_COUNT (src->succs) != 2
5485 /* Verify that all targets will be TARGET. Specifically, the
5486 edge that is not E must also go to TARGET. */
5487 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5488 return NULL;
5490 i = gsi_last_bb (src);
5491 if (gsi_end_p (i))
5492 return NULL;
5494 stmt = gsi_stmt (i);
5496 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5498 gsi_remove (&i, true);
5499 e = ssa_redirect_edge (e, target);
5500 e->flags = EDGE_FALLTHRU;
5501 return e;
5504 return NULL;
5508 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5509 edge representing the redirected branch. */
5511 static edge
5512 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5514 basic_block bb = e->src;
5515 gimple_stmt_iterator gsi;
5516 edge ret;
5517 gimple stmt;
5519 if (e->flags & EDGE_ABNORMAL)
5520 return NULL;
5522 if (e->dest == dest)
5523 return NULL;
5525 if (e->flags & EDGE_EH)
5526 return redirect_eh_edge (e, dest);
5528 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5530 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5531 if (ret)
5532 return ret;
5535 gsi = gsi_last_bb (bb);
5536 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5538 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5540 case GIMPLE_COND:
5541 /* For COND_EXPR, we only need to redirect the edge. */
5542 break;
5544 case GIMPLE_GOTO:
5545 /* No non-abnormal edges should lead from a non-simple goto, and
5546 simple ones should be represented implicitly. */
5547 gcc_unreachable ();
5549 case GIMPLE_SWITCH:
5551 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5552 tree label = gimple_block_label (dest);
5553 tree cases = get_cases_for_edge (e, switch_stmt);
5555 /* If we have a list of cases associated with E, then use it
5556 as it's a lot faster than walking the entire case vector. */
5557 if (cases)
5559 edge e2 = find_edge (e->src, dest);
5560 tree last, first;
5562 first = cases;
5563 while (cases)
5565 last = cases;
5566 CASE_LABEL (cases) = label;
5567 cases = CASE_CHAIN (cases);
5570 /* If there was already an edge in the CFG, then we need
5571 to move all the cases associated with E to E2. */
5572 if (e2)
5574 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5576 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5577 CASE_CHAIN (cases2) = first;
5579 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5581 else
5583 size_t i, n = gimple_switch_num_labels (switch_stmt);
5585 for (i = 0; i < n; i++)
5587 tree elt = gimple_switch_label (switch_stmt, i);
5588 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5589 CASE_LABEL (elt) = label;
5593 break;
5595 case GIMPLE_ASM:
5597 gasm *asm_stmt = as_a <gasm *> (stmt);
5598 int i, n = gimple_asm_nlabels (asm_stmt);
5599 tree label = NULL;
5601 for (i = 0; i < n; ++i)
5603 tree cons = gimple_asm_label_op (asm_stmt, i);
5604 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5606 if (!label)
5607 label = gimple_block_label (dest);
5608 TREE_VALUE (cons) = label;
5612 /* If we didn't find any label matching the former edge in the
5613 asm labels, we must be redirecting the fallthrough
5614 edge. */
5615 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5617 break;
5619 case GIMPLE_RETURN:
5620 gsi_remove (&gsi, true);
5621 e->flags |= EDGE_FALLTHRU;
5622 break;
5624 case GIMPLE_OMP_RETURN:
5625 case GIMPLE_OMP_CONTINUE:
5626 case GIMPLE_OMP_SECTIONS_SWITCH:
5627 case GIMPLE_OMP_FOR:
5628 /* The edges from OMP constructs can be simply redirected. */
5629 break;
5631 case GIMPLE_EH_DISPATCH:
5632 if (!(e->flags & EDGE_FALLTHRU))
5633 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5634 break;
5636 case GIMPLE_TRANSACTION:
5637 /* The ABORT edge has a stored label associated with it, otherwise
5638 the edges are simply redirectable. */
5639 if (e->flags == 0)
5640 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5641 gimple_block_label (dest));
5642 break;
5644 default:
5645 /* Otherwise it must be a fallthru edge, and we don't need to
5646 do anything besides redirecting it. */
5647 gcc_assert (e->flags & EDGE_FALLTHRU);
5648 break;
5651 /* Update/insert PHI nodes as necessary. */
5653 /* Now update the edges in the CFG. */
5654 e = ssa_redirect_edge (e, dest);
5656 return e;
5659 /* Returns true if it is possible to remove edge E by redirecting
5660 it to the destination of the other edge from E->src. */
5662 static bool
5663 gimple_can_remove_branch_p (const_edge e)
5665 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5666 return false;
5668 return true;
5671 /* Simple wrapper, as we can always redirect fallthru edges. */
5673 static basic_block
5674 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5676 e = gimple_redirect_edge_and_branch (e, dest);
5677 gcc_assert (e);
5679 return NULL;
5683 /* Splits basic block BB after statement STMT (but at least after the
5684 labels). If STMT is NULL, BB is split just after the labels. */
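/* E.g. splitting "L: a_1 = 1; b_2 = 2; c_3 = 3;" at "b_2 = 2" leaves
   "L: a_1 = 1; b_2 = 2;" in BB and moves "c_3 = 3;", together with
   BB's outgoing edges, to the returned block (illustrative). */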
5686 static basic_block
5687 gimple_split_block (basic_block bb, void *stmt)
5689 gimple_stmt_iterator gsi;
5690 gimple_stmt_iterator gsi_tgt;
5691 gimple act;
5692 gimple_seq list;
5693 basic_block new_bb;
5694 edge e;
5695 edge_iterator ei;
5697 new_bb = create_empty_bb (bb);
5699 /* Redirect the outgoing edges. */
5700 new_bb->succs = bb->succs;
5701 bb->succs = NULL;
5702 FOR_EACH_EDGE (e, ei, new_bb->succs)
5703 e->src = new_bb;
5705 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5706 stmt = NULL;
5708 /* Move everything from GSI to the new basic block. */
5709 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5711 act = gsi_stmt (gsi);
5712 if (gimple_code (act) == GIMPLE_LABEL)
5713 continue;
5715 if (!stmt)
5716 break;
5718 if (stmt == act)
5720 gsi_next (&gsi);
5721 break;
5725 if (gsi_end_p (gsi))
5726 return new_bb;
5728 /* Split the statement list; avoid creating new containers as this
5729 brings ugly quadratic memory consumption in the inliner.
5730 (We are still quadratic since we need to update stmt BB pointers,
5731 sadly.) */
5732 gsi_split_seq_before (&gsi, &list);
5733 set_bb_seq (new_bb, list);
5734 for (gsi_tgt = gsi_start (list);
5735 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5736 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5738 return new_bb;
5742 /* Moves basic block BB after block AFTER. */
5744 static bool
5745 gimple_move_block_after (basic_block bb, basic_block after)
5747 if (bb->prev_bb == after)
5748 return true;
5750 unlink_block (bb);
5751 link_block (bb, after);
5753 return true;
5757 /* Return TRUE if block BB has no executable statements, otherwise return
5758 FALSE. */
5760 static bool
5761 gimple_empty_block_p (basic_block bb)
5763 /* BB must have no executable statements. */
5764 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5765 if (phi_nodes (bb))
5766 return false;
5767 if (gsi_end_p (gsi))
5768 return true;
5769 if (is_gimple_debug (gsi_stmt (gsi)))
5770 gsi_next_nondebug (&gsi);
5771 return gsi_end_p (gsi);
5775 /* Split a basic block if it ends with a conditional branch and if the
5776 other part of the block is not empty. */
5778 static basic_block
5779 gimple_split_block_before_cond_jump (basic_block bb)
5781 gimple last, split_point;
5782 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5783 if (gsi_end_p (gsi))
5784 return NULL;
5785 last = gsi_stmt (gsi);
5786 if (gimple_code (last) != GIMPLE_COND
5787 && gimple_code (last) != GIMPLE_SWITCH)
5788 return NULL;
5789 gsi_prev_nondebug (&gsi);
5790 split_point = gsi_stmt (gsi);
5791 return split_block (bb, split_point)->dest;
5795 /* Return true if basic_block can be duplicated. */
5797 static bool
5798 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5800 return true;
5803 /* Create a duplicate of the basic block BB. NOTE: This does not
5804 preserve SSA form. */
5806 static basic_block
5807 gimple_duplicate_bb (basic_block bb)
5809 basic_block new_bb;
5810 gimple_stmt_iterator gsi_tgt;
5812 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5814 /* Copy the PHI nodes. We ignore PHI node arguments here because
5815 the incoming edges have not been set up yet. */
5816 for (gphi_iterator gpi = gsi_start_phis (bb);
5817 !gsi_end_p (gpi);
5818 gsi_next (&gpi))
5820 gphi *phi, *copy;
5821 phi = gpi.phi ();
5822 copy = create_phi_node (NULL_TREE, new_bb);
5823 create_new_def_for (gimple_phi_result (phi), copy,
5824 gimple_phi_result_ptr (copy));
5825 gimple_set_uid (copy, gimple_uid (phi));
5828 gsi_tgt = gsi_start_bb (new_bb);
5829 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5830 !gsi_end_p (gsi);
5831 gsi_next (&gsi))
5833 def_operand_p def_p;
5834 ssa_op_iter op_iter;
5835 tree lhs;
5836 gimple stmt, copy;
5838 stmt = gsi_stmt (gsi);
5839 if (gimple_code (stmt) == GIMPLE_LABEL)
5840 continue;
5842 /* Don't duplicate label debug stmts. */
5843 if (gimple_debug_bind_p (stmt)
5844 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5845 == LABEL_DECL)
5846 continue;
5848 /* Create a new copy of STMT and duplicate STMT's virtual
5849 operands. */
5850 copy = gimple_copy (stmt);
5851 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5853 maybe_duplicate_eh_stmt (copy, stmt);
5854 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5856 /* When copying around a stmt writing into a local non-user
5857 aggregate, make sure it won't share stack slot with other
5858 vars. */
5859 lhs = gimple_get_lhs (stmt);
5860 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5862 tree base = get_base_address (lhs);
5863 if (base
5864 && (TREE_CODE (base) == VAR_DECL
5865 || TREE_CODE (base) == RESULT_DECL)
5866 && DECL_IGNORED_P (base)
5867 && !TREE_STATIC (base)
5868 && !DECL_EXTERNAL (base)
5869 && (TREE_CODE (base) != VAR_DECL
5870 || !DECL_HAS_VALUE_EXPR_P (base)))
5871 DECL_NONSHAREABLE (base) = 1;
5874 /* Create new names for all the definitions created by COPY and
5875 add replacement mappings for each new name. */
5876 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5877 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5880 return new_bb;
5883 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5885 static void
5886 add_phi_args_after_copy_edge (edge e_copy)
5888 basic_block bb, bb_copy = e_copy->src, dest;
5889 edge e;
5890 edge_iterator ei;
5891 gphi *phi, *phi_copy;
5892 tree def;
5893 gphi_iterator psi, psi_copy;
5895 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5896 return;
5898 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5900 if (e_copy->dest->flags & BB_DUPLICATED)
5901 dest = get_bb_original (e_copy->dest);
5902 else
5903 dest = e_copy->dest;
5905 e = find_edge (bb, dest);
5906 if (!e)
5908 /* During loop unrolling the target of the latch edge is copied.
5909 In this case we are not looking for the edge to DEST, but for
5910 the edge to the duplicated block whose original was DEST. */
5911 FOR_EACH_EDGE (e, ei, bb->succs)
5913 if ((e->dest->flags & BB_DUPLICATED)
5914 && get_bb_original (e->dest) == dest)
5915 break;
5918 gcc_assert (e != NULL);
5921 for (psi = gsi_start_phis (e->dest),
5922 psi_copy = gsi_start_phis (e_copy->dest);
5923 !gsi_end_p (psi);
5924 gsi_next (&psi), gsi_next (&psi_copy))
5926 phi = psi.phi ();
5927 phi_copy = psi_copy.phi ();
5928 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5929 add_phi_arg (phi_copy, def, e_copy,
5930 gimple_phi_arg_location_from_edge (phi, e));
5935 /* Basic block BB_COPY was created by code duplication. Add phi node
5936 arguments for edges going out of BB_COPY. The blocks that were
5937 duplicated have BB_DUPLICATED set. */
5939 void
5940 add_phi_args_after_copy_bb (basic_block bb_copy)
5942 edge e_copy;
5943 edge_iterator ei;
5945 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5947 add_phi_args_after_copy_edge (e_copy);
5951 /* Blocks in REGION_COPY array of length N_REGION were created by
5952 duplication of basic blocks. Add phi node arguments for edges
5953 going from these blocks. If E_COPY is not NULL, also add
5954 phi node arguments for its destination. */
5956 void
5957 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5958 edge e_copy)
5960 unsigned i;
5962 for (i = 0; i < n_region; i++)
5963 region_copy[i]->flags |= BB_DUPLICATED;
5965 for (i = 0; i < n_region; i++)
5966 add_phi_args_after_copy_bb (region_copy[i]);
5967 if (e_copy)
5968 add_phi_args_after_copy_edge (e_copy);
5970 for (i = 0; i < n_region; i++)
5971 region_copy[i]->flags &= ~BB_DUPLICATED;
5974 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5975 important exit edge EXIT. By important we mean that no SSA name defined
5976 inside region is live over the other exit edges of the region. All entry
5977 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5978 to the duplicate of the region. Dominance and loop information is
5979 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5980 UPDATE_DOMINANCE is false then we assume that the caller will update the
5981 dominance information after calling this function. The new basic
5982 blocks are stored to REGION_COPY in the same order as they had in REGION,
5983 provided that REGION_COPY is not NULL.
5984 The function returns false if it is unable to copy the region,
5985 true otherwise. */
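/* For the primary use, loop header copying: ENTRY is the preheader
   edge into the header and EXIT the edge from the header towards the
   latch; after duplication the copied header guards the loop, and the
   code below makes EXIT's destination the new header and its source
   the new latch (illustrative summary). */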
5987 bool
5988 gimple_duplicate_sese_region (edge entry, edge exit,
5989 basic_block *region, unsigned n_region,
5990 basic_block *region_copy,
5991 bool update_dominance)
5993 unsigned i;
5994 bool free_region_copy = false, copying_header = false;
5995 struct loop *loop = entry->dest->loop_father;
5996 edge exit_copy;
5997 vec<basic_block> doms;
5998 edge redirected;
5999 int total_freq = 0, entry_freq = 0;
6000 gcov_type total_count = 0, entry_count = 0;
6002 if (!can_copy_bbs_p (region, n_region))
6003 return false;
6005 /* Some sanity checking. Note that we do not check for all possible
6006 misuses of the function. I.e. if you ask to copy something weird,
6007 it will work, but the state of the structures probably will not be
6008 correct. */
6009 for (i = 0; i < n_region; i++)
6011 /* We do not handle subloops, i.e. all the blocks must belong to the
6012 same loop. */
6013 if (region[i]->loop_father != loop)
6014 return false;
6016 if (region[i] != entry->dest
6017 && region[i] == loop->header)
6018 return false;
6021 /* In case the function is used for loop header copying (which is the primary
6022 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6023 if (loop->header == entry->dest)
6025 copying_header = true;
6027 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6028 return false;
6030 for (i = 0; i < n_region; i++)
6031 if (region[i] != exit->src
6032 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6033 return false;
6036 initialize_original_copy_tables ();
6038 if (copying_header)
6039 set_loop_copy (loop, loop_outer (loop));
6040 else
6041 set_loop_copy (loop, loop);
6043 if (!region_copy)
6045 region_copy = XNEWVEC (basic_block, n_region);
6046 free_region_copy = true;
6049 /* Record blocks outside the region that are dominated by something
6050 inside. */
6051 if (update_dominance)
6053 doms.create (0);
6054 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6057 if (entry->dest->count)
6059 total_count = entry->dest->count;
6060 entry_count = entry->count;
6061 /* Fix up corner cases, to avoid division by zero or creation of negative
6062 frequencies. */
6063 if (entry_count > total_count)
6064 entry_count = total_count;
6066 else
6068 total_freq = entry->dest->frequency;
6069 entry_freq = EDGE_FREQUENCY (entry);
6070 /* Fix up corner cases, to avoid division by zero or creation of negative
6071 frequencies. */
6072 if (total_freq == 0)
6073 total_freq = 1;
6074 else if (entry_freq > total_freq)
6075 entry_freq = total_freq;
6078 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6079 split_edge_bb_loc (entry), update_dominance);
6080 if (total_count)
6082 scale_bbs_frequencies_gcov_type (region, n_region,
6083 total_count - entry_count,
6084 total_count);
6085 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6086 total_count);
6088 else
6090 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6091 total_freq);
6092 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6095 if (copying_header)
6097 loop->header = exit->dest;
6098 loop->latch = exit->src;
6101 /* Redirect the entry and add the phi node arguments. */
6102 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6103 gcc_assert (redirected != NULL);
6104 flush_pending_stmts (entry);
6106 /* Concerning updating of dominators: We must recount dominators
6107 for entry block and its copy. Anything that is outside of the
6108 region, but was dominated by something inside needs recounting as
6109 well. */
6110 if (update_dominance)
6112 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6113 doms.safe_push (get_bb_original (entry->dest));
6114 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6115 doms.release ();
6118 /* Add the other PHI node arguments. */
6119 add_phi_args_after_copy (region_copy, n_region, NULL);
6121 if (free_region_copy)
6122 free (region_copy);
6124 free_original_copy_tables ();
6125 return true;
6128 /* Checks if BB is part of the region defined by N_REGION BBS. */
6129 static bool
6130 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6132 unsigned int n;
6134 for (n = 0; n < n_region; n++)
6136 if (bb == bbs[n])
6137 return true;
6139 return false;
6142 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6143 are stored to REGION_COPY in the same order in which they appear
6144 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6145 the region, EXIT an exit from it. The condition guarding EXIT
6146 is moved to ENTRY. Returns true if duplication succeeds, false
6147 otherwise.
6149 For example,
6151 some_code;
6152 if (cond)
6153 { }
6154 else
6155 { }
6157 is transformed to
6159 if (cond)
6160 {
6161 some_code;
6162 }
6164 else
6165 {
6166 some_code;
6167 }
6170 */
6171 bool
6172 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6173 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6174 basic_block *region_copy ATTRIBUTE_UNUSED)
6176 unsigned i;
6177 bool free_region_copy = false;
6178 struct loop *loop = exit->dest->loop_father;
6179 struct loop *orig_loop = entry->dest->loop_father;
6180 basic_block switch_bb, entry_bb, nentry_bb;
6181 vec<basic_block> doms;
6182 int total_freq = 0, exit_freq = 0;
6183 gcov_type total_count = 0, exit_count = 0;
6184 edge exits[2], nexits[2], e;
6185 gimple_stmt_iterator gsi;
6186 gimple cond_stmt;
6187 edge sorig, snew;
6188 basic_block exit_bb;
6189 gphi_iterator psi;
6190 gphi *phi;
6191 tree def;
6192 struct loop *target, *aloop, *cloop;
6194 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6195 exits[0] = exit;
6196 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6198 if (!can_copy_bbs_p (region, n_region))
6199 return false;
6201 initialize_original_copy_tables ();
6202 set_loop_copy (orig_loop, loop);
6204 target = loop;
6205 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6207 if (bb_part_of_region_p (aloop->header, region, n_region))
6209 cloop = duplicate_loop (aloop, target);
6210 duplicate_subloops (aloop, cloop);
6214 if (!region_copy)
6216 region_copy = XNEWVEC (basic_block, n_region);
6217 free_region_copy = true;
6220 gcc_assert (!need_ssa_update_p (cfun));
6222 /* Record blocks outside the region that are dominated by something
6223 inside. */
6224 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6226 if (exit->src->count)
6228 total_count = exit->src->count;
6229 exit_count = exit->count;
6230 /* Fix up corner cases, to avoid division by zero or creation of negative
6231 frequencies. */
6232 if (exit_count > total_count)
6233 exit_count = total_count;
6235 else
6237 total_freq = exit->src->frequency;
6238 exit_freq = EDGE_FREQUENCY (exit);
6239 /* Fix up corner cases, to avoid division by zero or creation of negative
6240 frequencies. */
6241 if (total_freq == 0)
6242 total_freq = 1;
6243 if (exit_freq > total_freq)
6244 exit_freq = total_freq;
6247 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6248 split_edge_bb_loc (exit), true);
6249 if (total_count)
6251 scale_bbs_frequencies_gcov_type (region, n_region,
6252 total_count - exit_count,
6253 total_count);
6254 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6255 total_count);
6257 else
6259 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6260 total_freq);
6261 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6264 /* Create the switch block, and put the exit condition to it. */
6265 entry_bb = entry->dest;
6266 nentry_bb = get_bb_copy (entry_bb);
6267 if (!last_stmt (entry->src)
6268 || !stmt_ends_bb_p (last_stmt (entry->src)))
6269 switch_bb = entry->src;
6270 else
6271 switch_bb = split_edge (entry);
6272 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6274 gsi = gsi_last_bb (switch_bb);
6275 cond_stmt = last_stmt (exit->src);
6276 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6277 cond_stmt = gimple_copy (cond_stmt);
6279 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6281 sorig = single_succ_edge (switch_bb);
6282 sorig->flags = exits[1]->flags;
6283 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6285 /* Register the new edge from SWITCH_BB in loop exit lists. */
6286 rescan_loop_exit (snew, true, false);
6288 /* Add the PHI node arguments. */
6289 add_phi_args_after_copy (region_copy, n_region, snew);
6291 /* Get rid of now superfluous conditions and associated edges (and phi node
6292 arguments). */
6293 exit_bb = exit->dest;
6295 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6296 PENDING_STMT (e) = NULL;
6298 /* The latch of ORIG_LOOP was copied, and so was the backedge
6299 to the original header. We redirect this backedge to EXIT_BB. */
6300 for (i = 0; i < n_region; i++)
6301 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6303 gcc_assert (single_succ_edge (region_copy[i]));
6304 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6305 PENDING_STMT (e) = NULL;
6306 for (psi = gsi_start_phis (exit_bb);
6307 !gsi_end_p (psi);
6308 gsi_next (&psi))
6310 phi = psi.phi ();
6311 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6312 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6315 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6316 PENDING_STMT (e) = NULL;
6318 /* Anything that is outside of the region, but was dominated by something
6319 inside needs to update dominance info. */
6320 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6321 doms.release ();
6322 /* Update the SSA web. */
6323 update_ssa (TODO_update_ssa);
6325 if (free_region_copy)
6326 free (region_copy);
6328 free_original_copy_tables ();
6329 return true;
6332 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6333 adding blocks when the dominator traversal reaches EXIT. This
6334 function silently assumes that ENTRY strictly dominates EXIT. */
6336 void
6337 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6338 vec<basic_block> *bbs_p)
6340 basic_block son;
6342 for (son = first_dom_son (CDI_DOMINATORS, entry);
6343 son;
6344 son = next_dom_son (CDI_DOMINATORS, son))
6346 bbs_p->safe_push (son);
6347 if (son != exit)
6348 gather_blocks_in_sese_region (son, exit, bbs_p);
6352 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6353 The duplicates are recorded in VARS_MAP. */
6355 static void
6356 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6357 tree to_context)
6359 tree t = *tp, new_t;
6360 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6362 if (DECL_CONTEXT (t) == to_context)
6363 return;
6365 bool existed;
6366 tree &loc = vars_map->get_or_insert (t, &existed);
6368 if (!existed)
6370 if (SSA_VAR_P (t))
6372 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6373 add_local_decl (f, new_t);
6375 else
6377 gcc_assert (TREE_CODE (t) == CONST_DECL);
6378 new_t = copy_node (t);
6380 DECL_CONTEXT (new_t) = to_context;
6382 loc = new_t;
6384 else
6385 new_t = loc;
6387 *tp = new_t;
6391 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6392 VARS_MAP maps old ssa names and var_decls to the new ones. */
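/* Illustrative sketch: to move a name such as i_3 (underlying decl
   "i"), the code below first duplicates "i" via
   replace_by_duplicate_decl, then mints a new SSA name for the copy in
   TO_CONTEXT, keeping a default definition a default definition there. */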
6394 static tree
6395 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6396 tree to_context)
6398 tree new_name;
6400 gcc_assert (!virtual_operand_p (name));
6402 tree *loc = vars_map->get (name);
6404 if (!loc)
6406 tree decl = SSA_NAME_VAR (name);
6407 if (decl)
6409 replace_by_duplicate_decl (&decl, vars_map, to_context);
6410 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6411 decl, SSA_NAME_DEF_STMT (name));
6412 if (SSA_NAME_IS_DEFAULT_DEF (name))
6413 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6414 decl, new_name);
6416 else
6417 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6418 name, SSA_NAME_DEF_STMT (name));
6420 vars_map->put (name, new_name);
6422 else
6423 new_name = *loc;
6425 return new_name;
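/* Illustrative sketch only: VARS_MAP memoizes results, so mapping the
   same name twice yields the same duplicate, e.g.:

     hash_map<tree, tree> map;
     tree n1 = replace_ssa_name (name, &map, child_fndecl);
     tree n2 = replace_ssa_name (name, &map, child_fndecl);
     gcc_assert (n1 == n2);

   where child_fndecl is a hypothetical placeholder for whatever
   FUNCTION_DECL is to own the new names.  */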
6428 struct move_stmt_d
6430 tree orig_block;
6431 tree new_block;
6432 tree from_context;
6433 tree to_context;
6434 hash_map<tree, tree> *vars_map;
6435 htab_t new_label_map;
6436 hash_map<void *, void *> *eh_map;
6437 bool remap_decls_p;
6440 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6441 contained in *TP to NEW_BLOCK if it was ORIG_BLOCK previously, and change
6442 the DECL_CONTEXT of every local variable referenced in *TP. */
6444 static tree
6445 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6447 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6448 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6449 tree t = *tp;
6451 if (EXPR_P (t))
6453 tree block = TREE_BLOCK (t);
6454 if (block == p->orig_block
6455 || (p->orig_block == NULL_TREE
6456 && block != NULL_TREE))
6457 TREE_SET_BLOCK (t, p->new_block);
6458 #ifdef ENABLE_CHECKING
6459 else if (block != NULL_TREE)
6461 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6462 block = BLOCK_SUPERCONTEXT (block);
6463 gcc_assert (block == p->orig_block);
6465 #endif
6467 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6469 if (TREE_CODE (t) == SSA_NAME)
6470 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6471 else if (TREE_CODE (t) == LABEL_DECL)
6473 if (p->new_label_map)
6475 struct tree_map in, *out;
6476 in.base.from = t;
6477 out = (struct tree_map *)
6478 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6479 if (out)
6480 *tp = t = out->to;
6483 DECL_CONTEXT (t) = p->to_context;
6485 else if (p->remap_decls_p)
6487 /* Replace T with its duplicate. T should no longer appear in the
6488 parent function, so this looks wasteful; however, it may appear
6489 in referenced_vars, and more importantly, as virtual operands of
6490 statements, and in alias lists of other variables. It would be
6491 quite difficult to expunge it from all those places. ??? It might
6492 suffice to do this for addressable variables. */
6493 if ((TREE_CODE (t) == VAR_DECL
6494 && !is_global_var (t))
6495 || TREE_CODE (t) == CONST_DECL)
6496 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6498 *walk_subtrees = 0;
6500 else if (TYPE_P (t))
6501 *walk_subtrees = 0;
6503 return NULL_TREE;
6506 /* Helper for move_stmt_r. Given an EH region number for the source
6507 function, map that to the duplicate EH region number in the dest. */
6509 static int
6510 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6512 eh_region old_r, new_r;
6514 old_r = get_eh_region_from_number (old_nr);
6515 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6517 return new_r->index;
6520 /* Similar, but operate on INTEGER_CSTs. */
6522 static tree
6523 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6525 int old_nr, new_nr;
6527 old_nr = tree_to_shwi (old_t_nr);
6528 new_nr = move_stmt_eh_region_nr (old_nr, p);
6530 return build_int_cst (integer_type_node, new_nr);
6533 /* Like move_stmt_op, but for gimple statements.
6535 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6536 contained in the current statement in *GSI_P and change the
6537 DECL_CONTEXT of every local variable referenced in the current
6538 statement. */
6540 static tree
6541 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6542 struct walk_stmt_info *wi)
6544 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6545 gimple stmt = gsi_stmt (*gsi_p);
6546 tree block = gimple_block (stmt);
6548 if (block == p->orig_block
6549 || (p->orig_block == NULL_TREE
6550 && block != NULL_TREE))
6551 gimple_set_block (stmt, p->new_block);
6553 switch (gimple_code (stmt))
6555 case GIMPLE_CALL:
6556 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6558 tree r, fndecl = gimple_call_fndecl (stmt);
6559 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6560 switch (DECL_FUNCTION_CODE (fndecl))
6562 case BUILT_IN_EH_COPY_VALUES:
6563 r = gimple_call_arg (stmt, 1);
6564 r = move_stmt_eh_region_tree_nr (r, p);
6565 gimple_call_set_arg (stmt, 1, r);
6566 /* FALLTHRU */
6568 case BUILT_IN_EH_POINTER:
6569 case BUILT_IN_EH_FILTER:
6570 r = gimple_call_arg (stmt, 0);
6571 r = move_stmt_eh_region_tree_nr (r, p);
6572 gimple_call_set_arg (stmt, 0, r);
6573 break;
6575 default:
6576 break;
6579 break;
6581 case GIMPLE_RESX:
6583 gresx *resx_stmt = as_a <gresx *> (stmt);
6584 int r = gimple_resx_region (resx_stmt);
6585 r = move_stmt_eh_region_nr (r, p);
6586 gimple_resx_set_region (resx_stmt, r);
6588 break;
6590 case GIMPLE_EH_DISPATCH:
6592 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6593 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6594 r = move_stmt_eh_region_nr (r, p);
6595 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6597 break;
6599 case GIMPLE_OMP_RETURN:
6600 case GIMPLE_OMP_CONTINUE:
6601 break;
6602 default:
6603 if (is_gimple_omp (stmt))
6605 /* Do not remap variables inside OMP directives. Variables
6606 referenced in clauses and directive header belong to the
6607 parent function and should not be moved into the child
6608 function. */
6609 bool save_remap_decls_p = p->remap_decls_p;
6610 p->remap_decls_p = false;
6611 *handled_ops_p = true;
6613 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6614 move_stmt_op, wi);
6616 p->remap_decls_p = save_remap_decls_p;
6618 break;
6621 return NULL_TREE;
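/* Illustrative sketch only: the two callbacks above are meant to be
   driven by the generic statement walker, with the move_stmt_d
   context threaded through walk_stmt_info::info, roughly:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &d;   -- a filled-in struct move_stmt_d
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

   move_block_to_fn below does exactly this for every statement.  */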
6624 /* Move basic block BB from function CFUN to function DEST_FN. The
6625 block is moved out of the original linked list and placed after
6626 block AFTER in the new list. Also, the block is removed from the
6627 original array of blocks and placed in DEST_FN's array of blocks.
6628 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6629 updated to reflect the moved edges.
6631 The local variables are remapped to new instances; VARS_MAP is used
6632 to record the mapping. */
6634 static void
6635 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6636 basic_block after, bool update_edge_count_p,
6637 struct move_stmt_d *d)
6639 struct control_flow_graph *cfg;
6640 edge_iterator ei;
6641 edge e;
6642 gimple_stmt_iterator si;
6643 unsigned old_len, new_len;
6645 /* Remove BB from dominance structures. */
6646 delete_from_dominance_info (CDI_DOMINATORS, bb);
6648 /* Move BB from its current loop to the copy in the new function. */
6649 if (current_loops)
6651 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6652 if (new_loop)
6653 bb->loop_father = new_loop;
6656 /* Link BB to the new linked list. */
6657 move_block_after (bb, after);
6659 /* Update the edge count in the corresponding flowgraphs. */
6660 if (update_edge_count_p)
6661 FOR_EACH_EDGE (e, ei, bb->succs)
6663 cfun->cfg->x_n_edges--;
6664 dest_cfun->cfg->x_n_edges++;
6667 /* Remove BB from the original basic block array. */
6668 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6669 cfun->cfg->x_n_basic_blocks--;
6671 /* Grow DEST_CFUN's basic block array if needed. */
6672 cfg = dest_cfun->cfg;
6673 cfg->x_n_basic_blocks++;
6674 if (bb->index >= cfg->x_last_basic_block)
6675 cfg->x_last_basic_block = bb->index + 1;
6677 old_len = vec_safe_length (cfg->x_basic_block_info);
6678 if ((unsigned) cfg->x_last_basic_block >= old_len)
6680 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6681 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6684 (*cfg->x_basic_block_info)[bb->index] = bb;
6686 /* Remap the variables in phi nodes. */
6687 for (gphi_iterator psi = gsi_start_phis (bb);
6688 !gsi_end_p (psi); )
6690 gphi *phi = psi.phi ();
6691 use_operand_p use;
6692 tree op = PHI_RESULT (phi);
6693 ssa_op_iter oi;
6694 unsigned i;
6696 if (virtual_operand_p (op))
6698 /* Remove the phi nodes for virtual operands (alias analysis will be
6699 run for the new function, anyway). */
6700 remove_phi_node (&psi, true);
6701 continue;
6704 SET_PHI_RESULT (phi,
6705 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6706 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6708 op = USE_FROM_PTR (use);
6709 if (TREE_CODE (op) == SSA_NAME)
6710 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6713 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6715 location_t locus = gimple_phi_arg_location (phi, i);
6716 tree block = LOCATION_BLOCK (locus);
6718 if (locus == UNKNOWN_LOCATION)
6719 continue;
6720 if (d->orig_block == NULL_TREE || block == d->orig_block)
6722 if (d->new_block == NULL_TREE)
6723 locus = LOCATION_LOCUS (locus);
6724 else
6725 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6726 gimple_phi_arg_set_location (phi, i, locus);
6730 gsi_next (&psi);
6733 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6735 gimple stmt = gsi_stmt (si);
6736 struct walk_stmt_info wi;
6738 memset (&wi, 0, sizeof (wi));
6739 wi.info = d;
6740 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6742 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6744 tree label = gimple_label_label (label_stmt);
6745 int uid = LABEL_DECL_UID (label);
6747 gcc_assert (uid > -1);
6749 old_len = vec_safe_length (cfg->x_label_to_block_map);
6750 if (old_len <= (unsigned) uid)
6752 new_len = 3 * uid / 2 + 1;
6753 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6756 (*cfg->x_label_to_block_map)[uid] = bb;
6757 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6759 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6761 if (uid >= dest_cfun->cfg->last_label_uid)
6762 dest_cfun->cfg->last_label_uid = uid + 1;
6765 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6766 remove_stmt_from_eh_lp_fn (cfun, stmt);
6768 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6769 gimple_remove_stmt_histograms (cfun, stmt);
6771 /* We cannot leave any operands allocated from the operand caches of
6772 the current function. */
6773 free_stmt_operands (cfun, stmt);
6774 push_cfun (dest_cfun);
6775 update_stmt (stmt);
6776 pop_cfun ();
6779 FOR_EACH_EDGE (e, ei, bb->succs)
6780 if (e->goto_locus != UNKNOWN_LOCATION)
6782 tree block = LOCATION_BLOCK (e->goto_locus);
6783 if (d->orig_block == NULL_TREE
6784 || block == d->orig_block)
6785 e->goto_locus = d->new_block ?
6786 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6787 LOCATION_LOCUS (e->goto_locus);
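/* Illustrative sketch only: callers walk the region in order and
   thread AFTER through the loop so the blocks keep their relative
   order inside DEST_CFUN:

     FOR_EACH_VEC_ELT (bbs, i, bb)
       {
         move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
         after = bb;
       }

   as move_sese_region_to_fn does below.  */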
6791 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6792 the outermost EH region. Use REGION as the incoming base EH region. */
6794 static eh_region
6795 find_outermost_region_in_block (struct function *src_cfun,
6796 basic_block bb, eh_region region)
6798 gimple_stmt_iterator si;
6800 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6802 gimple stmt = gsi_stmt (si);
6803 eh_region stmt_region;
6804 int lp_nr;
6806 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6807 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6808 if (stmt_region)
6810 if (region == NULL)
6811 region = stmt_region;
6812 else if (stmt_region != region)
6814 region = eh_region_outermost (src_cfun, stmt_region, region);
6815 gcc_assert (region != NULL);
6820 return region;
6823 static tree
6824 new_label_mapper (tree decl, void *data)
6826 htab_t hash = (htab_t) data;
6827 struct tree_map *m;
6828 void **slot;
6830 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6832 m = XNEW (struct tree_map);
6833 m->hash = DECL_UID (decl);
6834 m->base.from = decl;
6835 m->to = create_artificial_label (UNKNOWN_LOCATION);
6836 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6837 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6838 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6840 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6841 gcc_assert (*slot == NULL);
6843 *slot = m;
6845 return m->to;
6848 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6849 subblocks. */
6851 static void
6852 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6853 tree to_context)
6855 tree *tp, t;
6857 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6859 t = *tp;
6860 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6861 continue;
6862 replace_by_duplicate_decl (&t, vars_map, to_context);
6863 if (t != *tp)
6865 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6867 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6868 DECL_HAS_VALUE_EXPR_P (t) = 1;
6870 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6871 *tp = t;
6875 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6876 replace_block_vars_by_duplicates (block, vars_map, to_context);
6879 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6880 from FN1 to FN2. */
6882 static void
6883 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6884 struct loop *loop)
6886 /* Discard it from the old loop array. */
6887 (*get_loops (fn1))[loop->num] = NULL;
6889 /* Place it in the new loop array, assigning it a new number. */
6890 loop->num = number_of_loops (fn2);
6891 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6893 /* Recurse to children. */
6894 for (loop = loop->inner; loop; loop = loop->next)
6895 fixup_loop_arrays_after_move (fn1, fn2, loop);
6898 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6899 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6900 single basic block in the original CFG and the new basic block is
6901 returned. DEST_CFUN must not have a CFG yet.
6903 Note that the region need not be a pure SESE region. Blocks inside
6904 the region may contain calls to abort/exit. The only restriction
6905 is that ENTRY_BB should be the only entry point and it must
6906 dominate EXIT_BB.
6908 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6909 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6910 to the new function.
6912 All local variables referenced in the region are assumed to be in
6913 the corresponding BLOCK_VARS and unexpanded variable lists
6914 associated with DEST_CFUN. */
6916 basic_block
6917 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6918 basic_block exit_bb, tree orig_block)
6920 vec<basic_block> bbs, dom_bbs;
6921 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6922 basic_block after, bb, *entry_pred, *exit_succ, abb;
6923 struct function *saved_cfun = cfun;
6924 int *entry_flag, *exit_flag;
6925 unsigned *entry_prob, *exit_prob;
6926 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6927 edge e;
6928 edge_iterator ei;
6929 htab_t new_label_map;
6930 hash_map<void *, void *> *eh_map;
6931 struct loop *loop = entry_bb->loop_father;
6932 struct loop *loop0 = get_loop (saved_cfun, 0);
6933 struct move_stmt_d d;
6935 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6936 region. */
6937 gcc_assert (entry_bb != exit_bb
6938 && (!exit_bb
6939 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6941 /* Collect all the blocks in the region. Manually add ENTRY_BB
6942 because it won't be added by gather_blocks_in_sese_region. */
6943 bbs.create (0);
6944 bbs.safe_push (entry_bb);
6945 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6947 /* The blocks that used to be dominated by something in BBS will now be
6948 dominated by the new block. */
6949 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6950 bbs.address (),
6951 bbs.length ());
6953 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6954 the predecessor edges to ENTRY_BB and the successor edges to
6955 EXIT_BB so that we can re-attach them to the new basic block that
6956 will replace the region. */
6957 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6958 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6959 entry_flag = XNEWVEC (int, num_entry_edges);
6960 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6961 i = 0;
6962 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6964 entry_prob[i] = e->probability;
6965 entry_flag[i] = e->flags;
6966 entry_pred[i++] = e->src;
6967 remove_edge (e);
6970 if (exit_bb)
6972 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6973 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6974 exit_flag = XNEWVEC (int, num_exit_edges);
6975 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6976 i = 0;
6977 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6979 exit_prob[i] = e->probability;
6980 exit_flag[i] = e->flags;
6981 exit_succ[i++] = e->dest;
6982 remove_edge (e);
6985 else
6987 num_exit_edges = 0;
6988 exit_succ = NULL;
6989 exit_flag = NULL;
6990 exit_prob = NULL;
6993 /* Switch context to the child function to initialize DEST_FN's CFG. */
6994 gcc_assert (dest_cfun->cfg == NULL);
6995 push_cfun (dest_cfun);
6997 init_empty_tree_cfg ();
6999 /* Initialize EH information for the new function. */
7000 eh_map = NULL;
7001 new_label_map = NULL;
7002 if (saved_cfun->eh)
7004 eh_region region = NULL;
7006 FOR_EACH_VEC_ELT (bbs, i, bb)
7007 region = find_outermost_region_in_block (saved_cfun, bb, region);
7009 init_eh_for_function ();
7010 if (region != NULL)
7012 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7013 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7014 new_label_mapper, new_label_map);
7018 /* Initialize an empty loop tree. */
7019 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7020 init_loops_structure (dest_cfun, loops, 1);
7021 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7022 set_loops_for_fn (dest_cfun, loops);
7024 /* Move the outlined loop tree part. */
7025 num_nodes = bbs.length ();
7026 FOR_EACH_VEC_ELT (bbs, i, bb)
7028 if (bb->loop_father->header == bb)
7030 struct loop *this_loop = bb->loop_father;
7031 struct loop *outer = loop_outer (this_loop);
7032 if (outer == loop
7033 /* If the SESE region contains some bbs ending with
7034 a noreturn call, those are considered to belong
7035 to the outermost loop in saved_cfun, rather than
7036 the entry_bb's loop_father. */
7037 || outer == loop0)
7039 if (outer != loop)
7040 num_nodes -= this_loop->num_nodes;
7041 flow_loop_tree_node_remove (bb->loop_father);
7042 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7043 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7046 else if (bb->loop_father == loop0 && loop0 != loop)
7047 num_nodes--;
7049 /* Remove loop exits from the outlined region. */
7050 if (loops_for_fn (saved_cfun)->exits)
7051 FOR_EACH_EDGE (e, ei, bb->succs)
7053 struct loops *l = loops_for_fn (saved_cfun);
7054 loop_exit **slot
7055 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7056 NO_INSERT);
7057 if (slot)
7058 l->exits->clear_slot (slot);
7063 /* Adjust the number of blocks in the tree root of the outlined part. */
7064 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7066 /* Setup a mapping to be used by move_block_to_fn. */
7067 loop->aux = current_loops->tree_root;
7068 loop0->aux = current_loops->tree_root;
7070 pop_cfun ();
7072 /* Move blocks from BBS into DEST_CFUN. */
7073 gcc_assert (bbs.length () >= 2);
7074 after = dest_cfun->cfg->x_entry_block_ptr;
7075 hash_map<tree, tree> vars_map;
7077 memset (&d, 0, sizeof (d));
7078 d.orig_block = orig_block;
7079 d.new_block = DECL_INITIAL (dest_cfun->decl);
7080 d.from_context = cfun->decl;
7081 d.to_context = dest_cfun->decl;
7082 d.vars_map = &vars_map;
7083 d.new_label_map = new_label_map;
7084 d.eh_map = eh_map;
7085 d.remap_decls_p = true;
7087 FOR_EACH_VEC_ELT (bbs, i, bb)
7089 /* No need to update edge counts on the last block. It has
7090 already been updated earlier when we detached the region from
7091 the original CFG. */
7092 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7093 after = bb;
7096 loop->aux = NULL;
7097 loop0->aux = NULL;
7098 /* Loop sizes are no longer correct, fix them up. */
7099 loop->num_nodes -= num_nodes;
7100 for (struct loop *outer = loop_outer (loop);
7101 outer; outer = loop_outer (outer))
7102 outer->num_nodes -= num_nodes;
7103 loop0->num_nodes -= bbs.length () - num_nodes;
7105 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7107 struct loop *aloop;
7108 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7109 if (aloop != NULL)
7111 if (aloop->simduid)
7113 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7114 d.to_context);
7115 dest_cfun->has_simduid_loops = true;
7117 if (aloop->force_vectorize)
7118 dest_cfun->has_force_vectorize_loops = true;
7122 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7123 if (orig_block)
7125 tree block;
7126 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7127 == NULL_TREE);
7128 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7129 = BLOCK_SUBBLOCKS (orig_block);
7130 for (block = BLOCK_SUBBLOCKS (orig_block);
7131 block; block = BLOCK_CHAIN (block))
7132 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7133 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7136 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7137 &vars_map, dest_cfun->decl);
7139 if (new_label_map)
7140 htab_delete (new_label_map);
7141 if (eh_map)
7142 delete eh_map;
7144 /* Rewire the entry and exit blocks. The successor to the entry
7145 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7146 the child function. Similarly, the predecessor of DEST_FN's
7147 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7148 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7149 various CFG manipulation functions get to the right CFG.
7151 FIXME, this is silly. The CFG ought to become a parameter to
7152 these helpers. */
7153 push_cfun (dest_cfun);
7154 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7155 if (exit_bb)
7156 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7157 pop_cfun ();
7159 /* Back in the original function, the SESE region has disappeared,
7160 create a new basic block in its place. */
7161 bb = create_empty_bb (entry_pred[0]);
7162 if (current_loops)
7163 add_bb_to_loop (bb, loop);
7164 for (i = 0; i < num_entry_edges; i++)
7166 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7167 e->probability = entry_prob[i];
7170 for (i = 0; i < num_exit_edges; i++)
7172 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7173 e->probability = exit_prob[i];
7176 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7177 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7178 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7179 dom_bbs.release ();
7181 if (exit_bb)
7183 free (exit_prob);
7184 free (exit_flag);
7185 free (exit_succ);
7187 free (entry_prob);
7188 free (entry_flag);
7189 free (entry_pred);
7190 bbs.release ();
7192 return bb;
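/* Illustrative sketch only: the canonical client of the routine above
   is function outlining (e.g. OMP expansion), roughly:

     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   where child_cfun stands for the freshly created struct function for
   the outlined body and NEW_BB replaces the whole region in the
   parent's CFG (names here are placeholders, not an exact caller).  */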
7196 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7199 void
7200 dump_function_to_file (tree fndecl, FILE *file, int flags)
7202 tree arg, var, old_current_fndecl = current_function_decl;
7203 struct function *dsf;
7204 bool ignore_topmost_bind = false, any_var = false;
7205 basic_block bb;
7206 tree chain;
7207 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7208 && decl_is_tm_clone (fndecl));
7209 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7211 current_function_decl = fndecl;
7212 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7214 arg = DECL_ARGUMENTS (fndecl);
7215 while (arg)
7217 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7218 fprintf (file, " ");
7219 print_generic_expr (file, arg, dump_flags);
7220 if (flags & TDF_VERBOSE)
7221 print_node (file, "", arg, 4);
7222 if (DECL_CHAIN (arg))
7223 fprintf (file, ", ");
7224 arg = DECL_CHAIN (arg);
7226 fprintf (file, ")\n");
7228 if (flags & TDF_VERBOSE)
7229 print_node (file, "", fndecl, 2);
7231 dsf = DECL_STRUCT_FUNCTION (fndecl);
7232 if (dsf && (flags & TDF_EH))
7233 dump_eh_tree (file, dsf);
7235 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7237 dump_node (fndecl, TDF_SLIM | flags, file);
7238 current_function_decl = old_current_fndecl;
7239 return;
7242 /* When GIMPLE is lowered, the variables are no longer available in
7243 BIND_EXPRs, so display them separately. */
7244 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7246 unsigned ix;
7247 ignore_topmost_bind = true;
7249 fprintf (file, "{\n");
7250 if (!vec_safe_is_empty (fun->local_decls))
7251 FOR_EACH_LOCAL_DECL (fun, ix, var)
7253 print_generic_decl (file, var, flags);
7254 if (flags & TDF_VERBOSE)
7255 print_node (file, "", var, 4);
7256 fprintf (file, "\n");
7258 any_var = true;
7260 if (gimple_in_ssa_p (cfun))
7261 for (ix = 1; ix < num_ssa_names; ++ix)
7263 tree name = ssa_name (ix);
7264 if (name && !SSA_NAME_VAR (name))
7266 fprintf (file, " ");
7267 print_generic_expr (file, TREE_TYPE (name), flags);
7268 fprintf (file, " ");
7269 print_generic_expr (file, name, flags);
7270 fprintf (file, ";\n");
7272 any_var = true;
7277 if (fun && fun->decl == fndecl
7278 && fun->cfg
7279 && basic_block_info_for_fn (fun))
7281 /* If the CFG has been built, emit a CFG-based dump. */
7282 if (!ignore_topmost_bind)
7283 fprintf (file, "{\n");
7285 if (any_var && n_basic_blocks_for_fn (fun))
7286 fprintf (file, "\n");
7288 FOR_EACH_BB_FN (bb, fun)
7289 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7291 fprintf (file, "}\n");
7293 else if (DECL_SAVED_TREE (fndecl) == NULL)
7295 /* The function is now in GIMPLE form but the CFG has not been
7296 built yet. Emit the single sequence of GIMPLE statements
7297 that make up its body. */
7298 gimple_seq body = gimple_body (fndecl);
7300 if (gimple_seq_first_stmt (body)
7301 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7302 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7303 print_gimple_seq (file, body, 0, flags);
7304 else
7306 if (!ignore_topmost_bind)
7307 fprintf (file, "{\n");
7309 if (any_var)
7310 fprintf (file, "\n");
7312 print_gimple_seq (file, body, 2, flags);
7313 fprintf (file, "}\n");
7316 else
7318 int indent;
7320 /* Make a tree based dump. */
7321 chain = DECL_SAVED_TREE (fndecl);
7322 if (chain && TREE_CODE (chain) == BIND_EXPR)
7324 if (ignore_topmost_bind)
7326 chain = BIND_EXPR_BODY (chain);
7327 indent = 2;
7329 else
7330 indent = 0;
7332 else
7334 if (!ignore_topmost_bind)
7335 fprintf (file, "{\n");
7336 indent = 2;
7339 if (any_var)
7340 fprintf (file, "\n");
7342 print_generic_stmt_indented (file, chain, flags, indent);
7343 if (ignore_topmost_bind)
7344 fprintf (file, "}\n");
7347 if (flags & TDF_ENUMERATE_LOCALS)
7348 dump_enumerated_decls (file, flags);
7349 fprintf (file, "\n\n");
7351 current_function_decl = old_current_fndecl;
7354 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7356 DEBUG_FUNCTION void
7357 debug_function (tree fn, int flags)
7359 dump_function_to_file (fn, stderr, flags);
7363 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7365 static void
7366 print_pred_bbs (FILE *file, basic_block bb)
7368 edge e;
7369 edge_iterator ei;
7371 FOR_EACH_EDGE (e, ei, bb->preds)
7372 fprintf (file, "bb_%d ", e->src->index);
7376 /* Print on FILE the indexes for the successors of basic_block BB. */
7378 static void
7379 print_succ_bbs (FILE *file, basic_block bb)
7381 edge e;
7382 edge_iterator ei;
7384 FOR_EACH_EDGE (e, ei, bb->succs)
7385 fprintf (file, "bb_%d ", e->dest->index);
7388 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7390 void
7391 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7393 char *s_indent = (char *) alloca ((size_t) indent + 1);
7394 memset ((void *) s_indent, ' ', (size_t) indent);
7395 s_indent[indent] = '\0';
7397 /* Print basic_block's header. */
7398 if (verbosity >= 2)
7400 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7401 print_pred_bbs (file, bb);
7402 fprintf (file, "}, succs = {");
7403 print_succ_bbs (file, bb);
7404 fprintf (file, "})\n");
7407 /* Print basic_block's body. */
7408 if (verbosity >= 3)
7410 fprintf (file, "%s {\n", s_indent);
7411 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7412 fprintf (file, "%s }\n", s_indent);
7416 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7418 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7419 the VERBOSITY level, this outputs the contents of the loop or just
7420 its structure. */
7422 static void
7423 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7425 char *s_indent;
7426 basic_block bb;
7428 if (loop == NULL)
7429 return;
7431 s_indent = (char *) alloca ((size_t) indent + 1);
7432 memset ((void *) s_indent, ' ', (size_t) indent);
7433 s_indent[indent] = '\0';
7435 /* Print loop's header. */
7436 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7437 if (loop->header)
7438 fprintf (file, "header = %d", loop->header->index);
7439 else
7441 fprintf (file, "deleted)\n");
7442 return;
7444 if (loop->latch)
7445 fprintf (file, ", latch = %d", loop->latch->index);
7446 else
7447 fprintf (file, ", multiple latches");
7448 fprintf (file, ", niter = ");
7449 print_generic_expr (file, loop->nb_iterations, 0);
7451 if (loop->any_upper_bound)
7453 fprintf (file, ", upper_bound = ");
7454 print_decu (loop->nb_iterations_upper_bound, file);
7457 if (loop->any_estimate)
7459 fprintf (file, ", estimate = ");
7460 print_decu (loop->nb_iterations_estimate, file);
7462 fprintf (file, ")\n");
7464 /* Print loop's body. */
7465 if (verbosity >= 1)
7467 fprintf (file, "%s{\n", s_indent);
7468 FOR_EACH_BB_FN (bb, cfun)
7469 if (bb->loop_father == loop)
7470 print_loops_bb (file, bb, indent, verbosity);
7472 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7473 fprintf (file, "%s}\n", s_indent);
7477 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7478 spaces. Depending on the VERBOSITY level, this outputs the contents
7479 of the loop or just its structure. */
7481 static void
7482 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7483 int verbosity)
7485 if (loop == NULL)
7486 return;
7488 print_loop (file, loop, indent, verbosity);
7489 print_loop_and_siblings (file, loop->next, indent, verbosity);
7492 /* Follow a CFG edge from the entry point of the program, and on entry
7493 of a loop, pretty print the loop structure on FILE. */
7495 void
7496 print_loops (FILE *file, int verbosity)
7498 basic_block bb;
7500 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7501 if (bb && bb->loop_father)
7502 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7505 /* Dump a loop. */
7507 DEBUG_FUNCTION void
7508 debug (struct loop &ref)
7510 print_loop (stderr, &ref, 0, /*verbosity*/0);
7513 DEBUG_FUNCTION void
7514 debug (struct loop *ptr)
7516 if (ptr)
7517 debug (*ptr);
7518 else
7519 fprintf (stderr, "<nil>\n");
7522 /* Dump a loop verbosely. */
7524 DEBUG_FUNCTION void
7525 debug_verbose (struct loop &ref)
7527 print_loop (stderr, &ref, 0, /*verbosity*/3);
7530 DEBUG_FUNCTION void
7531 debug_verbose (struct loop *ptr)
7533 if (ptr)
7534 debug_verbose (*ptr);
7535 else
7536 fprintf (stderr, "<nil>\n");
7540 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7542 DEBUG_FUNCTION void
7543 debug_loops (int verbosity)
7545 print_loops (stderr, verbosity);
7548 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7550 DEBUG_FUNCTION void
7551 debug_loop (struct loop *loop, int verbosity)
7553 print_loop (stderr, loop, 0, verbosity);
7556 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7557 level. */
7559 DEBUG_FUNCTION void
7560 debug_loop_num (unsigned num, int verbosity)
7562 debug_loop (get_loop (cfun, num), verbosity);
7565 /* Return true if BB ends with a call, possibly followed by some
7566 instructions that must stay with the call. Return false
7567 otherwise. */
7569 static bool
7570 gimple_block_ends_with_call_p (basic_block bb)
7572 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7573 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7577 /* Return true if BB ends with a conditional branch. Return false
7578 otherwise. */
7580 static bool
7581 gimple_block_ends_with_condjump_p (const_basic_block bb)
7583 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7584 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7588 /* Return true if we need to add fake edge to exit at statement T.
7589 Helper function for gimple_flow_call_edges_add. */
7591 static bool
7592 need_fake_edge_p (gimple t)
7594 tree fndecl = NULL_TREE;
7595 int call_flags = 0;
7597 /* NORETURN and LONGJMP calls already have an edge to exit.
7598 CONST and PURE calls do not need one.
7599 We don't currently check for CONST and PURE here, although
7600 it would be a good idea, because those attributes are
7601 figured out from the RTL in mark_constant_function, and
7602 the counter incrementation code from -fprofile-arcs
7603 leads to different results from -fbranch-probabilities. */
7604 if (is_gimple_call (t))
7606 fndecl = gimple_call_fndecl (t);
7607 call_flags = gimple_call_flags (t);
7610 if (is_gimple_call (t)
7611 && fndecl
7612 && DECL_BUILT_IN (fndecl)
7613 && (call_flags & ECF_NOTHROW)
7614 && !(call_flags & ECF_RETURNS_TWICE)
7615 /* fork() doesn't really return twice, but the effect of
7616 wrapping it in __gcov_fork() which calls __gcov_flush()
7617 and clears the counters before forking has the same
7618 effect as returning twice. Force a fake edge. */
7619 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7620 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7621 return false;
7623 if (is_gimple_call (t))
7625 edge_iterator ei;
7626 edge e;
7627 basic_block bb;
7629 if (!(call_flags & ECF_NORETURN))
7630 return true;
7632 bb = gimple_bb (t);
7633 FOR_EACH_EDGE (e, ei, bb->succs)
7634 if ((e->flags & EDGE_FAKE) == 0)
7635 return true;
7638 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7639 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7640 return true;
7642 return false;
7646 /* Add fake edges to the function exit for any non-constant and
7647 non-noreturn calls (or noreturn calls with EH/abnormal edges),
7648 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7649 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7650 that were split.
7652 The goal is to expose cases in which entering a basic block does
7653 not imply that all subsequent instructions must be executed. */
7655 static int
7656 gimple_flow_call_edges_add (sbitmap blocks)
7658 int i;
7659 int blocks_split = 0;
7660 int last_bb = last_basic_block_for_fn (cfun);
7661 bool check_last_block = false;
7663 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7664 return 0;
7666 if (! blocks)
7667 check_last_block = true;
7668 else
7669 check_last_block = bitmap_bit_p (blocks,
7670 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7672 /* In the last basic block, before epilogue generation, there will be
7673 a fallthru edge to EXIT. Special care is required if the last insn
7674 of the last basic block is a call because make_edge folds duplicate
7675 edges, which would result in the fallthru edge also being marked
7676 fake, which would result in the fallthru edge being removed by
7677 remove_fake_edges, which would result in an invalid CFG.
7679 Moreover, we can't elide the outgoing fake edge, since the block
7680 profiler needs to take this into account in order to solve the minimal
7681 spanning tree in the case that the call doesn't return.
7683 Handle this by adding a dummy instruction in a new last basic block. */
7684 if (check_last_block)
7686 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7687 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7688 gimple t = NULL;
7690 if (!gsi_end_p (gsi))
7691 t = gsi_stmt (gsi);
7693 if (t && need_fake_edge_p (t))
7695 edge e;
7697 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7698 if (e)
7700 gsi_insert_on_edge (e, gimple_build_nop ());
7701 gsi_commit_edge_inserts ();
7706 /* Now add fake edges to the function exit for any non-constant
7707 calls, since there is no way that we can determine if they will
7708 return or not... */
7709 for (i = 0; i < last_bb; i++)
7711 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7712 gimple_stmt_iterator gsi;
7713 gimple stmt, last_stmt;
7715 if (!bb)
7716 continue;
7718 if (blocks && !bitmap_bit_p (blocks, i))
7719 continue;
7721 gsi = gsi_last_nondebug_bb (bb);
7722 if (!gsi_end_p (gsi))
7724 last_stmt = gsi_stmt (gsi);
7727 stmt = gsi_stmt (gsi);
7728 if (need_fake_edge_p (stmt))
7730 edge e;
7732 /* The handling above of the final block before the
7733 epilogue should be enough to verify that there is
7734 no edge to the exit block in CFG already.
7735 Calling make_edge in such case would cause us to
7736 mark that edge as fake and remove it later. */
7737 #ifdef ENABLE_CHECKING
7738 if (stmt == last_stmt)
7740 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7741 gcc_assert (e == NULL);
7743 #endif
7745 /* Note that the following may create a new basic block
7746 and renumber the existing basic blocks. */
7747 if (stmt != last_stmt)
7749 e = split_block (bb, stmt);
7750 if (e)
7751 blocks_split++;
7753 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7755 gsi_prev (&gsi);
7757 while (!gsi_end_p (gsi));
7761 if (blocks_split)
7762 verify_flow_info ();
7764 return blocks_split;
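/* Illustrative sketch only: for a block whose call may not return,
   the pass above splits after the call and attaches a fake edge to
   EXIT, conceptually:

       before                    after

       bb:  ...                  bb:  ...
            call foo ();              call foo ();   => fake edge to EXIT
            more stmts;          bb': more stmts;

   so the block profiler sees that control may leave at the call.  */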
7767 /* Removes edge E and all the blocks dominated by it, and updates dominance
7768 information. The IL in E->src needs to be updated separately.
7769 If dominance info is not available, only the edge E is removed. */
7771 void
7772 remove_edge_and_dominated_blocks (edge e)
7774 vec<basic_block> bbs_to_remove = vNULL;
7775 vec<basic_block> bbs_to_fix_dom = vNULL;
7776 bitmap df, df_idom;
7777 edge f;
7778 edge_iterator ei;
7779 bool none_removed = false;
7780 unsigned i;
7781 basic_block bb, dbb;
7782 bitmap_iterator bi;
7784 if (!dom_info_available_p (CDI_DOMINATORS))
7786 remove_edge (e);
7787 return;
7790 /* No updating is needed for edges to exit. */
7791 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7793 if (cfgcleanup_altered_bbs)
7794 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7795 remove_edge (e);
7796 return;
7799 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7800 that is not dominated by E->dest, then this set is empty. Otherwise,
7801 all the basic blocks dominated by E->dest are removed.
7803 Also, to DF_IDOM we store the immediate dominators of the blocks in
7804 the dominance frontier of E (i.e., of the successors of the
7805 removed blocks, if there are any, and of E->dest otherwise). */
7806 FOR_EACH_EDGE (f, ei, e->dest->preds)
7808 if (f == e)
7809 continue;
7811 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7813 none_removed = true;
7814 break;
7818 df = BITMAP_ALLOC (NULL);
7819 df_idom = BITMAP_ALLOC (NULL);
7821 if (none_removed)
7822 bitmap_set_bit (df_idom,
7823 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7824 else
7826 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7827 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7829 FOR_EACH_EDGE (f, ei, bb->succs)
7831 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7832 bitmap_set_bit (df, f->dest->index);
7835 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7836 bitmap_clear_bit (df, bb->index);
7838 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7840 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7841 bitmap_set_bit (df_idom,
7842 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7846 if (cfgcleanup_altered_bbs)
7848 /* Record the set of the altered basic blocks. */
7849 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7850 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7853 /* Remove E and the cancelled blocks. */
7854 if (none_removed)
7855 remove_edge (e);
7856 else
7858 /* Walk backwards so as to get a chance to substitute all
7859 released DEFs into debug stmts. See
7860 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7861 details. */
7862 for (i = bbs_to_remove.length (); i-- > 0; )
7863 delete_basic_block (bbs_to_remove[i]);
7866 /* Update the dominance information. The immediate dominator may change only
7867 for blocks whose immediate dominator belongs to DF_IDOM:
7869 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7870 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7871 Z dominates X after the removal. Before removal, there exists a path P
7872 from Y to X that avoids Z. Let F be the last edge on P that is
7873 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7874 dominates W, and because of P, Z does not dominate W), and W belongs to
7875 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7876 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7878 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7879 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7880 dbb;
7881 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7882 bbs_to_fix_dom.safe_push (dbb);
7885 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7887 BITMAP_FREE (df);
7888 BITMAP_FREE (df_idom);
7889 bbs_to_remove.release ();
7890 bbs_to_fix_dom.release ();
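/* Illustrative sketch only, instantiating the argument above:

       Y ---> B ---> C
       Y ----------> C   (second path, so B does not dominate C)

   Removing edge Y->B deletes B (nothing else reaches it).  C survives
   via the second path, lies in the dominance frontier of the removed
   blocks, and its old immediate dominator Y lands in DF_IDOM, so the
   fixup walk revalidates the dominator sons of Y.  */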
7893 /* Purge dead EH edges from basic block BB. */
7895 bool
7896 gimple_purge_dead_eh_edges (basic_block bb)
7898 bool changed = false;
7899 edge e;
7900 edge_iterator ei;
7901 gimple stmt = last_stmt (bb);
7903 if (stmt && stmt_can_throw_internal (stmt))
7904 return false;
7906 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7908 if (e->flags & EDGE_EH)
7910 remove_edge_and_dominated_blocks (e);
7911 changed = true;
7913 else
7914 ei_next (&ei);
7917 return changed;
7920 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7922 bool
7923 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7925 bool changed = false;
7926 unsigned i;
7927 bitmap_iterator bi;
7929 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7931 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7933 /* Earlier gimple_purge_dead_eh_edges could have removed
7934 this basic block already. */
7935 gcc_assert (bb || changed);
7936 if (bb != NULL)
7937 changed |= gimple_purge_dead_eh_edges (bb);
7940 return changed;
7943 /* Purge dead abnormal call edges from basic block BB. */
7945 bool
7946 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7948 bool changed = false;
7949 edge e;
7950 edge_iterator ei;
7951 gimple stmt = last_stmt (bb);
7953 if (!cfun->has_nonlocal_label
7954 && !cfun->calls_setjmp)
7955 return false;
7957 if (stmt && stmt_can_make_abnormal_goto (stmt))
7958 return false;
7960 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7962 if (e->flags & EDGE_ABNORMAL)
7964 if (e->flags & EDGE_FALLTHRU)
7965 e->flags &= ~EDGE_ABNORMAL;
7966 else
7967 remove_edge_and_dominated_blocks (e);
7968 changed = true;
7970 else
7971 ei_next (&ei);
7974 return changed;
7977 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7979 bool
7980 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7982 bool changed = false;
7983 unsigned i;
7984 bitmap_iterator bi;
7986 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7988 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7990 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7991 this basic block already. */
7992 gcc_assert (bb || changed);
7993 if (bb != NULL)
7994 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7997 return changed;
8000 /* This function is called whenever a new edge is created or
8001 redirected. */
8003 static void
8004 gimple_execute_on_growing_pred (edge e)
8006 basic_block bb = e->dest;
8008 if (!gimple_seq_empty_p (phi_nodes (bb)))
8009 reserve_phi_args_for_new_edge (bb);
8012 /* This function is called immediately before edge E is removed from
8013 the edge vector E->dest->preds. */
8015 static void
8016 gimple_execute_on_shrinking_pred (edge e)
8018 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8019 remove_phi_args (e);
8022 /*---------------------------------------------------------------------------
8023 Helper functions for Loop versioning
8024 ---------------------------------------------------------------------------*/
8026 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8027 of 'first'. Both of them are dominated by 'new_head' basic block. When
8028 'new_head' was created by splitting 'second's incoming edge, it received
8029 phi arguments on that edge from split_edge(). Later, an additional edge
8030 'e' was created to connect 'new_head' and 'first'. Now this routine adds
8031 to this additional edge 'e' the phi args that the new_head-to-second edge
8032 received as part of the edge splitting. */
8034 static void
8035 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8036 basic_block new_head, edge e)
8038 gphi *phi1, *phi2;
8039 gphi_iterator psi1, psi2;
8040 tree def;
8041 edge e2 = find_edge (new_head, second);
8043 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8044 edge, we should always have an edge from NEW_HEAD to SECOND. */
8045 gcc_assert (e2 != NULL);
8047 /* Browse all 'second' basic block phi nodes and add phi args to
8048 edge 'e' for 'first' head. PHI args are always in correct order. */
8050 for (psi2 = gsi_start_phis (second),
8051 psi1 = gsi_start_phis (first);
8052 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8053 gsi_next (&psi2), gsi_next (&psi1))
8055 phi1 = psi1.phi ();
8056 phi2 = psi2.phi ();
8057 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8058 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8063 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8064 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8065 the destination of the ELSE part. */
8067 static void
8068 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8069 basic_block second_head ATTRIBUTE_UNUSED,
8070 basic_block cond_bb, void *cond_e)
8072 gimple_stmt_iterator gsi;
8073 gimple new_cond_expr;
8074 tree cond_expr = (tree) cond_e;
8075 edge e0;
8077 /* Build new conditional expr */
8078 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8079 NULL_TREE, NULL_TREE);
8081 /* Add new cond in cond_bb. */
8082 gsi = gsi_last_bb (cond_bb);
8083 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8085 /* Adjust edges appropriately to connect new head with first head
8086 as well as second head. */
8087 e0 = single_succ_edge (cond_bb);
8088 e0->flags &= ~EDGE_FALLTHRU;
8089 e0->flags |= EDGE_FALSE_VALUE;
8093 /* Do book-keeping of basic block BB for the profile consistency checker.
8094 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8095 do post-pass accounting. Store the counts in RECORD. */
8096 static void
8097 gimple_account_profile_record (basic_block bb, int after_pass,
8098 struct profile_record *record)
8100 gimple_stmt_iterator i;
8101 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8103 record->size[after_pass]
8104 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8105 if (profile_status_for_fn (cfun) == PROFILE_READ)
8106 record->time[after_pass]
8107 += estimate_num_insns (gsi_stmt (i),
8108 &eni_time_weights) * bb->count;
8109 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8110 record->time[after_pass]
8111 += estimate_num_insns (gsi_stmt (i),
8112 &eni_time_weights) * bb->frequency;
8116 struct cfg_hooks gimple_cfg_hooks = {
8117 "gimple",
8118 gimple_verify_flow_info,
8119 gimple_dump_bb, /* dump_bb */
8120 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8121 create_bb, /* create_basic_block */
8122 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8123 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8124 gimple_can_remove_branch_p, /* can_remove_branch_p */
8125 remove_bb, /* delete_basic_block */
8126 gimple_split_block, /* split_block */
8127 gimple_move_block_after, /* move_block_after */
8128 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8129 gimple_merge_blocks, /* merge_blocks */
8130 gimple_predict_edge, /* predict_edge */
8131 gimple_predicted_by_p, /* predicted_by_p */
8132 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8133 gimple_duplicate_bb, /* duplicate_block */
8134 gimple_split_edge, /* split_edge */
8135 gimple_make_forwarder_block, /* make_forward_block */
8136 NULL, /* tidy_fallthru_edge */
8137 NULL, /* force_nonfallthru */
8138 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8139 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8140 gimple_flow_call_edges_add, /* flow_call_edges_add */
8141 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8142 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8143 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8144 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8145 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8146 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8147 flush_pending_stmts, /* flush_pending_stmts */
8148 gimple_empty_block_p, /* block_empty_p */
8149 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8150 gimple_account_profile_record,
8154 /* Split all critical edges. */
8156 unsigned int
8157 split_critical_edges (void)
8159 basic_block bb;
8160 edge e;
8161 edge_iterator ei;
8163 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8164 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8165 mappings around the calls to split_edge. */
8166 start_recording_case_labels ();
8167 FOR_ALL_BB_FN (bb, cfun)
8169 FOR_EACH_EDGE (e, ei, bb->succs)
8171 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8172 split_edge (e);
8173 /* PRE inserts statements on edges and expects that,
8174 since split_critical_edges was run beforehand, committing edge
8175 insertions will not split more edges. In addition to critical
8176 edges we must split edges that have multiple successors and
8177 end by control flow statements, such as RESX.
8178 Go ahead and split them too. This matches the logic in
8179 gimple_find_edge_insert_loc. */
8180 else if ((!single_pred_p (e->dest)
8181 || !gimple_seq_empty_p (phi_nodes (e->dest))
8182 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8183 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8184 && !(e->flags & EDGE_ABNORMAL))
8186 gimple_stmt_iterator gsi;
8188 gsi = gsi_last_bb (e->src);
8189 if (!gsi_end_p (gsi)
8190 && stmt_ends_bb_p (gsi_stmt (gsi))
8191 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8192 && !gimple_call_builtin_p (gsi_stmt (gsi),
8193 BUILT_IN_RETURN)))
8194 split_edge (e);
8198 end_recording_case_labels ();
8199 return 0;
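/* Illustrative sketch only: an edge is critical when its source has
   several successors and its destination several predecessors, e.g.
   A->D in:

       A ---> B
        \
         +--> D <--- C

   Splitting A->D inserts a fresh block on it, giving edge insertions
   an unambiguous place to land when they are committed.  */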
8202 namespace {
8204 const pass_data pass_data_split_crit_edges =
8206 GIMPLE_PASS, /* type */
8207 "crited", /* name */
8208 OPTGROUP_NONE, /* optinfo_flags */
8209 TV_TREE_SPLIT_EDGES, /* tv_id */
8210 PROP_cfg, /* properties_required */
8211 PROP_no_crit_edges, /* properties_provided */
8212 0, /* properties_destroyed */
8213 0, /* todo_flags_start */
8214 0, /* todo_flags_finish */
8217 class pass_split_crit_edges : public gimple_opt_pass
8219 public:
8220 pass_split_crit_edges (gcc::context *ctxt)
8221 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8224 /* opt_pass methods: */
8225 virtual unsigned int execute (function *) { return split_critical_edges (); }
8227 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8228 }; // class pass_split_crit_edges
8230 } // anon namespace
8232 gimple_opt_pass *
8233 make_pass_split_crit_edges (gcc::context *ctxt)
8235 return new pass_split_crit_edges (ctxt);
8239 /* Build a ternary operation and gimplify it. Emit code before GSI.
8240 Return the gimple_val holding the result. */
8242 tree
8243 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8244 tree type, tree a, tree b, tree c)
8246 tree ret;
8247 location_t loc = gimple_location (gsi_stmt (*gsi));
8249 ret = fold_build3_loc (loc, code, type, a, b, c);
8250 STRIP_NOPS (ret);
8252 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8253 GSI_SAME_STMT);
8256 /* Build a binary operation and gimplify it. Emit code before GSI.
8257 Return the gimple_val holding the result. */
8259 tree
8260 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8261 tree type, tree a, tree b)
8263 tree ret;
8265 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8266 STRIP_NOPS (ret);
8268 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8269 GSI_SAME_STMT);
8272 /* Build a unary operation and gimplify it. Emit code before GSI.
8273 Return the gimple_val holding the result. */
8275 tree
8276 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8277 tree a)
8279 tree ret;
8281 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8282 STRIP_NOPS (ret);
8284 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8285 GSI_SAME_STMT);
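/* Illustrative sketch only: the three builders above fold first and
   gimplify the result, so a pass can compose expressions without
   caring whether folding collapsed them.  E.g. to compute a*b + c
   before GSI:

     tree t = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
     tree r = gimplify_build2 (gsi, PLUS_EXPR, type, t, c);

   both T and R are gimple vals ready for use in new statements.  */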
8290 /* Given a basic block B which ends with a conditional and has
8291 precisely two successors, determine which of the edges is taken if
8292 the conditional is true and which is taken if the conditional is
8293 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8295 void
8296 extract_true_false_edges_from_block (basic_block b,
8297 edge *true_edge,
8298 edge *false_edge)
8300 edge e = EDGE_SUCC (b, 0);
8302 if (e->flags & EDGE_TRUE_VALUE)
8304 *true_edge = e;
8305 *false_edge = EDGE_SUCC (b, 1);
8307 else
8309 *false_edge = e;
8310 *true_edge = EDGE_SUCC (b, 1);
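/* Illustrative sketch only: typical use after seeing a GIMPLE_COND
   ending BB:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   after which true_edge->dest is the block reached when the condition
   evaluates to true.  */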
8314 /* Emit return warnings. */
8316 namespace {
8318 const pass_data pass_data_warn_function_return =
8320 GIMPLE_PASS, /* type */
8321 "*warn_function_return", /* name */
8322 OPTGROUP_NONE, /* optinfo_flags */
8323 TV_NONE, /* tv_id */
8324 PROP_cfg, /* properties_required */
8325 0, /* properties_provided */
8326 0, /* properties_destroyed */
8327 0, /* todo_flags_start */
8328 0, /* todo_flags_finish */
8331 class pass_warn_function_return : public gimple_opt_pass
8333 public:
8334 pass_warn_function_return (gcc::context *ctxt)
8335 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8338 /* opt_pass methods: */
8339 virtual unsigned int execute (function *);
8341 }; // class pass_warn_function_return
8343 unsigned int
8344 pass_warn_function_return::execute (function *fun)
8346 source_location location;
8347 gimple last;
8348 edge e;
8349 edge_iterator ei;
8351 if (!targetm.warn_func_return (fun->decl))
8352 return 0;
8354 /* If we have a path to EXIT, then we do return. */
8355 if (TREE_THIS_VOLATILE (fun->decl)
8356 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8358 location = UNKNOWN_LOCATION;
8359 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8361 last = last_stmt (e->src);
8362 if ((gimple_code (last) == GIMPLE_RETURN
8363 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8364 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8365 break;
8367 if (location == UNKNOWN_LOCATION)
8368 location = cfun->function_end_locus;
8369 warning_at (location, 0, "%<noreturn%> function does return");
8372 /* If we see "return;" in some basic block, then we do reach the end
8373 without returning a value. */
8374 else if (warn_return_type
8375 && !TREE_NO_WARNING (fun->decl)
8376 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8377 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8379 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8381 gimple last = last_stmt (e->src);
8382 greturn *return_stmt = dyn_cast <greturn *> (last);
8383 if (return_stmt
8384 && gimple_return_retval (return_stmt) == NULL
8385 && !gimple_no_warning_p (last))
8387 location = gimple_location (last);
8388 if (location == UNKNOWN_LOCATION)
8389 location = fun->function_end_locus;
8390 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8391 TREE_NO_WARNING (fun->decl) = 1;
8392 break;
8396 return 0;
8399 } // anon namespace
8401 gimple_opt_pass *
8402 make_pass_warn_function_return (gcc::context *ctxt)
8404 return new pass_warn_function_return (ctxt);
8407 /* Walk a gimplified function and warn for functions whose return value is
8408 ignored and attribute((warn_unused_result)) is set. This is done before
8409 inlining, so we don't have to worry about that. */
8411 static void
8412 do_warn_unused_result (gimple_seq seq)
8413 {
8414 tree fdecl, ftype;
8415 gimple_stmt_iterator i;
8417 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8418 {
8419 gimple g = gsi_stmt (i);
8421 switch (gimple_code (g))
8422 {
8423 case GIMPLE_BIND:
8424 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8425 break;
8426 case GIMPLE_TRY:
8427 do_warn_unused_result (gimple_try_eval (g));
8428 do_warn_unused_result (gimple_try_cleanup (g));
8429 break;
8430 case GIMPLE_CATCH:
8431 do_warn_unused_result (gimple_catch_handler (
8432 as_a <gcatch *> (g)));
8433 break;
8434 case GIMPLE_EH_FILTER:
8435 do_warn_unused_result (gimple_eh_filter_failure (g));
8436 break;
8438 case GIMPLE_CALL:
8439 if (gimple_call_lhs (g))
8440 break;
8441 if (gimple_call_internal_p (g))
8442 break;
8444 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8445 LHS. All calls whose value is ignored should be
8446 represented like this. Look for the attribute. */
8447 fdecl = gimple_call_fndecl (g);
8448 ftype = gimple_call_fntype (g);
8450 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8451 {
8452 location_t loc = gimple_location (g);
8454 if (fdecl)
8455 warning_at (loc, OPT_Wunused_result,
8456 "ignoring return value of %qD, "
8457 "declared with attribute warn_unused_result",
8458 fdecl);
8459 else
8460 warning_at (loc, OPT_Wunused_result,
8461 "ignoring return value of function "
8462 "declared with attribute warn_unused_result");
8463 }
8464 break;
8466 default:
8467 /* Not a container, not a call, or a call whose value is used. */
8468 break;
8469 }
8470 }
8471 }
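/* A minimal sketch (hypothetical declarations) of code the walk above
   flags:

     __attribute__ ((warn_unused_result)) int reserve (int n);

     void f (void)
     {
       reserve (16);        // naked call, no LHS: -Wunused-result fires
       (void) reserve (8);  // the cast is gone by gimplification, so
                            // this is still a naked GIMPLE_CALL and warns
     }
*/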
8473 namespace {
8475 const pass_data pass_data_warn_unused_result =
8476 {
8477 GIMPLE_PASS, /* type */
8478 "*warn_unused_result", /* name */
8479 OPTGROUP_NONE, /* optinfo_flags */
8480 TV_NONE, /* tv_id */
8481 PROP_gimple_any, /* properties_required */
8482 0, /* properties_provided */
8483 0, /* properties_destroyed */
8484 0, /* todo_flags_start */
8485 0, /* todo_flags_finish */
8486 };
8488 class pass_warn_unused_result : public gimple_opt_pass
8489 {
8490 public:
8491 pass_warn_unused_result (gcc::context *ctxt)
8492 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8493 {}
8495 /* opt_pass methods: */
8496 virtual bool gate (function *) { return flag_warn_unused_result; }
8497 virtual unsigned int execute (function *)
8498 {
8499 do_warn_unused_result (gimple_body (current_function_decl));
8500 return 0;
8501 }
8503 }; // class pass_warn_unused_result
8505 } // anon namespace
8507 gimple_opt_pass *
8508 make_pass_warn_unused_result (gcc::context *ctxt)
8509 {
8510 return new pass_warn_unused_result (ctxt);
8511 }
8513 /* IPA passes, compilation of earlier functions or inlining
8514 might have changed some properties, such as marking functions nothrow,
8515 pure, const or noreturn.
8516 Remove redundant edges and basic blocks, and create new ones if necessary.
8518 This pass can't be executed as a standalone pass from the pass manager,
8519 because between inlining and this fixup verify_flow_info would fail. */
8521 unsigned int
8522 execute_fixup_cfg (void)
8523 {
8524 basic_block bb;
8525 gimple_stmt_iterator gsi;
8526 int todo = 0;
8527 gcov_type count_scale;
8528 edge e;
8529 edge_iterator ei;
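/* GCOV_COMPUTE_SCALE below yields REG_BR_PROB_BASE * new_count / old_count
   (or REG_BR_PROB_BASE when the old count is zero); apply_scale then
   rescales each block and edge count by that fixed-point factor. */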
8531 count_scale
8532 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8533 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8535 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8536 cgraph_node::get (current_function_decl)->count;
8537 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8538 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8539 count_scale);
8541 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8542 e->count = apply_scale (e->count, count_scale);
8544 FOR_EACH_BB_FN (bb, cfun)
8545 {
8546 bb->count = apply_scale (bb->count, count_scale);
8547 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8548 {
8549 gimple stmt = gsi_stmt (gsi);
8550 tree decl = is_gimple_call (stmt)
8551 ? gimple_call_fndecl (stmt)
8552 : NULL;
8553 if (decl)
8554 {
8555 int flags = gimple_call_flags (stmt);
8556 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8557 {
8558 if (gimple_purge_dead_abnormal_call_edges (bb))
8559 todo |= TODO_cleanup_cfg;
8561 if (gimple_in_ssa_p (cfun))
8562 {
8563 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8564 update_stmt (stmt);
8565 }
8566 }
8568 if (flags & ECF_NORETURN
8569 && fixup_noreturn_call (stmt))
8570 todo |= TODO_cleanup_cfg;
8571 }
8573 /* Remove stores to variables we marked write-only.
8574 Keep the access when the store has side effects, i.e. when the
8575 source is volatile. */
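/* E.g. a store "cache = tmp;" where "cache" is a file-scope variable
   that IPA analysis marked write-only (hypothetical name); the store
   can go, but a volatile source must keep its read. */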
8576 if (gimple_store_p (stmt)
8577 && !gimple_has_side_effects (stmt))
8578 {
8579 tree lhs = get_base_address (gimple_get_lhs (stmt));
8581 if (TREE_CODE (lhs) == VAR_DECL
8582 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8583 && varpool_node::get (lhs)->writeonly)
8584 {
8585 unlink_stmt_vdef (stmt);
8586 gsi_remove (&gsi, true);
8587 release_defs (stmt);
8588 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8589 continue;
8590 }
8591 }
8592 /* For calls we can simply remove the LHS when it is known
8593 to be write-only. */
8594 if (is_gimple_call (stmt)
8595 && gimple_get_lhs (stmt))
8596 {
8597 tree lhs = get_base_address (gimple_get_lhs (stmt));
8599 if (TREE_CODE (lhs) == VAR_DECL
8600 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8601 && varpool_node::get (lhs)->writeonly)
8602 {
8603 gimple_call_set_lhs (stmt, NULL);
8604 update_stmt (stmt);
8605 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8606 }
8607 }
8609 if (maybe_clean_eh_stmt (stmt)
8610 && gimple_purge_dead_eh_edges (bb))
8611 todo |= TODO_cleanup_cfg;
8612 gsi_next (&gsi);
8613 }
8615 FOR_EACH_EDGE (e, ei, bb->succs)
8616 e->count = apply_scale (e->count, count_scale);
8618 /* If we have a basic block with no successors that does not
8619 end with a control statement or a noreturn call, end it with
8620 a call to __builtin_unreachable. This situation can occur
8621 when inlining a noreturn call that does in fact return. */
8622 if (EDGE_COUNT (bb->succs) == 0)
8623 {
8624 gimple stmt = last_stmt (bb);
8625 if (!stmt
8626 || (!is_ctrl_stmt (stmt)
8627 && (!is_gimple_call (stmt)
8628 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8629 {
8630 if (stmt && is_gimple_call (stmt))
8631 gimple_call_set_ctrl_altering (stmt, false);
8632 stmt = gimple_build_call
8633 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8634 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8635 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8636 }
8637 }
8638 }
8639 if (count_scale != REG_BR_PROB_BASE)
8640 compute_function_frequency ();
8642 /* Dump a textual representation of the flowgraph. */
8643 if (dump_file)
8644 gimple_dump_cfg (dump_file, dump_flags);
8646 if (current_loops
8647 && (todo & TODO_cleanup_cfg))
8648 loops_state_set (LOOPS_NEED_FIXUP);
8650 return todo;
8651 }
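/* An illustrative scenario (hypothetical code) for the
   __builtin_unreachable fixup above: if

     __attribute__ ((noreturn)) void bad_exit (void) { }
     int f (void) { bad_exit (); }

   is compiled and bad_exit is inlined, f's body ends in a block with no
   successors and no control statement or noreturn call, so
   execute_fixup_cfg closes it with a call to __builtin_unreachable. */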
8653 namespace {
8655 const pass_data pass_data_fixup_cfg =
8656 {
8657 GIMPLE_PASS, /* type */
8658 "*free_cfg_annotations", /* name */
8659 OPTGROUP_NONE, /* optinfo_flags */
8660 TV_NONE, /* tv_id */
8661 PROP_cfg, /* properties_required */
8662 0, /* properties_provided */
8663 0, /* properties_destroyed */
8664 0, /* todo_flags_start */
8665 0, /* todo_flags_finish */
8666 };
8668 class pass_fixup_cfg : public gimple_opt_pass
8669 {
8670 public:
8671 pass_fixup_cfg (gcc::context *ctxt)
8672 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8673 {}
8675 /* opt_pass methods: */
8676 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8677 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8679 }; // class pass_fixup_cfg
8681 } // anon namespace
8683 gimple_opt_pass *
8684 make_pass_fixup_cfg (gcc::context *ctxt)
8685 {
8686 return new pass_fixup_cfg (ctxt);
8687 }
8689 /* Garbage collection support for edge_def. */
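/* These hooks mark the GC-reachable objects hanging off an edge_def by
   hand: the source and destination blocks, any statements queued on the
   edge (GIMPLE or RTL, depending on the current IR), and the BLOCK tree
   referenced by the edge's goto_locus. */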
8691 extern void gt_ggc_mx (tree&);
8692 extern void gt_ggc_mx (gimple&);
8693 extern void gt_ggc_mx (rtx&);
8694 extern void gt_ggc_mx (basic_block&);
8696 static void
8697 gt_ggc_mx (rtx_insn *& x)
8698 {
8699 if (x)
8700 gt_ggc_mx_rtx_def ((void *) x);
8701 }
8703 void
8704 gt_ggc_mx (edge_def *e)
8705 {
8706 tree block = LOCATION_BLOCK (e->goto_locus);
8707 gt_ggc_mx (e->src);
8708 gt_ggc_mx (e->dest);
8709 if (current_ir_type () == IR_GIMPLE)
8710 gt_ggc_mx (e->insns.g);
8711 else
8712 gt_ggc_mx (e->insns.r);
8713 gt_ggc_mx (block);
8714 }
8716 /* PCH support for edge_def. */
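/* These mirror the GGC markers above, registering the same pointers
   with the precompiled-header machinery instead of the collector. */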
8718 extern void gt_pch_nx (tree&);
8719 extern void gt_pch_nx (gimple&);
8720 extern void gt_pch_nx (rtx&);
8721 extern void gt_pch_nx (basic_block&);
8723 static void
8724 gt_pch_nx (rtx_insn *& x)
8725 {
8726 if (x)
8727 gt_pch_nx_rtx_def ((void *) x);
8728 }
8730 void
8731 gt_pch_nx (edge_def *e)
8732 {
8733 tree block = LOCATION_BLOCK (e->goto_locus);
8734 gt_pch_nx (e->src);
8735 gt_pch_nx (e->dest);
8736 if (current_ir_type () == IR_GIMPLE)
8737 gt_pch_nx (e->insns.g);
8738 else
8739 gt_pch_nx (e->insns.r);
8740 gt_pch_nx (block);
8741 }
8743 void
8744 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8745 {
8746 tree block = LOCATION_BLOCK (e->goto_locus);
8747 op (&(e->src), cookie);
8748 op (&(e->dest), cookie);
8749 if (current_ir_type () == IR_GIMPLE)
8750 op (&(e->insns.g), cookie);
8751 else
8752 op (&(e->insns.r), cookie);
8753 op (&(block), cookie);
8754 }