* lib/ubsan-dg.exp (check_effective_target_fsanitize_undefined):
[official-gcc.git] / gcc / tree-cfg.c
blob0ae4bd76f408910196e213cff324634c95de61f8
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
31 #include "tm_p.h"
32 #include "predict.h"
33 #include "vec.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "hard-reg-set.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "flags.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
49 #include "tree-eh.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "plugin-api.h"
58 #include "ipa-ref.h"
59 #include "cgraph.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-ssa-loop-manip.h"
66 #include "tree-ssa-loop-niter.h"
67 #include "tree-into-ssa.h"
68 #include "expr.h"
69 #include "tree-dfa.h"
70 #include "tree-ssa.h"
71 #include "tree-dump.h"
72 #include "tree-pass.h"
73 #include "diagnostic-core.h"
74 #include "except.h"
75 #include "cfgloop.h"
76 #include "tree-ssa-propagate.h"
77 #include "value-prof.h"
78 #include "tree-inline.h"
79 #include "target.h"
80 #include "tree-ssa-live.h"
81 #include "omp-low.h"
82 #include "tree-cfgcleanup.h"
83 #include "wide-int.h"
84 #include "wide-int-print.h"
86 /* This file contains functions for building the Control Flow Graph (CFG)
87 for a function tree. */
89 /* Local declarations. */
91 /* Initial capacity for the basic block array. */
92 static const int initial_cfg_capacity = 20;
94 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
95 which use a particular edge. The CASE_LABEL_EXPRs are chained together
96 via their CASE_CHAIN field, which we clear after we're done with the
97 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
99 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
100 update the case vector in response to edge redirections.
102 Right now this table is set up and torn down at key points in the
103 compilation process. It would be nice if we could make the table
104 more persistent. The key is getting notification of changes to
105 the CFG (particularly edge removal, creation and redirection). */
107 static hash_map<edge, tree> *edge_to_cases;
109 /* If we record edge_to_cases, this bitmap will hold indexes
110 of basic blocks that end in a GIMPLE_SWITCH which we touched
111 due to edge manipulations. */
113 static bitmap touched_switch_bbs;
/* CFG statistics gathered while building/merging blocks.  */
struct cfg_stats_d
{
  /* Number of labels merged into their predecessor block.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
123 /* Hash table to store last discriminator assigned for each locus. */
124 struct locus_discrim_map
126 location_t locus;
127 int discriminator;
130 /* Hashtable helpers. */
132 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
134 typedef locus_discrim_map value_type;
135 typedef locus_discrim_map compare_type;
136 static inline hashval_t hash (const value_type *);
137 static inline bool equal (const value_type *, const compare_type *);
140 /* Trivial hash function for a location_t. ITEM is a pointer to
141 a hash table entry that maps a location_t to a discriminator. */
143 inline hashval_t
144 locus_discrim_hasher::hash (const value_type *item)
146 return LOCATION_LINE (item->locus);
149 /* Equality function for the locus-to-discriminator map. A and B
150 point to the two hash table entries to compare. */
152 inline bool
153 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
155 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
158 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
160 /* Basic blocks and flowgraphs. */
161 static void make_blocks (gimple_seq);
163 /* Edges. */
164 static void make_edges (void);
165 static void assign_discriminators (void);
166 static void make_cond_expr_edges (basic_block);
167 static void make_gimple_switch_edges (gswitch *, basic_block);
168 static bool make_goto_expr_edges (basic_block);
169 static void make_gimple_asm_edges (basic_block);
170 static edge gimple_redirect_edge_and_branch (edge, basic_block);
171 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
173 /* Various helpers. */
174 static inline bool stmt_starts_bb_p (gimple, gimple);
175 static int gimple_verify_flow_info (void);
176 static void gimple_make_forwarder_block (edge);
177 static gimple first_non_label_stmt (basic_block);
178 static bool verify_gimple_transaction (gtransaction *);
179 static bool call_can_make_abnormal_goto (gimple);
181 /* Flowgraph optimization and cleanup. */
182 static void gimple_merge_blocks (basic_block, basic_block);
183 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
184 static void remove_bb (basic_block);
185 static edge find_taken_edge_computed_goto (basic_block, tree);
186 static edge find_taken_edge_cond_expr (basic_block, tree);
187 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
188 static tree find_case_label_for_value (gswitch *, tree);
190 void
191 init_empty_tree_cfg_for_function (struct function *fn)
193 /* Initialize the basic block array. */
194 init_flow (fn);
195 profile_status_for_fn (fn) = PROFILE_ABSENT;
196 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
197 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
198 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
199 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
200 initial_cfg_capacity);
202 /* Build a mapping of labels to their associated blocks. */
203 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
204 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
205 initial_cfg_capacity);
207 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
208 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
210 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
211 = EXIT_BLOCK_PTR_FOR_FN (fn);
212 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
213 = ENTRY_BLOCK_PTR_FOR_FN (fn);
216 void
217 init_empty_tree_cfg (void)
219 init_empty_tree_cfg_for_function (cfun);
222 /*---------------------------------------------------------------------------
223 Create basic blocks
224 ---------------------------------------------------------------------------*/
226 /* Entry point to the CFG builder for trees. SEQ is the sequence of
227 statements to be added to the flowgraph. */
229 static void
230 build_gimple_cfg (gimple_seq seq)
232 /* Register specific gimple functions. */
233 gimple_register_cfg_hooks ();
235 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
237 init_empty_tree_cfg ();
239 make_blocks (seq);
241 /* Make sure there is always at least one block, even if it's empty. */
242 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
243 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
245 /* Adjust the size of the array. */
246 if (basic_block_info_for_fn (cfun)->length ()
247 < (size_t) n_basic_blocks_for_fn (cfun))
248 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
249 n_basic_blocks_for_fn (cfun));
251 /* To speed up statement iterator walks, we first purge dead labels. */
252 cleanup_dead_labels ();
254 /* Group case nodes to reduce the number of edges.
255 We do this after cleaning up dead labels because otherwise we miss
256 a lot of obvious case merging opportunities. */
257 group_case_labels ();
259 /* Create the edges of the flowgraph. */
260 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
261 make_edges ();
262 assign_discriminators ();
263 cleanup_dead_labels ();
264 delete discriminator_per_locus;
265 discriminator_per_locus = NULL;
268 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
269 them and propagate the information to LOOP. We assume that the annotations
270 come immediately before the condition in BB, if any. */
272 static void
273 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
275 gimple_stmt_iterator gsi = gsi_last_bb (bb);
276 gimple stmt = gsi_stmt (gsi);
278 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
279 return;
281 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
283 stmt = gsi_stmt (gsi);
284 if (gimple_code (stmt) != GIMPLE_CALL)
285 break;
286 if (!gimple_call_internal_p (stmt)
287 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
288 break;
290 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
292 case annot_expr_ivdep_kind:
293 loop->safelen = INT_MAX;
294 break;
295 case annot_expr_no_vector_kind:
296 loop->dont_vectorize = true;
297 break;
298 case annot_expr_vector_kind:
299 loop->force_vectorize = true;
300 cfun->has_force_vectorize_loops = true;
301 break;
302 default:
303 gcc_unreachable ();
306 stmt = gimple_build_assign (gimple_call_lhs (stmt),
307 gimple_call_arg (stmt, 0));
308 gsi_replace (&gsi, stmt, true);
312 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
313 them and propagate the information to the loop. We assume that the
314 annotations come immediately before the condition of the loop. */
316 static void
317 replace_loop_annotate (void)
319 struct loop *loop;
320 basic_block bb;
321 gimple_stmt_iterator gsi;
322 gimple stmt;
324 FOR_EACH_LOOP (loop, 0)
326 /* First look into the header. */
327 replace_loop_annotate_in_block (loop->header, loop);
329 /* Then look into the latch, if any. */
330 if (loop->latch)
331 replace_loop_annotate_in_block (loop->latch, loop);
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_no_vector_kind:
350 case annot_expr_vector_kind:
351 break;
352 default:
353 gcc_unreachable ();
356 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
357 stmt = gimple_build_assign (gimple_call_lhs (stmt),
358 gimple_call_arg (stmt, 0));
359 gsi_replace (&gsi, stmt, true);
365 static unsigned int
366 execute_build_cfg (void)
368 gimple_seq body = gimple_body (current_function_decl);
370 build_gimple_cfg (body);
371 gimple_set_body (current_function_decl, NULL);
372 if (dump_file && (dump_flags & TDF_DETAILS))
374 fprintf (dump_file, "Scope blocks:\n");
375 dump_scope_blocks (dump_file, dump_flags);
377 cleanup_tree_cfg ();
378 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
379 replace_loop_annotate ();
380 return 0;
383 namespace {
385 const pass_data pass_data_build_cfg =
387 GIMPLE_PASS, /* type */
388 "cfg", /* name */
389 OPTGROUP_NONE, /* optinfo_flags */
390 TV_TREE_CFG, /* tv_id */
391 PROP_gimple_leh, /* properties_required */
392 ( PROP_cfg | PROP_loops ), /* properties_provided */
393 0, /* properties_destroyed */
394 0, /* todo_flags_start */
395 0, /* todo_flags_finish */
398 class pass_build_cfg : public gimple_opt_pass
400 public:
401 pass_build_cfg (gcc::context *ctxt)
402 : gimple_opt_pass (pass_data_build_cfg, ctxt)
405 /* opt_pass methods: */
406 virtual unsigned int execute (function *) { return execute_build_cfg (); }
408 }; // class pass_build_cfg
410 } // anon namespace
412 gimple_opt_pass *
413 make_pass_build_cfg (gcc::context *ctxt)
415 return new pass_build_cfg (ctxt);
419 /* Return true if T is a computed goto. */
421 bool
422 computed_goto_p (gimple t)
424 return (gimple_code (t) == GIMPLE_GOTO
425 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
428 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
429 the other edge points to a bb with just __builtin_unreachable ().
430 I.e. return true for C->M edge in:
431 <bb C>:
433 if (something)
434 goto <bb N>;
435 else
436 goto <bb M>;
437 <bb N>:
438 __builtin_unreachable ();
439 <bb M>: */
441 bool
442 assert_unreachable_fallthru_edge_p (edge e)
444 basic_block pred_bb = e->src;
445 gimple last = last_stmt (pred_bb);
446 if (last && gimple_code (last) == GIMPLE_COND)
448 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
449 if (other_bb == e->dest)
450 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
451 if (EDGE_COUNT (other_bb->succs) == 0)
453 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
454 gimple stmt;
456 if (gsi_end_p (gsi))
457 return false;
458 stmt = gsi_stmt (gsi);
459 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
461 gsi_next (&gsi);
462 if (gsi_end_p (gsi))
463 return false;
464 stmt = gsi_stmt (gsi);
466 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
469 return false;
473 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
474 could alter control flow except via eh. We initialize the flag at
475 CFG build time and only ever clear it later. */
477 static void
478 gimple_call_initialize_ctrl_altering (gimple stmt)
480 int flags = gimple_call_flags (stmt);
482 /* A call alters control flow if it can make an abnormal goto. */
483 if (call_can_make_abnormal_goto (stmt)
484 /* A call also alters control flow if it does not return. */
485 || flags & ECF_NORETURN
486 /* TM ending statements have backedges out of the transaction.
487 Return true so we split the basic block containing them.
488 Note that the TM_BUILTIN test is merely an optimization. */
489 || ((flags & ECF_TM_BUILTIN)
490 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
491 /* BUILT_IN_RETURN call is same as return statement. */
492 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
493 gimple_call_set_ctrl_altering (stmt, true);
494 else
495 gimple_call_set_ctrl_altering (stmt, false);
499 /* Build a flowgraph for the sequence of stmts SEQ. */
501 static void
502 make_blocks (gimple_seq seq)
504 gimple_stmt_iterator i = gsi_start (seq);
505 gimple stmt = NULL;
506 bool start_new_block = true;
507 bool first_stmt_of_seq = true;
508 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
510 while (!gsi_end_p (i))
512 gimple prev_stmt;
514 prev_stmt = stmt;
515 stmt = gsi_stmt (i);
517 if (stmt && is_gimple_call (stmt))
518 gimple_call_initialize_ctrl_altering (stmt);
520 /* If the statement starts a new basic block or if we have determined
521 in a previous pass that we need to create a new block for STMT, do
522 so now. */
523 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
525 if (!first_stmt_of_seq)
526 gsi_split_seq_before (&i, &seq);
527 bb = create_basic_block (seq, NULL, bb);
528 start_new_block = false;
531 /* Now add STMT to BB and create the subgraphs for special statement
532 codes. */
533 gimple_set_bb (stmt, bb);
535 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
536 next iteration. */
537 if (stmt_ends_bb_p (stmt))
539 /* If the stmt can make abnormal goto use a new temporary
540 for the assignment to the LHS. This makes sure the old value
541 of the LHS is available on the abnormal edge. Otherwise
542 we will end up with overlapping life-ranges for abnormal
543 SSA names. */
544 if (gimple_has_lhs (stmt)
545 && stmt_can_make_abnormal_goto (stmt)
546 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
548 tree lhs = gimple_get_lhs (stmt);
549 tree tmp = create_tmp_var (TREE_TYPE (lhs));
550 gimple s = gimple_build_assign (lhs, tmp);
551 gimple_set_location (s, gimple_location (stmt));
552 gimple_set_block (s, gimple_block (stmt));
553 gimple_set_lhs (stmt, tmp);
554 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
555 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
556 DECL_GIMPLE_REG_P (tmp) = 1;
557 gsi_insert_after (&i, s, GSI_SAME_STMT);
559 start_new_block = true;
562 gsi_next (&i);
563 first_stmt_of_seq = false;
568 /* Create and return a new empty basic block after bb AFTER. */
570 static basic_block
571 create_bb (void *h, void *e, basic_block after)
573 basic_block bb;
575 gcc_assert (!e);
577 /* Create and initialize a new basic block. Since alloc_block uses
578 GC allocation that clears memory to allocate a basic block, we do
579 not have to clear the newly allocated basic block here. */
580 bb = alloc_block ();
582 bb->index = last_basic_block_for_fn (cfun);
583 bb->flags = BB_NEW;
584 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
586 /* Add the new block to the linked list of blocks. */
587 link_block (bb, after);
589 /* Grow the basic block array if needed. */
590 if ((size_t) last_basic_block_for_fn (cfun)
591 == basic_block_info_for_fn (cfun)->length ())
593 size_t new_size =
594 (last_basic_block_for_fn (cfun)
595 + (last_basic_block_for_fn (cfun) + 3) / 4);
596 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
599 /* Add the newly created block to the array. */
600 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
602 n_basic_blocks_for_fn (cfun)++;
603 last_basic_block_for_fn (cfun)++;
605 return bb;
609 /*---------------------------------------------------------------------------
610 Edge creation
611 ---------------------------------------------------------------------------*/
613 /* Fold COND_EXPR_COND of each COND_EXPR. */
615 void
616 fold_cond_expr_cond (void)
618 basic_block bb;
620 FOR_EACH_BB_FN (bb, cfun)
622 gimple stmt = last_stmt (bb);
624 if (stmt && gimple_code (stmt) == GIMPLE_COND)
626 gcond *cond_stmt = as_a <gcond *> (stmt);
627 location_t loc = gimple_location (stmt);
628 tree cond;
629 bool zerop, onep;
631 fold_defer_overflow_warnings ();
632 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
633 boolean_type_node,
634 gimple_cond_lhs (cond_stmt),
635 gimple_cond_rhs (cond_stmt));
636 if (cond)
638 zerop = integer_zerop (cond);
639 onep = integer_onep (cond);
641 else
642 zerop = onep = false;
644 fold_undefer_overflow_warnings (zerop || onep,
645 stmt,
646 WARN_STRICT_OVERFLOW_CONDITIONAL);
647 if (zerop)
648 gimple_cond_make_false (cond_stmt);
649 else if (onep)
650 gimple_cond_make_true (cond_stmt);
655 /* If basic block BB has an abnormal edge to a basic block
656 containing IFN_ABNORMAL_DISPATCHER internal call, return
657 that the dispatcher's basic block, otherwise return NULL. */
659 basic_block
660 get_abnormal_succ_dispatcher (basic_block bb)
662 edge e;
663 edge_iterator ei;
665 FOR_EACH_EDGE (e, ei, bb->succs)
666 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
668 gimple_stmt_iterator gsi
669 = gsi_start_nondebug_after_labels_bb (e->dest);
670 gimple g = gsi_stmt (gsi);
671 if (g
672 && is_gimple_call (g)
673 && gimple_call_internal_p (g)
674 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
675 return e->dest;
677 return NULL;
680 /* Helper function for make_edges. Create a basic block with
681 with ABNORMAL_DISPATCHER internal call in it if needed, and
682 create abnormal edges from BBS to it and from it to FOR_BB
683 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
685 static void
686 handle_abnormal_edges (basic_block *dispatcher_bbs,
687 basic_block for_bb, int *bb_to_omp_idx,
688 auto_vec<basic_block> *bbs, bool computed_goto)
690 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
691 unsigned int idx = 0;
692 basic_block bb;
693 bool inner = false;
695 if (bb_to_omp_idx)
697 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
698 if (bb_to_omp_idx[for_bb->index] != 0)
699 inner = true;
702 /* If the dispatcher has been created already, then there are basic
703 blocks with abnormal edges to it, so just make a new edge to
704 for_bb. */
705 if (*dispatcher == NULL)
707 /* Check if there are any basic blocks that need to have
708 abnormal edges to this dispatcher. If there are none, return
709 early. */
710 if (bb_to_omp_idx == NULL)
712 if (bbs->is_empty ())
713 return;
715 else
717 FOR_EACH_VEC_ELT (*bbs, idx, bb)
718 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
719 break;
720 if (bb == NULL)
721 return;
724 /* Create the dispatcher bb. */
725 *dispatcher = create_basic_block (NULL, NULL, for_bb);
726 if (computed_goto)
728 /* Factor computed gotos into a common computed goto site. Also
729 record the location of that site so that we can un-factor the
730 gotos after we have converted back to normal form. */
731 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
733 /* Create the destination of the factored goto. Each original
734 computed goto will put its desired destination into this
735 variable and jump to the label we create immediately below. */
736 tree var = create_tmp_var (ptr_type_node, "gotovar");
738 /* Build a label for the new block which will contain the
739 factored computed goto. */
740 tree factored_label_decl
741 = create_artificial_label (UNKNOWN_LOCATION);
742 gimple factored_computed_goto_label
743 = gimple_build_label (factored_label_decl);
744 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
746 /* Build our new computed goto. */
747 gimple factored_computed_goto = gimple_build_goto (var);
748 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
750 FOR_EACH_VEC_ELT (*bbs, idx, bb)
752 if (bb_to_omp_idx
753 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
754 continue;
756 gsi = gsi_last_bb (bb);
757 gimple last = gsi_stmt (gsi);
759 gcc_assert (computed_goto_p (last));
761 /* Copy the original computed goto's destination into VAR. */
762 gimple assignment
763 = gimple_build_assign (var, gimple_goto_dest (last));
764 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
766 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
767 e->goto_locus = gimple_location (last);
768 gsi_remove (&gsi, true);
771 else
773 tree arg = inner ? boolean_true_node : boolean_false_node;
774 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
775 1, arg);
776 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
777 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
779 /* Create predecessor edges of the dispatcher. */
780 FOR_EACH_VEC_ELT (*bbs, idx, bb)
782 if (bb_to_omp_idx
783 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
784 continue;
785 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
790 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
793 /* Join all the blocks in the flowgraph. */
795 static void
796 make_edges (void)
798 basic_block bb;
799 struct omp_region *cur_region = NULL;
800 auto_vec<basic_block> ab_edge_goto;
801 auto_vec<basic_block> ab_edge_call;
802 int *bb_to_omp_idx = NULL;
803 int cur_omp_region_idx = 0;
805 /* Create an edge from entry to the first block with executable
806 statements in it. */
807 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
808 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
809 EDGE_FALLTHRU);
811 /* Traverse the basic block array placing edges. */
812 FOR_EACH_BB_FN (bb, cfun)
814 gimple last = last_stmt (bb);
815 bool fallthru;
817 if (bb_to_omp_idx)
818 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
820 if (last)
822 enum gimple_code code = gimple_code (last);
823 switch (code)
825 case GIMPLE_GOTO:
826 if (make_goto_expr_edges (bb))
827 ab_edge_goto.safe_push (bb);
828 fallthru = false;
829 break;
830 case GIMPLE_RETURN:
832 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
833 e->goto_locus = gimple_location (last);
834 fallthru = false;
836 break;
837 case GIMPLE_COND:
838 make_cond_expr_edges (bb);
839 fallthru = false;
840 break;
841 case GIMPLE_SWITCH:
842 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
843 fallthru = false;
844 break;
845 case GIMPLE_RESX:
846 make_eh_edges (last);
847 fallthru = false;
848 break;
849 case GIMPLE_EH_DISPATCH:
850 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
851 break;
853 case GIMPLE_CALL:
854 /* If this function receives a nonlocal goto, then we need to
855 make edges from this call site to all the nonlocal goto
856 handlers. */
857 if (stmt_can_make_abnormal_goto (last))
858 ab_edge_call.safe_push (bb);
860 /* If this statement has reachable exception handlers, then
861 create abnormal edges to them. */
862 make_eh_edges (last);
864 /* BUILTIN_RETURN is really a return statement. */
865 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
867 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
868 fallthru = false;
870 /* Some calls are known not to return. */
871 else
872 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
873 break;
875 case GIMPLE_ASSIGN:
876 /* A GIMPLE_ASSIGN may throw internally and thus be considered
877 control-altering. */
878 if (is_ctrl_altering_stmt (last))
879 make_eh_edges (last);
880 fallthru = true;
881 break;
883 case GIMPLE_ASM:
884 make_gimple_asm_edges (bb);
885 fallthru = true;
886 break;
888 CASE_GIMPLE_OMP:
889 fallthru = make_gimple_omp_edges (bb, &cur_region,
890 &cur_omp_region_idx);
891 if (cur_region && bb_to_omp_idx == NULL)
892 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
893 break;
895 case GIMPLE_TRANSACTION:
897 tree abort_label
898 = gimple_transaction_label (as_a <gtransaction *> (last));
899 if (abort_label)
900 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
901 fallthru = true;
903 break;
905 default:
906 gcc_assert (!stmt_ends_bb_p (last));
907 fallthru = true;
910 else
911 fallthru = true;
913 if (fallthru)
914 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
917 /* Computed gotos are hell to deal with, especially if there are
918 lots of them with a large number of destinations. So we factor
919 them to a common computed goto location before we build the
920 edge list. After we convert back to normal form, we will un-factor
921 the computed gotos since factoring introduces an unwanted jump.
922 For non-local gotos and abnormal edges from calls to calls that return
923 twice or forced labels, factor the abnormal edges too, by having all
924 abnormal edges from the calls go to a common artificial basic block
925 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
926 basic block to all forced labels and calls returning twice.
927 We do this per-OpenMP structured block, because those regions
928 are guaranteed to be single entry single exit by the standard,
929 so it is not allowed to enter or exit such regions abnormally this way,
930 thus all computed gotos, non-local gotos and setjmp/longjmp calls
931 must not transfer control across SESE region boundaries. */
932 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
934 gimple_stmt_iterator gsi;
935 basic_block dispatcher_bb_array[2] = { NULL, NULL };
936 basic_block *dispatcher_bbs = dispatcher_bb_array;
937 int count = n_basic_blocks_for_fn (cfun);
939 if (bb_to_omp_idx)
940 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
942 FOR_EACH_BB_FN (bb, cfun)
944 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
946 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
947 tree target;
949 if (!label_stmt)
950 break;
952 target = gimple_label_label (label_stmt);
954 /* Make an edge to every label block that has been marked as a
955 potential target for a computed goto or a non-local goto. */
956 if (FORCED_LABEL (target))
957 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
958 &ab_edge_goto, true);
959 if (DECL_NONLOCAL (target))
961 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
962 &ab_edge_call, false);
963 break;
967 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
968 gsi_next_nondebug (&gsi);
969 if (!gsi_end_p (gsi))
971 /* Make an edge to every setjmp-like call. */
972 gimple call_stmt = gsi_stmt (gsi);
973 if (is_gimple_call (call_stmt)
974 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
975 || gimple_call_builtin_p (call_stmt,
976 BUILT_IN_SETJMP_RECEIVER)))
977 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
978 &ab_edge_call, false);
982 if (bb_to_omp_idx)
983 XDELETE (dispatcher_bbs);
986 XDELETE (bb_to_omp_idx);
988 free_omp_regions ();
990 /* Fold COND_EXPR_COND of each COND_EXPR. */
991 fold_cond_expr_cond ();
994 /* Find the next available discriminator value for LOCUS. The
995 discriminator distinguishes among several basic blocks that
996 share a common locus, allowing for more accurate sample-based
997 profiling. */
999 static int
1000 next_discriminator_for_locus (location_t locus)
1002 struct locus_discrim_map item;
1003 struct locus_discrim_map **slot;
1005 item.locus = locus;
1006 item.discriminator = 0;
1007 slot = discriminator_per_locus->find_slot_with_hash (
1008 &item, LOCATION_LINE (locus), INSERT);
1009 gcc_assert (slot);
1010 if (*slot == HTAB_EMPTY_ENTRY)
1012 *slot = XNEW (struct locus_discrim_map);
1013 gcc_assert (*slot);
1014 (*slot)->locus = locus;
1015 (*slot)->discriminator = 0;
1017 (*slot)->discriminator++;
1018 return (*slot)->discriminator;
1021 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1023 static bool
1024 same_line_p (location_t locus1, location_t locus2)
1026 expanded_location from, to;
1028 if (locus1 == locus2)
1029 return true;
1031 from = expand_location (locus1);
1032 to = expand_location (locus2);
1034 if (from.line != to.line)
1035 return false;
1036 if (from.file == to.file)
1037 return true;
1038 return (from.file != NULL
1039 && to.file != NULL
1040 && filename_cmp (from.file, to.file) == 0);
1043 /* Assign discriminators to each basic block. */
1045 static void
1046 assign_discriminators (void)
1048 basic_block bb;
1050 FOR_EACH_BB_FN (bb, cfun)
1052 edge e;
1053 edge_iterator ei;
1054 gimple last = last_stmt (bb);
1055 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1057 if (locus == UNKNOWN_LOCATION)
1058 continue;
1060 FOR_EACH_EDGE (e, ei, bb->succs)
1062 gimple first = first_non_label_stmt (e->dest);
1063 gimple last = last_stmt (e->dest);
1064 if ((first && same_line_p (locus, gimple_location (first)))
1065 || (last && same_line_p (locus, gimple_location (last))))
1067 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1068 bb->discriminator = next_discriminator_for_locus (locus);
1069 else
1070 e->dest->discriminator = next_discriminator_for_locus (locus);
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the false edge is NULL-checked here; presumably
     make_edge can return NULL when an edge to that destination already
     exists (e.g. then_bb == else_bb), which cannot happen for the first
     edge created above — confirm against make_edge's contract.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1110 /* Called for each element in the hash table (P) as we delete the
1111 edge to cases hash table.
1113 Clear all the TREE_CHAINs to prevent problems with copying of
1114 SWITCH_EXPRs and structure sharing rules, then free the hash table
1115 element. */
1117 bool
1118 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1120 tree t, next;
1122 for (t = value; t; t = next)
1124 next = CASE_CHAIN (t);
1125 CASE_CHAIN (t) = NULL;
1128 return true;
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  /* Recording must not already be in progress.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  /* Remember which switch-ending blocks are touched so their case
     labels can be re-grouped in end_recording_case_labels.  */
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1141 /* Return nonzero if we are recording information for case labels. */
1143 static bool
1144 recording_case_labels_p (void)
1146 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Break the CASE_CHAIN links before destroying the map, then drop
     the map itself so recording_case_labels_p becomes false.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  /* Re-group case labels of every switch whose block was touched while
     recording; the block may have been removed in the meantime.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Chaining goes through CASE_CHAIN, so the table
	 entry always holds the head of the per-edge chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* E was inserted by the loop above, so the lookup must succeed now.  */
  return *edge_to_cases->get (e);
}
1214 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1216 static void
1217 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1219 size_t i, n;
1221 n = gimple_switch_num_labels (entry);
1223 for (i = 0; i < n; ++i)
1225 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1226 basic_block label_bb = label_to_block (lab);
1227 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigned DEST a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* Out-of-range UID: the label is not mapped in IFUN.  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  The goto statement itself is
     removed here; the edge's goto_locus preserves its location.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
1281 /* Create edges for an asm statement with labels at block BB. */
1283 static void
1284 make_gimple_asm_edges (basic_block bb)
1286 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1287 int i, n = gimple_asm_nlabels (stmt);
1289 for (i = 0; i < n; ++i)
1291 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1292 basic_block label_bb = label_to_block (label);
1293 make_edge (bb, label_bb, 0);
1297 /*---------------------------------------------------------------------------
1298 Flowgraph analysis
1299 ---------------------------------------------------------------------------*/
1301 /* Cleanup useless labels in basic blocks. This is something we wish
1302 to do early because it allows us to group case labels before creating
1303 the edges for the CFG, and it speeds up block statement iterators in
1304 all passes later on.
1305 We rerun this pass after CFG is created, to get rid of the labels that
1306 are no longer referenced. After then we do not run it any more, since
1307 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Allocated and freed by cleanup_dead_labels; indexed by bb->index.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  Also
   marks that block's entry in label_for_bb as used.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  Redirects each
   landing-pad and region label to the leading label of its block.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Landing pads: start at index 1; index 0 is unused.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    /* Move the landing-pad number over to the new label.  */
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  Start at index 1;
     index 0 is the default label.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  A NULL_TREE label marks the slot for the
	 compression pass below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  Skips the NULL_TREE slots created above.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1653 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1654 and scan the sorted vector of cases. Combine the ones jumping to the
1655 same label. */
1657 void
1658 group_case_labels (void)
1660 basic_block bb;
1662 FOR_EACH_BB_FN (bb, cfun)
1664 gimple stmt = last_stmt (bb);
1665 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1666 group_case_labels_stmt (as_a <gswitch *> (stmt));
/* Checks whether we can merge block B into block A.  Returns false as
   soon as any structural, EH, label, loop, SSA or debug-location
   constraint forbids the merge.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  /* A must have exactly one successor, which is B, reached by a
     non-complex edge, and B must have A as its only predecessor.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && b->loop_father->header == a)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if a neighbouring statement on either
	 side of the edge already carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  NAME must end up with zero uses;
   affected statements are folded and updated, and loop structures that
   mention NAME are adjusted as well.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; track the current one.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  Callers must have established that the
   merge is legal (see gimple_can_merge_blocks_p); B's statement
   sequence is appended to A and B is left empty.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was a landing pad, the pad no longer exists.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1980 /* Return the one of two successors of BB that is not reachable by a
1981 complex edge, if there is one. Else, return BB. We use
1982 this in optimizations that use post-dominators for their heuristics,
1983 to catch the cases in C++ where function calls are involved. */
1985 basic_block
1986 single_noncomplex_succ (basic_block bb)
1988 edge e0, e1;
1989 if (EDGE_COUNT (bb->succs) != 2)
1990 return bb;
1992 e0 = EDGE_SUCC (bb, 0);
1993 e1 = EDGE_SUCC (bb, 1);
1994 if (e0->flags & EDGE_COMPLEX)
1995 return e1->dest;
1996 if (e1->flags & EDGE_COMPLEX)
1997 return e0->dest;
1999 return bb;
2002 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2004 void
2005 notice_special_calls (gcall *call)
2007 int flags = gimple_call_flags (call);
2009 if (flags & ECF_MAY_BE_ALLOCA)
2010 cfun->calls_alloca = true;
2011 if (flags & ECF_RETURNS_TWICE)
2012 cfun->calls_setjmp = true;
2016 /* Clear flags set by notice_special_calls. Used by dead code removal
2017 to update the flags. */
2019 void
2020 clear_special_calls (void)
2022 cfun->calls_alloca = false;
2023 cfun->calls_setjmp = false;
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Each removal shifts the vector,
     so always strip the edge at index 0.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  Forced/non-local labels are
   rescued into the preceding block rather than deleted.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Move the rescued label to the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* The removal invalidated I; reposition at the (new) last
	     statement, or step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only constant predicates determine a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* is_ctrl_stmt admits no other statement kinds.  */
  gcc_unreachable ();
}
2169 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2170 statement, determine which of the outgoing edges will be taken out of the
2171 block. Return NULL if either edge may be taken. */
2173 static edge
2174 find_taken_edge_computed_goto (basic_block bb, tree val)
2176 basic_block dest;
2177 edge e = NULL;
2179 dest = label_to_block (val);
2180 if (dest)
2182 e = find_edge (bb, dest);
2183 gcc_assert (e != NULL);
2186 return e;
2189 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2190 statement, determine which of the two edges will be taken out of the
2191 block. Return NULL if either edge may be taken. */
2193 static edge
2194 find_taken_edge_cond_expr (basic_block bb, tree val)
2196 edge true_edge, false_edge;
2198 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2200 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2201 return (integer_zerop (val) ? false_edge : true_edge);
2204 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2205 statement, determine which edge will be taken out of the block. Return
2206 NULL if any edge may be taken. */
2208 static edge
2209 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2210 tree val)
2212 basic_block dest_bb;
2213 edge e;
2214 tree taken_case;
2216 taken_case = find_case_label_for_value (switch_stmt, val);
2217 dest_bb = label_to_block (CASE_LABEL (taken_case));
2219 e = find_edge (bb, dest_bb);
2220 gcc_assert (e);
2221 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  Falls
   back to the default label when no case matches.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: labels below LOW have CASE_LOW <= VAL, labels at or
     above HIGH have CASE_LOW > VAL.  Index 0 (the default) is never
     examined because LOW starts there and only candidates > LOW are
     probed.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
/* Dump a basic block on stderr.  Intended for use from a debugger.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2276 /* Dump basic block with index N on stderr. */
2278 basic_block
2279 gimple_debug_bb_n (int n)
2281 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2282 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2324 /* Dump CFG statistics on FILE. */
2326 void
2327 dump_cfg_stats (FILE *file)
2329 static long max_num_merged_labels = 0;
2330 unsigned long size, total = 0;
2331 long num_edges;
2332 basic_block bb;
2333 const char * const fmt_str = "%-30s%-13s%12s\n";
2334 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2335 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2336 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2337 const char *funcname = current_function_name ();
2339 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2341 fprintf (file, "---------------------------------------------------------\n");
2342 fprintf (file, fmt_str, "", " Number of ", "Memory");
2343 fprintf (file, fmt_str, "", " instances ", "used ");
2344 fprintf (file, "---------------------------------------------------------\n");
2346 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2347 total += size;
2348 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2349 SCALE (size), LABEL (size));
2351 num_edges = 0;
2352 FOR_EACH_BB_FN (bb, cfun)
2353 num_edges += EDGE_COUNT (bb->succs);
2354 size = num_edges * sizeof (struct edge_def);
2355 total += size;
2356 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2358 fprintf (file, "---------------------------------------------------------\n");
2359 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2360 LABEL (total));
2361 fprintf (file, "---------------------------------------------------------\n");
2362 fprintf (file, "\n");
2364 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2365 max_num_merged_labels = cfg_stats.num_merged_labels;
2367 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2368 cfg_stats.num_merged_labels, max_num_merged_labels);
2370 fprintf (file, "\n");
2374 /* Dump CFG statistics on stderr. Keep extern so that it's always
2375 linked in the final executable. */
2377 DEBUG_FUNCTION void
2378 debug_cfg_stats (void)
2380 dump_cfg_stats (stderr);
2383 /*---------------------------------------------------------------------------
2384 Miscellaneous helpers
2385 ---------------------------------------------------------------------------*/
2387 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2388 flow. Transfers of control flow associated with EH are excluded. */
2390 static bool
2391 call_can_make_abnormal_goto (gimple t)
2393 /* If the function has no non-local labels, then a call cannot make an
2394 abnormal transfer of control. */
2395 if (!cfun->has_nonlocal_label
2396 && !cfun->calls_setjmp)
2397 return false;
2399 /* Likewise if the call has no side effects. */
2400 if (!gimple_has_side_effects (t))
2401 return false;
2403 /* Likewise if the called function is leaf. */
2404 if (gimple_call_flags (t) & ECF_LEAF)
2405 return false;
2407 return true;
2411 /* Return true if T can make an abnormal transfer of control flow.
2412 Transfers of control flow associated with EH are excluded. */
2414 bool
2415 stmt_can_make_abnormal_goto (gimple t)
2417 if (computed_goto_p (t))
2418 return true;
2419 if (is_gimple_call (t))
2420 return call_can_make_abnormal_goto (t);
2421 return false;
2425 /* Return true if T represents a stmt that always transfers control. */
2427 bool
2428 is_ctrl_stmt (gimple t)
2430 switch (gimple_code (t))
2432 case GIMPLE_COND:
2433 case GIMPLE_SWITCH:
2434 case GIMPLE_GOTO:
2435 case GIMPLE_RETURN:
2436 case GIMPLE_RESX:
2437 return true;
2438 default:
2439 return false;
2444 /* Return true if T is a statement that may alter the flow of control
2445 (e.g., a call to a non-returning function). */
2447 bool
2448 is_ctrl_altering_stmt (gimple t)
2450 gcc_assert (t);
2452 switch (gimple_code (t))
2454 case GIMPLE_CALL:
2455 /* Per stmt call flag indicates whether the call could alter
2456 controlflow. */
2457 if (gimple_call_ctrl_altering_p (t))
2458 return true;
2459 break;
2461 case GIMPLE_EH_DISPATCH:
2462 /* EH_DISPATCH branches to the individual catch handlers at
2463 this level of a try or allowed-exceptions region. It can
2464 fallthru to the next statement as well. */
2465 return true;
2467 case GIMPLE_ASM:
2468 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2469 return true;
2470 break;
2472 CASE_GIMPLE_OMP:
2473 /* OpenMP directives alter control flow. */
2474 return true;
2476 case GIMPLE_TRANSACTION:
2477 /* A transaction start alters control flow. */
2478 return true;
2480 default:
2481 break;
2484 /* If a statement can throw, it alters control flow. */
2485 return stmt_can_throw_internal (t);
2489 /* Return true if T is a simple local goto. */
2491 bool
2492 simple_goto_p (gimple t)
2494 return (gimple_code (t) == GIMPLE_GOTO
2495 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2499 /* Return true if STMT should start a new basic block. PREV_STMT is
2500 the statement preceding STMT. It is used when STMT is a label or a
2501 case label. Labels should only start a new basic block if their
2502 previous statement wasn't a label. Otherwise, sequence of labels
2503 would generate unnecessary basic blocks that only contain a single
2504 label. */
2506 static inline bool
2507 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2509 if (stmt == NULL)
2510 return false;
2512 /* Labels start a new basic block only if the preceding statement
2513 wasn't a label of the same type. This prevents the creation of
2514 consecutive blocks that have nothing but a single label. */
2515 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2517 /* Nonlocal and computed GOTO targets always start a new block. */
2518 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2519 || FORCED_LABEL (gimple_label_label (label_stmt)))
2520 return true;
2522 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2524 if (DECL_NONLOCAL (gimple_label_label (
2525 as_a <glabel *> (prev_stmt))))
2526 return true;
2528 cfg_stats.num_merged_labels++;
2529 return false;
2531 else
2532 return true;
2534 else if (gimple_code (stmt) == GIMPLE_CALL
2535 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2536 /* setjmp acts similar to a nonlocal GOTO target and thus should
2537 start a new block. */
2538 return true;
2540 return false;
2544 /* Return true if T should end a basic block. */
2546 bool
2547 stmt_ends_bb_p (gimple t)
2549 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2552 /* Remove block annotations and other data structures. */
2554 void
2555 delete_tree_cfg_annotations (void)
2557 vec_free (label_to_block_map_for_fn (cfun));
2561 /* Return the first statement in basic block BB. */
2563 gimple
2564 first_stmt (basic_block bb)
2566 gimple_stmt_iterator i = gsi_start_bb (bb);
2567 gimple stmt = NULL;
2569 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2571 gsi_next (&i);
2572 stmt = NULL;
2574 return stmt;
2577 /* Return the first non-label statement in basic block BB. */
2579 static gimple
2580 first_non_label_stmt (basic_block bb)
2582 gimple_stmt_iterator i = gsi_start_bb (bb);
2583 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2584 gsi_next (&i);
2585 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2588 /* Return the last statement in basic block BB. */
2590 gimple
2591 last_stmt (basic_block bb)
2593 gimple_stmt_iterator i = gsi_last_bb (bb);
2594 gimple stmt = NULL;
2596 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2598 gsi_prev (&i);
2599 stmt = NULL;
2601 return stmt;
2604 /* Return the last statement of an otherwise empty block. Return NULL
2605 if the block is totally empty, or if it contains more than one
2606 statement. */
2608 gimple
2609 last_and_only_stmt (basic_block bb)
2611 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2612 gimple last, prev;
2614 if (gsi_end_p (i))
2615 return NULL;
2617 last = gsi_stmt (i);
2618 gsi_prev_nondebug (&i);
2619 if (gsi_end_p (i))
2620 return last;
2622 /* Empty statements should no longer appear in the instruction stream.
2623 Everything that might have appeared before should be deleted by
2624 remove_useless_stmts, and the optimizers should just gsi_remove
2625 instead of smashing with build_empty_stmt.
2627 Thus the only thing that should appear here in a block containing
2628 one executable statement is a label. */
2629 prev = gsi_stmt (i);
2630 if (gimple_code (prev) == GIMPLE_LABEL)
2631 return last;
2632 else
2633 return NULL;
2636 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2638 static void
2639 reinstall_phi_args (edge new_edge, edge old_edge)
2641 edge_var_map *vm;
2642 int i;
2643 gphi_iterator phis;
2645 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2646 if (!v)
2647 return;
2649 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2650 v->iterate (i, &vm) && !gsi_end_p (phis);
2651 i++, gsi_next (&phis))
2653 gphi *phi = phis.phi ();
2654 tree result = redirect_edge_var_map_result (vm);
2655 tree arg = redirect_edge_var_map_def (vm);
2657 gcc_assert (result == gimple_phi_result (phi));
2659 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2662 redirect_edge_var_map_clear (old_edge);
2665 /* Returns the basic block after which the new basic block created
2666 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2667 near its "logical" location. This is of most help to humans looking
2668 at debugging dumps. */
2670 basic_block
2671 split_edge_bb_loc (edge edge_in)
2673 basic_block dest = edge_in->dest;
2674 basic_block dest_prev = dest->prev_bb;
2676 if (dest_prev)
2678 edge e = find_edge (dest_prev, dest);
2679 if (e && !(e->flags & EDGE_COMPLEX))
2680 return edge_in->src;
2682 return dest_prev;
2685 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2686 Abort on abnormal edges. */
2688 static basic_block
2689 gimple_split_edge (edge edge_in)
2691 basic_block new_bb, after_bb, dest;
2692 edge new_edge, e;
2694 /* Abnormal edges cannot be split. */
2695 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2697 dest = edge_in->dest;
2699 after_bb = split_edge_bb_loc (edge_in);
2701 new_bb = create_empty_bb (after_bb);
2702 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2703 new_bb->count = edge_in->count;
2704 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2705 new_edge->probability = REG_BR_PROB_BASE;
2706 new_edge->count = edge_in->count;
2708 e = redirect_edge_and_branch (edge_in, new_bb);
2709 gcc_assert (e == edge_in);
2710 reinstall_phi_args (new_edge, e);
2712 return new_bb;
2716 /* Verify properties of the address expression T with base object BASE. */
2718 static tree
2719 verify_address (tree t, tree base)
2721 bool old_constant;
2722 bool old_side_effects;
2723 bool new_constant;
2724 bool new_side_effects;
2726 old_constant = TREE_CONSTANT (t);
2727 old_side_effects = TREE_SIDE_EFFECTS (t);
2729 recompute_tree_invariant_for_addr_expr (t);
2730 new_side_effects = TREE_SIDE_EFFECTS (t);
2731 new_constant = TREE_CONSTANT (t);
2733 if (old_constant != new_constant)
2735 error ("constant not recomputed when ADDR_EXPR changed");
2736 return t;
2738 if (old_side_effects != new_side_effects)
2740 error ("side effects not recomputed when ADDR_EXPR changed");
2741 return t;
2744 if (!(TREE_CODE (base) == VAR_DECL
2745 || TREE_CODE (base) == PARM_DECL
2746 || TREE_CODE (base) == RESULT_DECL))
2747 return NULL_TREE;
2749 if (DECL_GIMPLE_REG_P (base))
2751 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2752 return base;
2755 return NULL_TREE;
2758 /* Callback for walk_tree, check that all elements with address taken are
2759 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2760 inside a PHI node. */
/* Returns the offending subtree (after reporting with error ()) when a
   problem is found, NULL when *TP verifies OK.  */
2762 static tree
2763 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2765 tree t = *tp, x;
2767 if (TYPE_P (t))
2768 *walk_subtrees = 0;
2770 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2771 #define CHECK_OP(N, MSG) \
2772 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2773 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2775 switch (TREE_CODE (t))
2777 case SSA_NAME:
2778 if (SSA_NAME_IN_FREE_LIST (t))
2780 error ("SSA name in freelist but still referenced");
2781 return *tp;
2783 break;
2785 case INDIRECT_REF:
2786 error ("INDIRECT_REF in gimple IL");
2787 return t;
2789 case MEM_REF:
2790 x = TREE_OPERAND (t, 0);
2791 if (!POINTER_TYPE_P (TREE_TYPE (x))
2792 || !is_gimple_mem_ref_addr (x))
2794 error ("invalid first operand of MEM_REF");
2795 return x;
2797 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2798 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2800 error ("invalid offset operand of MEM_REF");
2801 return TREE_OPERAND (t, 1);
2803 if (TREE_CODE (x) == ADDR_EXPR
2804 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2805 return x;
2806 *walk_subtrees = 0;
2807 break;
2809 case ASSERT_EXPR:
2810 x = fold (ASSERT_EXPR_COND (t));
2811 if (x == boolean_false_node)
2813 error ("ASSERT_EXPR with an always-false condition");
2814 return *tp;
2816 break;
2818 case MODIFY_EXPR:
2819 error ("MODIFY_EXPR not expected while having tuples");
2820 return *tp;
2822 case ADDR_EXPR:
2824 tree tem;
2826 gcc_assert (is_gimple_address (t));
2828 /* Skip any references (they will be checked when we recurse down the
2829 tree) and ensure that any variable used as a prefix is marked
2830 addressable. */
2831 for (x = TREE_OPERAND (t, 0);
2832 handled_component_p (x);
2833 x = TREE_OPERAND (x, 0))
2836 if ((tem = verify_address (t, x)))
2837 return tem;
2839 if (!(TREE_CODE (x) == VAR_DECL
2840 || TREE_CODE (x) == PARM_DECL
2841 || TREE_CODE (x) == RESULT_DECL))
2842 return NULL;
2844 if (!TREE_ADDRESSABLE (x))
2846 error ("address taken, but ADDRESSABLE bit not set");
2847 return x;
2850 break;
2853 case COND_EXPR:
2854 x = COND_EXPR_COND (t);
2855 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2857 error ("non-integral used in condition");
2858 return x;
2860 if (!is_gimple_condexpr (x))
2862 error ("invalid conditional operand");
2863 return x;
2865 break;
2867 case NON_LVALUE_EXPR:
2868 case TRUTH_NOT_EXPR:
2869 gcc_unreachable ();
/* Unary operators: the single operand must be a GIMPLE value.  */
2871 CASE_CONVERT:
2872 case FIX_TRUNC_EXPR:
2873 case FLOAT_EXPR:
2874 case NEGATE_EXPR:
2875 case ABS_EXPR:
2876 case BIT_NOT_EXPR:
2877 CHECK_OP (0, "invalid operand to unary operator");
2878 break;
2880 case REALPART_EXPR:
2881 case IMAGPART_EXPR:
2882 case BIT_FIELD_REF:
2883 if (!is_gimple_reg_type (TREE_TYPE (t)))
2885 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2886 return t;
2889 if (TREE_CODE (t) == BIT_FIELD_REF)
2891 tree t0 = TREE_OPERAND (t, 0);
2892 tree t1 = TREE_OPERAND (t, 1);
2893 tree t2 = TREE_OPERAND (t, 2);
2894 if (!tree_fits_uhwi_p (t1)
2895 || !tree_fits_uhwi_p (t2))
2897 error ("invalid position or size operand to BIT_FIELD_REF");
2898 return t;
2900 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2901 && (TYPE_PRECISION (TREE_TYPE (t))
2902 != tree_to_uhwi (t1)))
2904 error ("integral result type precision does not match "
2905 "field size of BIT_FIELD_REF");
2906 return t;
2908 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2909 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2910 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2911 != tree_to_uhwi (t1)))
2913 error ("mode precision of non-integral result does not "
2914 "match field size of BIT_FIELD_REF");
2915 return t;
2917 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2918 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2919 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2921 error ("position plus size exceeds size of referenced object in "
2922 "BIT_FIELD_REF");
2923 return t;
2926 t = TREE_OPERAND (t, 0);
2928 /* Fall-through. */
2929 case COMPONENT_REF:
2930 case ARRAY_REF:
2931 case ARRAY_RANGE_REF:
2932 case VIEW_CONVERT_EXPR:
2933 /* We have a nest of references. Verify that each of the operands
2934 that determine where to reference is either a constant or a variable,
2935 verify that the base is valid, and then show we've already checked
2936 the subtrees. */
2937 while (handled_component_p (t))
2939 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2940 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2941 else if (TREE_CODE (t) == ARRAY_REF
2942 || TREE_CODE (t) == ARRAY_RANGE_REF)
2944 CHECK_OP (1, "invalid array index");
2945 if (TREE_OPERAND (t, 2))
2946 CHECK_OP (2, "invalid array lower bound");
2947 if (TREE_OPERAND (t, 3))
2948 CHECK_OP (3, "invalid array stride");
2950 else if (TREE_CODE (t) == BIT_FIELD_REF
2951 || TREE_CODE (t) == REALPART_EXPR
2952 || TREE_CODE (t) == IMAGPART_EXPR)
2954 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2955 "REALPART_EXPR");
2956 return t;
2959 t = TREE_OPERAND (t, 0);
/* T is now the base of the reference chain.  */
2962 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2964 error ("invalid reference prefix");
2965 return t;
2967 *walk_subtrees = 0;
2968 break;
2969 case PLUS_EXPR:
2970 case MINUS_EXPR:
2971 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2972 POINTER_PLUS_EXPR. */
2973 if (POINTER_TYPE_P (TREE_TYPE (t)))
2975 error ("invalid operand to plus/minus, type is a pointer");
2976 return t;
2978 CHECK_OP (0, "invalid operand to binary operator");
2979 CHECK_OP (1, "invalid operand to binary operator");
2980 break;
2982 case POINTER_PLUS_EXPR:
2983 /* Check to make sure the first operand is a pointer or reference type. */
2984 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2986 error ("invalid operand to pointer plus, first operand is not a pointer");
2987 return t;
2989 /* Check to make sure the second operand is a ptrofftype. */
2990 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2992 error ("invalid operand to pointer plus, second operand is not an "
2993 "integer type of appropriate width");
2994 return t;
2996 /* FALLTHROUGH */
2997 case LT_EXPR:
2998 case LE_EXPR:
2999 case GT_EXPR:
3000 case GE_EXPR:
3001 case EQ_EXPR:
3002 case NE_EXPR:
3003 case UNORDERED_EXPR:
3004 case ORDERED_EXPR:
3005 case UNLT_EXPR:
3006 case UNLE_EXPR:
3007 case UNGT_EXPR:
3008 case UNGE_EXPR:
3009 case UNEQ_EXPR:
3010 case LTGT_EXPR:
3011 case MULT_EXPR:
3012 case TRUNC_DIV_EXPR:
3013 case CEIL_DIV_EXPR:
3014 case FLOOR_DIV_EXPR:
3015 case ROUND_DIV_EXPR:
3016 case TRUNC_MOD_EXPR:
3017 case CEIL_MOD_EXPR:
3018 case FLOOR_MOD_EXPR:
3019 case ROUND_MOD_EXPR:
3020 case RDIV_EXPR:
3021 case EXACT_DIV_EXPR:
3022 case MIN_EXPR:
3023 case MAX_EXPR:
3024 case LSHIFT_EXPR:
3025 case RSHIFT_EXPR:
3026 case LROTATE_EXPR:
3027 case RROTATE_EXPR:
3028 case BIT_IOR_EXPR:
3029 case BIT_XOR_EXPR:
3030 case BIT_AND_EXPR:
3031 CHECK_OP (0, "invalid operand to binary operator");
3032 CHECK_OP (1, "invalid operand to binary operator");
3033 break;
3035 case CONSTRUCTOR:
3036 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3037 *walk_subtrees = 0;
3038 break;
3040 case CASE_LABEL_EXPR:
3041 if (CASE_CHAIN (t))
3043 error ("invalid CASE_CHAIN");
3044 return t;
3046 break;
3048 default:
3049 break;
3051 return NULL;
3053 #undef CHECK_OP
3057 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3058 Returns true if there is an error, otherwise false. */
3060 static bool
3061 verify_types_in_gimple_min_lval (tree expr)
3063 tree op;
3065 if (is_gimple_id (expr))
3066 return false;
3068 if (TREE_CODE (expr) != TARGET_MEM_REF
3069 && TREE_CODE (expr) != MEM_REF)
3071 error ("invalid expression for min lvalue");
3072 return true;
3075 /* TARGET_MEM_REFs are strange beasts. */
3076 if (TREE_CODE (expr) == TARGET_MEM_REF)
3077 return false;
3079 op = TREE_OPERAND (expr, 0);
3080 if (!is_gimple_val (op))
3082 error ("invalid operand in indirect reference");
3083 debug_generic_stmt (op);
3084 return true;
3086 /* Memory references now generally can involve a value conversion. */
3088 return false;
3091 /* Verify if EXPR is a valid GIMPLE reference expression. If
3092 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3093 if there is an error, otherwise false. */
3095 static bool
3096 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
/* Peel the chain of handled components, checking each level as we go.  */
3098 while (handled_component_p (expr))
3100 tree op = TREE_OPERAND (expr, 0);
3102 if (TREE_CODE (expr) == ARRAY_REF
3103 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3105 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3106 || (TREE_OPERAND (expr, 2)
3107 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3108 || (TREE_OPERAND (expr, 3)
3109 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3111 error ("invalid operands to array reference");
3112 debug_generic_stmt (expr);
3113 return true;
3117 /* Verify if the reference array element types are compatible. */
3118 if (TREE_CODE (expr) == ARRAY_REF
3119 && !useless_type_conversion_p (TREE_TYPE (expr),
3120 TREE_TYPE (TREE_TYPE (op))))
3122 error ("type mismatch in array reference");
3123 debug_generic_stmt (TREE_TYPE (expr));
3124 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3125 return true;
3127 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3128 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3129 TREE_TYPE (TREE_TYPE (op))))
3131 error ("type mismatch in array range reference");
3132 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3133 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3134 return true;
3137 if ((TREE_CODE (expr) == REALPART_EXPR
3138 || TREE_CODE (expr) == IMAGPART_EXPR)
3139 && !useless_type_conversion_p (TREE_TYPE (expr),
3140 TREE_TYPE (TREE_TYPE (op))))
3142 error ("type mismatch in real/imagpart reference");
3143 debug_generic_stmt (TREE_TYPE (expr));
3144 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3145 return true;
3148 if (TREE_CODE (expr) == COMPONENT_REF
3149 && !useless_type_conversion_p (TREE_TYPE (expr),
3150 TREE_TYPE (TREE_OPERAND (expr, 1))))
3152 error ("type mismatch in component reference");
3153 debug_generic_stmt (TREE_TYPE (expr));
3154 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3155 return true;
3158 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3160 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3161 that their operand is not an SSA name or an invariant when
3162 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3163 bug). Otherwise there is nothing to verify, gross mismatches at
3164 most invoke undefined behavior. */
3165 if (require_lvalue
3166 && (TREE_CODE (op) == SSA_NAME
3167 || is_gimple_min_invariant (op)))
3169 error ("conversion of an SSA_NAME on the left hand side");
3170 debug_generic_stmt (expr);
3171 return true;
3173 else if (TREE_CODE (op) == SSA_NAME
3174 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3176 error ("conversion of register to a different size");
3177 debug_generic_stmt (expr);
3178 return true;
3180 else if (!handled_component_p (op))
3181 return false;
3184 expr = op;
/* EXPR is now the base of the reference.  */
3187 if (TREE_CODE (expr) == MEM_REF)
3189 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3191 error ("invalid address operand in MEM_REF");
3192 debug_generic_stmt (expr);
3193 return true;
3195 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3196 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3198 error ("invalid offset operand in MEM_REF");
3199 debug_generic_stmt (expr);
3200 return true;
3203 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3205 if (!TMR_BASE (expr)
3206 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3208 error ("invalid address operand in TARGET_MEM_REF");
3209 return true;
3211 if (!TMR_OFFSET (expr)
3212 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3213 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3215 error ("invalid offset operand in TARGET_MEM_REF");
3216 debug_generic_stmt (expr);
3217 return true;
3221 return ((require_lvalue || !is_gimple_min_invariant (expr))
3222 && verify_types_in_gimple_min_lval (expr));
3225 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3226 list of pointer-to types that is trivially convertible to DEST. */
3228 static bool
3229 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3231 tree src;
3233 if (!TYPE_POINTER_TO (src_obj))
3234 return true;
3236 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3237 if (useless_type_conversion_p (dest, src))
3238 return true;
3240 return false;
3243 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3244 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3246 static bool
3247 valid_fixed_convert_types_p (tree type1, tree type2)
3249 return (FIXED_POINT_TYPE_P (type1)
3250 && (INTEGRAL_TYPE_P (type2)
3251 || SCALAR_FLOAT_TYPE_P (type2)
3252 || FIXED_POINT_TYPE_P (type2)));
3255 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3256 is a problem, otherwise false. */
3258 static bool
3259 verify_gimple_call (gcall *stmt)
3261 tree fn = gimple_call_fn (stmt);
3262 tree fntype, fndecl;
3263 unsigned i;
/* Internal calls carry an internal function code instead of a fn tree,
   so exactly one of the two must be present.  */
3265 if (gimple_call_internal_p (stmt))
3267 if (fn)
3269 error ("gimple call has two targets");
3270 debug_generic_stmt (fn);
3271 return true;
3274 else
3276 if (!fn)
3278 error ("gimple call has no target");
3279 return true;
3283 if (fn && !is_gimple_call_addr (fn))
3285 error ("invalid function in gimple call");
3286 debug_generic_stmt (fn);
3287 return true;
3290 if (fn
3291 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3292 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3293 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3295 error ("non-function in gimple call");
3296 return true;
3299 fndecl = gimple_call_fndecl (stmt);
3300 if (fndecl
3301 && TREE_CODE (fndecl) == FUNCTION_DECL
3302 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3303 && !DECL_PURE_P (fndecl)
3304 && !TREE_READONLY (fndecl))
3306 error ("invalid pure const state for function");
3307 return true;
3310 if (gimple_call_lhs (stmt)
3311 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3312 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3314 error ("invalid LHS in gimple call");
3315 return true;
3318 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3320 error ("LHS in noreturn call");
3321 return true;
3324 fntype = gimple_call_fntype (stmt);
3325 if (fntype
3326 && gimple_call_lhs (stmt)
3327 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3328 TREE_TYPE (fntype))
3329 /* ??? At least C++ misses conversions at assignments from
3330 void * call results.
3331 ??? Java is completely off. Especially with functions
3332 returning java.lang.Object.
3333 For now simply allow arbitrary pointer type conversions. */
3334 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3335 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3337 error ("invalid conversion in gimple call");
3338 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3339 debug_generic_stmt (TREE_TYPE (fntype));
3340 return true;
3343 if (gimple_call_chain (stmt)
3344 && !is_gimple_val (gimple_call_chain (stmt)))
3346 error ("invalid static chain in gimple call");
3347 debug_generic_stmt (gimple_call_chain (stmt));
3348 return true;
3351 /* If there is a static chain argument, the call should either be
3352 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3353 if (gimple_call_chain (stmt)
3354 && fndecl
3355 && !DECL_STATIC_CHAIN (fndecl))
3357 error ("static chain with function that doesn%'t use one");
3358 return true;
3361 /* ??? The C frontend passes unpromoted arguments in case it
3362 didn't see a function declaration before the call. So for now
3363 leave the call arguments mostly unverified. Once we gimplify
3364 unit-at-a-time we have a chance to fix this. */
3366 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3368 tree arg = gimple_call_arg (stmt, i);
3369 if ((is_gimple_reg_type (TREE_TYPE (arg))
3370 && !is_gimple_val (arg))
3371 || (!is_gimple_reg_type (TREE_TYPE (arg))
3372 && !is_gimple_lvalue (arg)))
3374 error ("invalid argument to gimple call");
3375 debug_generic_expr (arg);
3376 return true;
3380 return false;
3383 /* Verifies the gimple comparison with the result type TYPE and
3384 the operands OP0 and OP1. */
/* Returns true after reporting with error () on failure, false when
   the comparison verifies.  */
3386 static bool
3387 verify_gimple_comparison (tree type, tree op0, tree op1)
3389 tree op0_type = TREE_TYPE (op0);
3390 tree op1_type = TREE_TYPE (op1);
3392 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3394 error ("invalid operands in gimple comparison");
3395 return true;
3398 /* For comparisons we do not have the operations type as the
3399 effective type the comparison is carried out in. Instead
3400 we require that either the first operand is trivially
3401 convertible into the second, or the other way around.
3402 Because we special-case pointers to void we allow
3403 comparisons of pointers with the same mode as well. */
3404 if (!useless_type_conversion_p (op0_type, op1_type)
3405 && !useless_type_conversion_p (op1_type, op0_type)
3406 && (!POINTER_TYPE_P (op0_type)
3407 || !POINTER_TYPE_P (op1_type)
3408 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3410 error ("mismatching comparison operand types");
3411 debug_generic_expr (op0_type);
3412 debug_generic_expr (op1_type);
3413 return true;
3416 /* The resulting type of a comparison may be an effective boolean type. */
3417 if (INTEGRAL_TYPE_P (type)
3418 && (TREE_CODE (type) == BOOLEAN_TYPE
3419 || TYPE_PRECISION (type) == 1))
3421 if (TREE_CODE (op0_type) == VECTOR_TYPE
3422 || TREE_CODE (op1_type) == VECTOR_TYPE)
3424 error ("vector comparison returning a boolean");
3425 debug_generic_expr (op0_type);
3426 debug_generic_expr (op1_type);
3427 return true;
3430 /* Or an integer vector type with the same size and element count
3431 as the comparison operand types. */
3432 else if (TREE_CODE (type) == VECTOR_TYPE
3433 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3435 if (TREE_CODE (op0_type) != VECTOR_TYPE
3436 || TREE_CODE (op1_type) != VECTOR_TYPE)
3438 error ("non-vector operands in vector comparison");
3439 debug_generic_expr (op0_type);
3440 debug_generic_expr (op1_type);
3441 return true;
3444 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3445 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3446 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3447 /* The result of a vector comparison is of signed
3448 integral type. */
3449 || TYPE_UNSIGNED (TREE_TYPE (type)))
3451 error ("invalid vector comparison resulting type");
3452 debug_generic_expr (type);
3453 return true;
3456 else
3458 error ("bogus comparison result type");
3459 debug_generic_expr (type);
3460 return true;
3463 return false;
3466 /* Verify a gimple assignment statement STMT with an unary rhs.
3467 Returns true if anything is wrong. */
3469 static bool
3470 verify_gimple_assign_unary (gassign *stmt)
3472 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3473 tree lhs = gimple_assign_lhs (stmt);
3474 tree lhs_type = TREE_TYPE (lhs);
3475 tree rhs1 = gimple_assign_rhs1 (stmt);
3476 tree rhs1_type = TREE_TYPE (rhs1);
/* A unary operation must write an SSA register and read a gimple
   value; memory operands are not allowed here.  */
3478 if (!is_gimple_reg (lhs))
3480 error ("non-register as LHS of unary operation");
3481 return true;
3484 if (!is_gimple_val (rhs1))
3486 error ("invalid operand in unary operation");
3487 return true;
3490 /* First handle conversions. */
3491 switch (rhs_code)
3493 CASE_CONVERT:
3495 /* Allow conversions from pointer type to integral type only if
3496 there is no sign or zero extension involved.
3497 For targets were the precision of ptrofftype doesn't match that
3498 of pointers we need to allow arbitrary conversions to ptrofftype. */
3499 if ((POINTER_TYPE_P (lhs_type)
3500 && INTEGRAL_TYPE_P (rhs1_type))
3501 || (POINTER_TYPE_P (rhs1_type)
3502 && INTEGRAL_TYPE_P (lhs_type)
3503 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3504 || ptrofftype_p (sizetype))))
3505 return false;
3507 /* Allow conversion from integral to offset type and vice versa. */
3508 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3509 && INTEGRAL_TYPE_P (rhs1_type))
3510 || (INTEGRAL_TYPE_P (lhs_type)
3511 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3512 return false;
3514 /* Otherwise assert we are converting between types of the
3515 same kind. */
3516 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3518 error ("invalid types in nop conversion");
3519 debug_generic_expr (lhs_type);
3520 debug_generic_expr (rhs1_type);
3521 return true;
3524 return false;
3527 case ADDR_SPACE_CONVERT_EXPR:
/* Both sides must be pointers, and the conversion must actually
   change the address space -- a same-space conversion is bogus.  */
3529 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3530 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3531 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3533 error ("invalid types in address space conversion");
3534 debug_generic_expr (lhs_type);
3535 debug_generic_expr (rhs1_type);
3536 return true;
3539 return false;
3542 case FIXED_CONVERT_EXPR:
3544 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3545 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3547 error ("invalid types in fixed-point conversion");
3548 debug_generic_expr (lhs_type);
3549 debug_generic_expr (rhs1_type);
3550 return true;
3553 return false;
3556 case FLOAT_EXPR:
/* Integer-to-float, either scalar or element-wise on vectors.  */
3558 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3559 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3560 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3562 error ("invalid types in conversion to floating point");
3563 debug_generic_expr (lhs_type);
3564 debug_generic_expr (rhs1_type);
3565 return true;
3568 return false;
3571 case FIX_TRUNC_EXPR:
/* Float-to-integer, either scalar or element-wise on vectors.  */
3573 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3574 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3575 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3577 error ("invalid types in conversion to integer");
3578 debug_generic_expr (lhs_type);
3579 debug_generic_expr (rhs1_type);
3580 return true;
3583 return false;
3585 case REDUC_MAX_EXPR:
3586 case REDUC_MIN_EXPR:
3587 case REDUC_PLUS_EXPR:
/* A whole-vector reduction yields a scalar of the element type.  */
3588 if (!VECTOR_TYPE_P (rhs1_type)
3589 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3591 error ("reduction should convert from vector to element type");
3592 debug_generic_expr (lhs_type);
3593 debug_generic_expr (rhs1_type);
3594 return true;
3596 return false;
3598 case VEC_UNPACK_HI_EXPR:
3599 case VEC_UNPACK_LO_EXPR:
3600 case VEC_UNPACK_FLOAT_HI_EXPR:
3601 case VEC_UNPACK_FLOAT_LO_EXPR:
3602 /* FIXME. */
3603 return false;
/* These codes are type-preserving and fall through to the generic
   no-conversion check below.  */
3605 case NEGATE_EXPR:
3606 case ABS_EXPR:
3607 case BIT_NOT_EXPR:
3608 case PAREN_EXPR:
3609 case CONJ_EXPR:
3610 break;
3612 default:
3613 gcc_unreachable ();
3616 /* For the remaining codes assert there is no conversion involved. */
3617 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3619 error ("non-trivial conversion in unary operation");
3620 debug_generic_expr (lhs_type);
3621 debug_generic_expr (rhs1_type);
3622 return true;
3625 return false;
3628 /* Verify a gimple assignment statement STMT with a binary rhs.
3629 Returns true if anything is wrong. */
3631 static bool
3632 verify_gimple_assign_binary (gassign *stmt)
3634 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3635 tree lhs = gimple_assign_lhs (stmt);
3636 tree lhs_type = TREE_TYPE (lhs);
3637 tree rhs1 = gimple_assign_rhs1 (stmt);
3638 tree rhs1_type = TREE_TYPE (rhs1);
3639 tree rhs2 = gimple_assign_rhs2 (stmt);
3640 tree rhs2_type = TREE_TYPE (rhs2);
/* The destination must be an SSA register and both operands gimple
   values.  */
3642 if (!is_gimple_reg (lhs))
3644 error ("non-register as LHS of binary operation");
3645 return true;
3648 if (!is_gimple_val (rhs1)
3649 || !is_gimple_val (rhs2))
3651 error ("invalid operands in binary operation");
3652 return true;
3655 /* First handle operations that involve different types. */
3656 switch (rhs_code)
3658 case COMPLEX_EXPR:
3660 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3661 || !(INTEGRAL_TYPE_P (rhs1_type)
3662 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3663 || !(INTEGRAL_TYPE_P (rhs2_type)
3664 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3666 error ("type mismatch in complex expression");
3667 debug_generic_expr (lhs_type);
3668 debug_generic_expr (rhs1_type);
3669 debug_generic_expr (rhs2_type);
3670 return true;
3673 return false;
3676 case LSHIFT_EXPR:
3677 case RSHIFT_EXPR:
3678 case LROTATE_EXPR:
3679 case RROTATE_EXPR:
3681 /* Shifts and rotates are ok on integral types, fixed point
3682 types and integer vector types. */
3683 if ((!INTEGRAL_TYPE_P (rhs1_type)
3684 && !FIXED_POINT_TYPE_P (rhs1_type)
3685 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3686 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3687 || (!INTEGRAL_TYPE_P (rhs2_type)
3688 /* Vector shifts of vectors are also ok. */
3689 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3690 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3691 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3692 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3693 || !useless_type_conversion_p (lhs_type, rhs1_type))
3695 error ("type mismatch in shift expression");
3696 debug_generic_expr (lhs_type);
3697 debug_generic_expr (rhs1_type);
3698 debug_generic_expr (rhs2_type);
3699 return true;
3702 return false;
3705 case WIDEN_LSHIFT_EXPR:
/* Widening shift: the result must be at least twice as wide as the
   shifted operand, and the shift count a constant.  */
3707 if (!INTEGRAL_TYPE_P (lhs_type)
3708 || !INTEGRAL_TYPE_P (rhs1_type)
3709 || TREE_CODE (rhs2) != INTEGER_CST
3710 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3712 error ("type mismatch in widening vector shift expression");
3713 debug_generic_expr (lhs_type);
3714 debug_generic_expr (rhs1_type);
3715 debug_generic_expr (rhs2_type);
3716 return true;
3719 return false;
3722 case VEC_WIDEN_LSHIFT_HI_EXPR:
3723 case VEC_WIDEN_LSHIFT_LO_EXPR:
3725 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3726 || TREE_CODE (lhs_type) != VECTOR_TYPE
3727 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3728 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3729 || TREE_CODE (rhs2) != INTEGER_CST
3730 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3731 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3733 error ("type mismatch in widening vector shift expression");
3734 debug_generic_expr (lhs_type);
3735 debug_generic_expr (rhs1_type);
3736 debug_generic_expr (rhs2_type);
3737 return true;
3740 return false;
3743 case PLUS_EXPR:
3744 case MINUS_EXPR:
/* For vector plus/minus, compare element types; pointers may never
   appear as operands of PLUS_EXPR/MINUS_EXPR (POINTER_PLUS_EXPR is
   the only pointer addition form).  */
3746 tree lhs_etype = lhs_type;
3747 tree rhs1_etype = rhs1_type;
3748 tree rhs2_etype = rhs2_type;
3749 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3751 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3752 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3754 error ("invalid non-vector operands to vector valued plus");
3755 return true;
3757 lhs_etype = TREE_TYPE (lhs_type);
3758 rhs1_etype = TREE_TYPE (rhs1_type);
3759 rhs2_etype = TREE_TYPE (rhs2_type);
3761 if (POINTER_TYPE_P (lhs_etype)
3762 || POINTER_TYPE_P (rhs1_etype)
3763 || POINTER_TYPE_P (rhs2_etype))
3765 error ("invalid (pointer) operands to plus/minus");
3766 return true;
3769 /* Continue with generic binary expression handling. */
3770 break;
3773 case POINTER_PLUS_EXPR:
/* pointer + offset: result type equals the pointer operand's type
   and the offset must be of ptrofftype.  */
3775 if (!POINTER_TYPE_P (rhs1_type)
3776 || !useless_type_conversion_p (lhs_type, rhs1_type)
3777 || !ptrofftype_p (rhs2_type))
3779 error ("type mismatch in pointer plus expression");
3780 debug_generic_stmt (lhs_type);
3781 debug_generic_stmt (rhs1_type);
3782 debug_generic_stmt (rhs2_type);
3783 return true;
3786 return false;
/* TRUTH_* codes are lowered before reaching GIMPLE, hence
   unreachable here.  */
3789 case TRUTH_ANDIF_EXPR:
3790 case TRUTH_ORIF_EXPR:
3791 case TRUTH_AND_EXPR:
3792 case TRUTH_OR_EXPR:
3793 case TRUTH_XOR_EXPR:
3795 gcc_unreachable ();
3797 case LT_EXPR:
3798 case LE_EXPR:
3799 case GT_EXPR:
3800 case GE_EXPR:
3801 case EQ_EXPR:
3802 case NE_EXPR:
3803 case UNORDERED_EXPR:
3804 case ORDERED_EXPR:
3805 case UNLT_EXPR:
3806 case UNLE_EXPR:
3807 case UNGT_EXPR:
3808 case UNGE_EXPR:
3809 case UNEQ_EXPR:
3810 case LTGT_EXPR:
3811 /* Comparisons are also binary, but the result type is not
3812 connected to the operand types. */
3813 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3815 case WIDEN_MULT_EXPR:
3816 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3817 return true;
3818 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3819 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3821 case WIDEN_SUM_EXPR:
3822 case VEC_WIDEN_MULT_HI_EXPR:
3823 case VEC_WIDEN_MULT_LO_EXPR:
3824 case VEC_WIDEN_MULT_EVEN_EXPR:
3825 case VEC_WIDEN_MULT_ODD_EXPR:
3826 case VEC_PACK_TRUNC_EXPR:
3827 case VEC_PACK_SAT_EXPR:
3828 case VEC_PACK_FIX_TRUNC_EXPR:
3829 /* FIXME. */
3830 return false;
/* These codes require all three types to agree and fall through to
   the generic check below.  */
3832 case MULT_EXPR:
3833 case MULT_HIGHPART_EXPR:
3834 case TRUNC_DIV_EXPR:
3835 case CEIL_DIV_EXPR:
3836 case FLOOR_DIV_EXPR:
3837 case ROUND_DIV_EXPR:
3838 case TRUNC_MOD_EXPR:
3839 case CEIL_MOD_EXPR:
3840 case FLOOR_MOD_EXPR:
3841 case ROUND_MOD_EXPR:
3842 case RDIV_EXPR:
3843 case EXACT_DIV_EXPR:
3844 case MIN_EXPR:
3845 case MAX_EXPR:
3846 case BIT_IOR_EXPR:
3847 case BIT_XOR_EXPR:
3848 case BIT_AND_EXPR:
3849 /* Continue with generic binary expression handling. */
3850 break;
3852 default:
3853 gcc_unreachable ();
3856 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3857 || !useless_type_conversion_p (lhs_type, rhs2_type))
3859 error ("type mismatch in binary expression");
3860 debug_generic_stmt (lhs_type);
3861 debug_generic_stmt (rhs1_type);
3862 debug_generic_stmt (rhs2_type);
3863 return true;
3866 return false;
3869 /* Verify a gimple assignment statement STMT with a ternary rhs.
3870 Returns true if anything is wrong. */
3872 static bool
3873 verify_gimple_assign_ternary (gassign *stmt)
3875 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3876 tree lhs = gimple_assign_lhs (stmt);
3877 tree lhs_type = TREE_TYPE (lhs);
3878 tree rhs1 = gimple_assign_rhs1 (stmt);
3879 tree rhs1_type = TREE_TYPE (rhs1);
3880 tree rhs2 = gimple_assign_rhs2 (stmt);
3881 tree rhs2_type = TREE_TYPE (rhs2);
3882 tree rhs3 = gimple_assign_rhs3 (stmt);
3883 tree rhs3_type = TREE_TYPE (rhs3);
3885 if (!is_gimple_reg (lhs))
3887 error ("non-register as LHS of ternary operation");
3888 return true;
/* rhs1 of a (VEC_)COND_EXPR may be a condition expression; all
   other operands must be plain gimple values.  */
3891 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3892 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3893 || !is_gimple_val (rhs2)
3894 || !is_gimple_val (rhs3))
3896 error ("invalid operands in ternary operation");
3897 return true;
3900 /* First handle operations that involve different types. */
3901 switch (rhs_code)
3903 case WIDEN_MULT_PLUS_EXPR:
3904 case WIDEN_MULT_MINUS_EXPR:
/* lhs = rhs1 * rhs2 +/- rhs3; the two factors share a type and the
   accumulator matches the (at least twice as wide) result.  */
3905 if ((!INTEGRAL_TYPE_P (rhs1_type)
3906 && !FIXED_POINT_TYPE_P (rhs1_type))
3907 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3908 || !useless_type_conversion_p (lhs_type, rhs3_type)
3909 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3910 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3912 error ("type mismatch in widening multiply-accumulate expression");
3913 debug_generic_expr (lhs_type);
3914 debug_generic_expr (rhs1_type);
3915 debug_generic_expr (rhs2_type);
3916 debug_generic_expr (rhs3_type);
3917 return true;
3919 break;
3921 case FMA_EXPR:
/* All operands and the result of an FMA share one type.  */
3922 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3923 || !useless_type_conversion_p (lhs_type, rhs2_type)
3924 || !useless_type_conversion_p (lhs_type, rhs3_type))
3926 error ("type mismatch in fused multiply-add expression");
3927 debug_generic_expr (lhs_type);
3928 debug_generic_expr (rhs1_type);
3929 debug_generic_expr (rhs2_type);
3930 debug_generic_expr (rhs3_type);
3931 return true;
3933 break;
3935 case COND_EXPR:
3936 case VEC_COND_EXPR:
/* Both selected values must have the result's type.  */
3937 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3938 || !useless_type_conversion_p (lhs_type, rhs3_type))
3940 error ("type mismatch in conditional expression");
3941 debug_generic_expr (lhs_type);
3942 debug_generic_expr (rhs2_type);
3943 debug_generic_expr (rhs3_type);
3944 return true;
3946 break;
3948 case VEC_PERM_EXPR:
3949 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3950 || !useless_type_conversion_p (lhs_type, rhs2_type))
3952 error ("type mismatch in vector permute expression");
3953 debug_generic_expr (lhs_type);
3954 debug_generic_expr (rhs1_type);
3955 debug_generic_expr (rhs2_type);
3956 debug_generic_expr (rhs3_type);
3957 return true;
3960 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3961 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3962 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3964 error ("vector types expected in vector permute expression");
3965 debug_generic_expr (lhs_type);
3966 debug_generic_expr (rhs1_type);
3967 debug_generic_expr (rhs2_type);
3968 debug_generic_expr (rhs3_type);
3969 return true;
3972 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3973 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3974 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3975 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3976 != TYPE_VECTOR_SUBPARTS (lhs_type))
3978 error ("vectors with different element number found "
3979 "in vector permute expression");
3980 debug_generic_expr (lhs_type);
3981 debug_generic_expr (rhs1_type);
3982 debug_generic_expr (rhs2_type);
3983 debug_generic_expr (rhs3_type);
3984 return true;
/* The selector (rhs3) must be an integer vector whose elements are
   as wide as the data elements.  */
3987 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3988 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3989 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3991 error ("invalid mask type in vector permute expression");
3992 debug_generic_expr (lhs_type);
3993 debug_generic_expr (rhs1_type);
3994 debug_generic_expr (rhs2_type);
3995 debug_generic_expr (rhs3_type);
3996 return true;
3999 return false;
4001 case SAD_EXPR:
/* Sum of absolute differences: rhs1/rhs2 are the compared vectors,
   rhs3 the accumulator of the (wider) result type.  */
4002 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4003 || !useless_type_conversion_p (lhs_type, rhs3_type)
4004 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4005 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4006 > GET_MODE_BITSIZE (GET_MODE_INNER
4007 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4009 error ("type mismatch in sad expression");
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs1_type);
4012 debug_generic_expr (rhs2_type);
4013 debug_generic_expr (rhs3_type);
4014 return true;
4017 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4018 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4019 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4021 error ("vector types expected in sad expression");
4022 debug_generic_expr (lhs_type);
4023 debug_generic_expr (rhs1_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4029 return false;
4031 case DOT_PROD_EXPR:
4032 case REALIGN_LOAD_EXPR:
4033 /* FIXME. */
4034 return false;
4036 default:
4037 gcc_unreachable ();
4039 return false;
4042 /* Verify a gimple assignment statement STMT with a single rhs.
4043 Returns true if anything is wrong. */
4045 static bool
4046 verify_gimple_assign_single (gassign *stmt)
4048 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4049 tree lhs = gimple_assign_lhs (stmt);
4050 tree lhs_type = TREE_TYPE (lhs);
4051 tree rhs1 = gimple_assign_rhs1 (stmt);
4052 tree rhs1_type = TREE_TYPE (rhs1);
4053 bool res = false;
/* A plain copy must not involve any implicit conversion.  */
4055 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4057 error ("non-trivial conversion at assignment");
4058 debug_generic_expr (lhs_type);
4059 debug_generic_expr (rhs1_type);
4060 return true;
4063 if (gimple_clobber_p (stmt)
4064 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4066 error ("non-decl/MEM_REF LHS in clobber statement");
4067 debug_generic_expr (lhs);
4068 return true;
/* If the LHS is a memory reference, verify it; remember the result
   in RES and keep checking the RHS.  */
4071 if (handled_component_p (lhs)
4072 || TREE_CODE (lhs) == MEM_REF
4073 || TREE_CODE (lhs) == TARGET_MEM_REF)
4074 res |= verify_types_in_gimple_reference (lhs, true);
4076 /* Special codes we cannot handle via their class. */
4077 switch (rhs_code)
4079 case ADDR_EXPR:
4081 tree op = TREE_OPERAND (rhs1, 0);
4082 if (!is_gimple_addressable (op))
4084 error ("invalid operand in unary expression");
4085 return true;
4088 /* Technically there is no longer a need for matching types, but
4089 gimple hygiene asks for this check. In LTO we can end up
4090 combining incompatible units and thus end up with addresses
4091 of globals that change their type to a common one. */
4092 if (!in_lto_p
4093 && !types_compatible_p (TREE_TYPE (op),
4094 TREE_TYPE (TREE_TYPE (rhs1)))
4095 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4096 TREE_TYPE (op)))
4098 error ("type mismatch in address expression");
4099 debug_generic_stmt (TREE_TYPE (rhs1));
4100 debug_generic_stmt (TREE_TYPE (op));
4101 return true;
4104 return verify_types_in_gimple_reference (op, true);
4107 /* tcc_reference */
4108 case INDIRECT_REF:
4109 error ("INDIRECT_REF in gimple IL");
4110 return true;
4112 case COMPONENT_REF:
4113 case BIT_FIELD_REF:
4114 case ARRAY_REF:
4115 case ARRAY_RANGE_REF:
4116 case VIEW_CONVERT_EXPR:
4117 case REALPART_EXPR:
4118 case IMAGPART_EXPR:
4119 case TARGET_MEM_REF:
4120 case MEM_REF:
/* A memory RHS of register type may not be stored directly into a
   memory LHS (memory-to-memory copy of a register-type value).  */
4121 if (!is_gimple_reg (lhs)
4122 && is_gimple_reg_type (TREE_TYPE (lhs)))
4124 error ("invalid rhs for gimple memory store");
4125 debug_generic_stmt (lhs);
4126 debug_generic_stmt (rhs1);
4127 return true;
4129 return res || verify_types_in_gimple_reference (rhs1, false);
4131 /* tcc_constant */
4132 case SSA_NAME:
4133 case INTEGER_CST:
4134 case REAL_CST:
4135 case FIXED_CST:
4136 case COMPLEX_CST:
4137 case VECTOR_CST:
4138 case STRING_CST:
4139 return res;
4141 /* tcc_declaration */
4142 case CONST_DECL:
4143 return res;
4144 case VAR_DECL:
4145 case PARM_DECL:
/* Same memory-to-memory restriction as above for decl RHSes.  */
4146 if (!is_gimple_reg (lhs)
4147 && !is_gimple_reg (rhs1)
4148 && is_gimple_reg_type (TREE_TYPE (lhs)))
4150 error ("invalid rhs for gimple memory store");
4151 debug_generic_stmt (lhs);
4152 debug_generic_stmt (rhs1);
4153 return true;
4155 return res;
4157 case CONSTRUCTOR:
4158 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4160 unsigned int i;
4161 tree elt_i, elt_v, elt_t = NULL_TREE;
4163 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4164 return res;
4165 /* For vector CONSTRUCTORs we require that either it is empty
4166 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4167 (then the element count must be correct to cover the whole
4168 outer vector and index must be NULL on all elements, or it is
4169 a CONSTRUCTOR of scalar elements, where we as an exception allow
4170 smaller number of elements (assuming zero filling) and
4171 consecutive indexes as compared to NULL indexes (such
4172 CONSTRUCTORs can appear in the IL from FEs). */
4173 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
/* On the first element, record its type and validate it.  */
4175 if (elt_t == NULL_TREE)
4177 elt_t = TREE_TYPE (elt_v);
4178 if (TREE_CODE (elt_t) == VECTOR_TYPE)
/* NOTE(review): this inner elt_t shadows the outer one with the
   same value; TREE_TYPE (elt_t) below is the sub-vector's element
   type.  The shadowing looks redundant but is harmless.  */
4180 tree elt_t = TREE_TYPE (elt_v);
4181 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4182 TREE_TYPE (elt_t)))
4184 error ("incorrect type of vector CONSTRUCTOR"
4185 " elements");
4186 debug_generic_stmt (rhs1);
4187 return true;
4189 else if (CONSTRUCTOR_NELTS (rhs1)
4190 * TYPE_VECTOR_SUBPARTS (elt_t)
4191 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4193 error ("incorrect number of vector CONSTRUCTOR"
4194 " elements");
4195 debug_generic_stmt (rhs1);
4196 return true;
4199 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4200 elt_t))
4202 error ("incorrect type of vector CONSTRUCTOR elements");
4203 debug_generic_stmt (rhs1);
4204 return true;
4206 else if (CONSTRUCTOR_NELTS (rhs1)
4207 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4209 error ("incorrect number of vector CONSTRUCTOR elements");
4210 debug_generic_stmt (rhs1);
4211 return true;
/* Later elements must match the first element's type.  */
4214 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4216 error ("incorrect type of vector CONSTRUCTOR elements");
4217 debug_generic_stmt (rhs1);
4218 return true;
/* Indexes, when present, must be consecutive integer constants
   (and are disallowed entirely for sub-vector elements).  */
4220 if (elt_i != NULL_TREE
4221 && (TREE_CODE (elt_t) == VECTOR_TYPE
4222 || TREE_CODE (elt_i) != INTEGER_CST
4223 || compare_tree_int (elt_i, i) != 0))
4225 error ("vector CONSTRUCTOR with non-NULL element index");
4226 debug_generic_stmt (rhs1);
4227 return true;
4229 if (!is_gimple_val (elt_v))
4231 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4232 debug_generic_stmt (rhs1);
4233 return true;
4237 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4239 error ("non-vector CONSTRUCTOR with elements");
4240 debug_generic_stmt (rhs1);
4241 return true;
4243 return res;
4244 case OBJ_TYPE_REF:
4245 case ASSERT_EXPR:
4246 case WITH_SIZE_EXPR:
4247 /* FIXME. */
4248 return res;
4250 default:;
4253 return res;
4256 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4257 is a problem, otherwise false. */
4259 static bool
4260 verify_gimple_assign (gassign *stmt)
4262 switch (gimple_assign_rhs_class (stmt))
4264 case GIMPLE_SINGLE_RHS:
4265 return verify_gimple_assign_single (stmt);
4267 case GIMPLE_UNARY_RHS:
4268 return verify_gimple_assign_unary (stmt);
4270 case GIMPLE_BINARY_RHS:
4271 return verify_gimple_assign_binary (stmt);
4273 case GIMPLE_TERNARY_RHS:
4274 return verify_gimple_assign_ternary (stmt);
4276 default:
4277 gcc_unreachable ();
4281 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4282 is a problem, otherwise false. */
4284 static bool
4285 verify_gimple_return (greturn *stmt)
4287 tree op = gimple_return_retval (stmt);
4288 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4290 /* We cannot test for present return values as we do not fix up missing
4291 return values from the original source. */
4292 if (op == NULL)
4293 return false;
4295 if (!is_gimple_val (op)
4296 && TREE_CODE (op) != RESULT_DECL)
4298 error ("invalid operand in return statement");
4299 debug_generic_stmt (op);
4300 return true;
/* For a DECL_BY_REFERENCE result the returned entity is a pointer;
   replace OP by its type so TREE_TYPE (op) below peels one level of
   indirection before the conversion check.  */
4303 if ((TREE_CODE (op) == RESULT_DECL
4304 && DECL_BY_REFERENCE (op))
4305 || (TREE_CODE (op) == SSA_NAME
4306 && SSA_NAME_VAR (op)
4307 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4308 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4309 op = TREE_TYPE (op);
4311 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4313 error ("invalid conversion in return statement");
4314 debug_generic_stmt (restype);
4315 debug_generic_stmt (TREE_TYPE (op));
4316 return true;
4319 return false;
4323 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4324 is a problem, otherwise false. */
4326 static bool
4327 verify_gimple_goto (ggoto *stmt)
4329 tree dest = gimple_goto_dest (stmt);
4331 /* ??? We have two canonical forms of direct goto destinations, a
4332 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4333 if (TREE_CODE (dest) != LABEL_DECL
4334 && (!is_gimple_val (dest)
4335 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4337 error ("goto destination is neither a label nor a pointer");
4338 return true;
4341 return false;
4344 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4345 is a problem, otherwise false. */
4347 static bool
4348 verify_gimple_switch (gswitch *stmt)
4350 unsigned int i, n;
4351 tree elt, prev_upper_bound = NULL_TREE;
4352 tree index_type, elt_type = NULL_TREE;
4354 if (!is_gimple_val (gimple_switch_index (stmt)))
4356 error ("invalid operand to switch statement");
4357 debug_generic_stmt (gimple_switch_index (stmt));
4358 return true;
4361 index_type = TREE_TYPE (gimple_switch_index (stmt));
4362 if (! INTEGRAL_TYPE_P (index_type))
4364 error ("non-integral type switch statement");
4365 debug_generic_expr (index_type);
4366 return true;
/* Label 0 is always the default case and must carry neither a
   CASE_LOW nor a CASE_HIGH value.  */
4369 elt = gimple_switch_label (stmt, 0);
4370 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4372 error ("invalid default case label in switch statement");
4373 debug_generic_expr (elt);
4374 return true;
4377 n = gimple_switch_num_labels (stmt);
/* The remaining labels must have a low bound, well-formed ranges,
   a single shared type not wider than the index type, and be sorted
   in strictly increasing, non-overlapping order.  */
4378 for (i = 1; i < n; i++)
4380 elt = gimple_switch_label (stmt, i);
4382 if (! CASE_LOW (elt))
4384 error ("invalid case label in switch statement");
4385 debug_generic_expr (elt);
4386 return true;
4388 if (CASE_HIGH (elt)
4389 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4391 error ("invalid case range in switch statement");
4392 debug_generic_expr (elt);
4393 return true;
4396 if (elt_type)
4398 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4399 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4401 error ("type mismatch for case label in switch statement");
4402 debug_generic_expr (elt);
4403 return true;
4406 else
4408 elt_type = TREE_TYPE (CASE_LOW (elt));
4409 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4411 error ("type precision mismatch in switch statement");
4412 return true;
4416 if (prev_upper_bound)
4418 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4420 error ("case labels not sorted in switch statement");
4421 return true;
/* Track the running upper bound for the sortedness check; a label
   without CASE_HIGH covers a single value.  */
4425 prev_upper_bound = CASE_HIGH (elt);
4426 if (! prev_upper_bound)
4427 prev_upper_bound = CASE_LOW (elt);
4430 return false;
4433 /* Verify a gimple debug statement STMT.
4434 Returns true if anything is wrong. */
4436 static bool
4437 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4439 /* There isn't much that could be wrong in a gimple debug stmt. A
4440 gimple debug bind stmt, for example, maps a tree, that's usually
4441 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4442 component or member of an aggregate type, to another tree, that
4443 can be an arbitrary expression. These stmts expand into debug
4444 insns, and are converted to debug notes by var-tracking.c. */
4445 return false;
4448 /* Verify a gimple label statement STMT.
4449 Returns true if anything is wrong. */
4451 static bool
4452 verify_gimple_label (glabel *stmt)
4454 tree decl = gimple_label_label (stmt);
4455 int uid;
4456 bool err = false;
4458 if (TREE_CODE (decl) != LABEL_DECL)
4459 return true;
/* Ordinary labels must belong to the current function; nonlocal and
   forced labels are exempt from this check.  */
4460 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4461 && DECL_CONTEXT (decl) != current_function_decl)
4463 error ("label's context is not the current function decl");
4464 err |= true;
/* When a CFG exists, the label-to-block map entry for this label's
   UID must point back at the statement's own block.  */
4467 uid = LABEL_DECL_UID (decl);
4468 if (cfun->cfg
4469 && (uid == -1
4470 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4472 error ("incorrect entry in label_to_block_map");
4473 err |= true;
/* A label carrying an EH landing-pad number must be the
   post-landing-pad label recorded for that number.  */
4476 uid = EH_LANDING_PAD_NR (decl);
4477 if (uid)
4479 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4480 if (decl != lp->post_landing_pad)
4482 error ("incorrect setting of landing pad number");
4483 err |= true;
4487 return err;
4490 /* Verify a gimple cond statement STMT.
4491 Returns true if anything is wrong. */
4493 static bool
4494 verify_gimple_cond (gcond *stmt)
4496 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4498 error ("invalid comparison code in gimple cond");
4499 return true;
4501 if (!(!gimple_cond_true_label (stmt)
4502 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4503 || !(!gimple_cond_false_label (stmt)
4504 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4506 error ("invalid labels in gimple cond");
4507 return true;
4510 return verify_gimple_comparison (boolean_type_node,
4511 gimple_cond_lhs (stmt),
4512 gimple_cond_rhs (stmt));
4515 /* Verify the GIMPLE statement STMT. Returns true if there is an
4516 error, otherwise false. */
4518 static bool
4519 verify_gimple_stmt (gimple stmt)
4521 switch (gimple_code (stmt))
4523 case GIMPLE_ASSIGN:
4524 return verify_gimple_assign (as_a <gassign *> (stmt));
4526 case GIMPLE_LABEL:
4527 return verify_gimple_label (as_a <glabel *> (stmt));
4529 case GIMPLE_CALL:
4530 return verify_gimple_call (as_a <gcall *> (stmt));
4532 case GIMPLE_COND:
4533 return verify_gimple_cond (as_a <gcond *> (stmt));
4535 case GIMPLE_GOTO:
4536 return verify_gimple_goto (as_a <ggoto *> (stmt));
4538 case GIMPLE_SWITCH:
4539 return verify_gimple_switch (as_a <gswitch *> (stmt));
4541 case GIMPLE_RETURN:
4542 return verify_gimple_return (as_a <greturn *> (stmt));
4544 case GIMPLE_ASM:
4545 return false;
4547 case GIMPLE_TRANSACTION:
4548 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4550 /* Tuples that do not have tree operands. */
4551 case GIMPLE_NOP:
4552 case GIMPLE_PREDICT:
4553 case GIMPLE_RESX:
4554 case GIMPLE_EH_DISPATCH:
4555 case GIMPLE_EH_MUST_NOT_THROW:
4556 return false;
4558 CASE_GIMPLE_OMP:
4559 /* OpenMP directives are validated by the FE and never operated
4560 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4561 non-gimple expressions when the main index variable has had
4562 its address taken. This does not affect the loop itself
4563 because the header of an GIMPLE_OMP_FOR is merely used to determine
4564 how to setup the parallel iteration. */
4565 return false;
4567 case GIMPLE_DEBUG:
4568 return verify_gimple_debug (stmt);
4570 default:
4571 gcc_unreachable ();
4575 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4576 and false otherwise. */
4578 static bool
4579 verify_gimple_phi (gimple phi)
4581 bool err = false;
4582 unsigned i;
4583 tree phi_result = gimple_phi_result (phi);
4584 bool virtual_p;
4586 if (!phi_result)
4588 error ("invalid PHI result");
4589 return true;
/* The result must be an SSA name; a virtual PHI must define the
   function's single virtual operand.  */
4592 virtual_p = virtual_operand_p (phi_result);
4593 if (TREE_CODE (phi_result) != SSA_NAME
4594 || (virtual_p
4595 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4597 error ("invalid PHI result");
4598 err = true;
/* Each argument must be present, agree with the result in
   virtual-ness, and real arguments must be gimple values.  */
4601 for (i = 0; i < gimple_phi_num_args (phi); i++)
4603 tree t = gimple_phi_arg_def (phi, i);
4605 if (!t)
4607 error ("missing PHI def");
4608 err |= true;
4609 continue;
4611 /* Addressable variables do have SSA_NAMEs but they
4612 are not considered gimple values. */
4613 else if ((TREE_CODE (t) == SSA_NAME
4614 && virtual_p != virtual_operand_p (t))
4615 || (virtual_p
4616 && (TREE_CODE (t) != SSA_NAME
4617 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4618 || (!virtual_p
4619 && !is_gimple_val (t)))
4621 error ("invalid PHI argument");
4622 debug_generic_expr (t);
4623 err |= true;
/* Type agreement between result and arguments is only enforced in
   checking-enabled builds.  */
4625 #ifdef ENABLE_TYPES_CHECKING
4626 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4628 error ("incompatible types in PHI argument %u", i);
4629 debug_generic_stmt (TREE_TYPE (phi_result));
4630 debug_generic_stmt (TREE_TYPE (t));
4631 err |= true;
4633 #endif
4636 return err;
4639 /* Verify the GIMPLE statements inside the sequence STMTS. */
4641 static bool
4642 verify_gimple_in_seq_2 (gimple_seq stmts)
4644 gimple_stmt_iterator ittr;
4645 bool err = false;
4647 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4649 gimple stmt = gsi_stmt (ittr);
/* Statements that carry nested sequences are verified by recursing
   into each of their sub-sequences; everything else is a leaf
   statement checked by verify_gimple_stmt.  */
4651 switch (gimple_code (stmt))
4653 case GIMPLE_BIND:
4654 err |= verify_gimple_in_seq_2 (
4655 gimple_bind_body (as_a <gbind *> (stmt)));
4656 break;
4658 case GIMPLE_TRY:
4659 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4660 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4661 break;
4663 case GIMPLE_EH_FILTER:
4664 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4665 break;
4667 case GIMPLE_EH_ELSE:
4669 geh_else *eh_else = as_a <geh_else *> (stmt);
4670 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4671 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4673 break;
4675 case GIMPLE_CATCH:
4676 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4677 as_a <gcatch *> (stmt)));
4678 break;
4680 case GIMPLE_TRANSACTION:
4681 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4682 break;
4684 default:
/* Dump the offending statement when its verification fails, then
   keep going so all problems are reported.  */
4686 bool err2 = verify_gimple_stmt (stmt);
4687 if (err2)
4688 debug_gimple_stmt (stmt);
4689 err |= err2;
4694 return err;
4697 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4698 is a problem, otherwise false. */
4700 static bool
4701 verify_gimple_transaction (gtransaction *stmt)
4703 tree lab = gimple_transaction_label (stmt);
4704 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4705 return true;
4706 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4710 /* Verify the GIMPLE statements inside the statement list STMTS. */
4712 DEBUG_FUNCTION void
4713 verify_gimple_in_seq (gimple_seq stmts)
4715 timevar_push (TV_TREE_STMT_VERIFY);
4716 if (verify_gimple_in_seq_2 (stmts))
4717 internal_error ("verify_gimple failed");
4718 timevar_pop (TV_TREE_STMT_VERIFY);
4721 /* Return true when the T can be shared. */
4723 static bool
4724 tree_node_can_be_shared (tree t)
4726 if (IS_TYPE_OR_DECL_P (t)
4727 || is_gimple_min_invariant (t)
4728 || TREE_CODE (t) == SSA_NAME
4729 || t == error_mark_node
4730 || TREE_CODE (t) == IDENTIFIER_NODE)
4731 return true;
4733 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4734 return true;
4736 if (DECL_P (t))
4737 return true;
4739 return false;
4742 /* Called via walk_tree. Verify tree sharing. */
4744 static tree
4745 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4747 hash_set<void *> *visited = (hash_set<void *> *) data;
4749 if (tree_node_can_be_shared (*tp))
4751 *walk_subtrees = false;
4752 return NULL;
4755 if (visited->add (*tp))
4756 return *tp;
4758 return NULL;
4761 /* Called via walk_gimple_stmt. Verify tree sharing. */
4763 static tree
4764 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4766 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4767 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4770 static bool eh_error_found;
4771 bool
4772 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4773 hash_set<gimple> *visited)
4775 if (!visited->contains (stmt))
4777 error ("dead STMT in EH table");
4778 debug_gimple_stmt (stmt);
4779 eh_error_found = true;
4781 return true;
4784 /* Verify if the location LOCs block is in BLOCKS. */
4786 static bool
4787 verify_location (hash_set<tree> *blocks, location_t loc)
4789 tree block = LOCATION_BLOCK (loc);
4790 if (block != NULL_TREE
4791 && !blocks->contains (block))
4793 error ("location references block not in block tree");
4794 return true;
4796 if (block != NULL_TREE)
4797 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4798 return false;
4801 /* Called via walk_tree. Verify that expressions have no blocks. */
4803 static tree
4804 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4806 if (!EXPR_P (*tp))
4808 *walk_subtrees = false;
4809 return NULL;
4812 location_t loc = EXPR_LOCATION (*tp);
4813 if (LOCATION_BLOCK (loc) != NULL)
4814 return *tp;
4816 return NULL;
4819 /* Called via walk_tree. Verify locations of expressions. */
4821 static tree
4822 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4824 hash_set<tree> *blocks = (hash_set<tree> *) data;
4826 if (TREE_CODE (*tp) == VAR_DECL
4827 && DECL_HAS_DEBUG_EXPR_P (*tp))
4829 tree t = DECL_DEBUG_EXPR (*tp);
4830 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4831 if (addr)
4832 return addr;
4834 if ((TREE_CODE (*tp) == VAR_DECL
4835 || TREE_CODE (*tp) == PARM_DECL
4836 || TREE_CODE (*tp) == RESULT_DECL)
4837 && DECL_HAS_VALUE_EXPR_P (*tp))
4839 tree t = DECL_VALUE_EXPR (*tp);
4840 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4841 if (addr)
4842 return addr;
4845 if (!EXPR_P (*tp))
4847 *walk_subtrees = false;
4848 return NULL;
4851 location_t loc = EXPR_LOCATION (*tp);
4852 if (verify_location (blocks, loc))
4853 return *tp;
4855 return NULL;
4858 /* Called via walk_gimple_op. Verify locations of expressions. */
4860 static tree
4861 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4863 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4864 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4867 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4869 static void
4870 collect_subblocks (hash_set<tree> *blocks, tree block)
4872 tree t;
4873 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4875 blocks->add (t);
4876 collect_subblocks (blocks, t);
4880 /* Verify the GIMPLE statements in the CFG of FN. */
4882 DEBUG_FUNCTION void
4883 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4885 basic_block bb;
4886 bool err = false;
4888 timevar_push (TV_TREE_STMT_VERIFY);
4889 hash_set<void *> visited;
4890 hash_set<gimple> visited_stmts;
4892 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4893 hash_set<tree> blocks;
4894 if (DECL_INITIAL (fn->decl))
4896 blocks.add (DECL_INITIAL (fn->decl));
4897 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4900 FOR_EACH_BB_FN (bb, fn)
4902 gimple_stmt_iterator gsi;
4904 for (gphi_iterator gpi = gsi_start_phis (bb);
4905 !gsi_end_p (gpi);
4906 gsi_next (&gpi))
4908 gphi *phi = gpi.phi ();
4909 bool err2 = false;
4910 unsigned i;
4912 visited_stmts.add (phi);
4914 if (gimple_bb (phi) != bb)
4916 error ("gimple_bb (phi) is set to a wrong basic block");
4917 err2 = true;
4920 err2 |= verify_gimple_phi (phi);
4922 /* Only PHI arguments have locations. */
4923 if (gimple_location (phi) != UNKNOWN_LOCATION)
4925 error ("PHI node with location");
4926 err2 = true;
4929 for (i = 0; i < gimple_phi_num_args (phi); i++)
4931 tree arg = gimple_phi_arg_def (phi, i);
4932 tree addr = walk_tree (&arg, verify_node_sharing_1,
4933 &visited, NULL);
4934 if (addr)
4936 error ("incorrect sharing of tree nodes");
4937 debug_generic_expr (addr);
4938 err2 |= true;
4940 location_t loc = gimple_phi_arg_location (phi, i);
4941 if (virtual_operand_p (gimple_phi_result (phi))
4942 && loc != UNKNOWN_LOCATION)
4944 error ("virtual PHI with argument locations");
4945 err2 = true;
4947 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4948 if (addr)
4950 debug_generic_expr (addr);
4951 err2 = true;
4953 err2 |= verify_location (&blocks, loc);
4956 if (err2)
4957 debug_gimple_stmt (phi);
4958 err |= err2;
4961 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4963 gimple stmt = gsi_stmt (gsi);
4964 bool err2 = false;
4965 struct walk_stmt_info wi;
4966 tree addr;
4967 int lp_nr;
4969 visited_stmts.add (stmt);
4971 if (gimple_bb (stmt) != bb)
4973 error ("gimple_bb (stmt) is set to a wrong basic block");
4974 err2 = true;
4977 err2 |= verify_gimple_stmt (stmt);
4978 err2 |= verify_location (&blocks, gimple_location (stmt));
4980 memset (&wi, 0, sizeof (wi));
4981 wi.info = (void *) &visited;
4982 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4983 if (addr)
4985 error ("incorrect sharing of tree nodes");
4986 debug_generic_expr (addr);
4987 err2 |= true;
4990 memset (&wi, 0, sizeof (wi));
4991 wi.info = (void *) &blocks;
4992 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4993 if (addr)
4995 debug_generic_expr (addr);
4996 err2 |= true;
4999 /* ??? Instead of not checking these stmts at all the walker
5000 should know its context via wi. */
5001 if (!is_gimple_debug (stmt)
5002 && !is_gimple_omp (stmt))
5004 memset (&wi, 0, sizeof (wi));
5005 addr = walk_gimple_op (stmt, verify_expr, &wi);
5006 if (addr)
5008 debug_generic_expr (addr);
5009 inform (gimple_location (stmt), "in statement");
5010 err2 |= true;
5014 /* If the statement is marked as part of an EH region, then it is
5015 expected that the statement could throw. Verify that when we
5016 have optimizations that simplify statements such that we prove
5017 that they cannot throw, that we update other data structures
5018 to match. */
5019 lp_nr = lookup_stmt_eh_lp (stmt);
5020 if (lp_nr > 0)
5022 if (!stmt_could_throw_p (stmt))
5024 if (verify_nothrow)
5026 error ("statement marked for throw, but doesn%'t");
5027 err2 |= true;
5030 else if (!gsi_one_before_end_p (gsi))
5032 error ("statement marked for throw in middle of block");
5033 err2 |= true;
5037 if (err2)
5038 debug_gimple_stmt (stmt);
5039 err |= err2;
5043 eh_error_found = false;
5044 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5045 if (eh_table)
5046 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5047 (&visited_stmts);
5049 if (err || eh_error_found)
5050 internal_error ("verify_gimple failed");
5052 verify_histograms ();
5053 timevar_pop (TV_TREE_STMT_VERIFY);
5057 /* Verifies that the flow information is OK. */
5059 static int
5060 gimple_verify_flow_info (void)
5062 int err = 0;
5063 basic_block bb;
5064 gimple_stmt_iterator gsi;
5065 gimple stmt;
5066 edge e;
5067 edge_iterator ei;
5069 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5070 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5072 error ("ENTRY_BLOCK has IL associated with it");
5073 err = 1;
5076 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5077 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5079 error ("EXIT_BLOCK has IL associated with it");
5080 err = 1;
5083 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5084 if (e->flags & EDGE_FALLTHRU)
5086 error ("fallthru to exit from bb %d", e->src->index);
5087 err = 1;
5090 FOR_EACH_BB_FN (bb, cfun)
5092 bool found_ctrl_stmt = false;
5094 stmt = NULL;
5096 /* Skip labels on the start of basic block. */
5097 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5099 tree label;
5100 gimple prev_stmt = stmt;
5102 stmt = gsi_stmt (gsi);
5104 if (gimple_code (stmt) != GIMPLE_LABEL)
5105 break;
5107 label = gimple_label_label (as_a <glabel *> (stmt));
5108 if (prev_stmt && DECL_NONLOCAL (label))
5110 error ("nonlocal label ");
5111 print_generic_expr (stderr, label, 0);
5112 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5113 bb->index);
5114 err = 1;
5117 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5119 error ("EH landing pad label ");
5120 print_generic_expr (stderr, label, 0);
5121 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5122 bb->index);
5123 err = 1;
5126 if (label_to_block (label) != bb)
5128 error ("label ");
5129 print_generic_expr (stderr, label, 0);
5130 fprintf (stderr, " to block does not match in bb %d",
5131 bb->index);
5132 err = 1;
5135 if (decl_function_context (label) != current_function_decl)
5137 error ("label ");
5138 print_generic_expr (stderr, label, 0);
5139 fprintf (stderr, " has incorrect context in bb %d",
5140 bb->index);
5141 err = 1;
5145 /* Verify that body of basic block BB is free of control flow. */
5146 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5148 gimple stmt = gsi_stmt (gsi);
5150 if (found_ctrl_stmt)
5152 error ("control flow in the middle of basic block %d",
5153 bb->index);
5154 err = 1;
5157 if (stmt_ends_bb_p (stmt))
5158 found_ctrl_stmt = true;
5160 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5162 error ("label ");
5163 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5164 fprintf (stderr, " in the middle of basic block %d", bb->index);
5165 err = 1;
5169 gsi = gsi_last_bb (bb);
5170 if (gsi_end_p (gsi))
5171 continue;
5173 stmt = gsi_stmt (gsi);
5175 if (gimple_code (stmt) == GIMPLE_LABEL)
5176 continue;
5178 err |= verify_eh_edges (stmt);
5180 if (is_ctrl_stmt (stmt))
5182 FOR_EACH_EDGE (e, ei, bb->succs)
5183 if (e->flags & EDGE_FALLTHRU)
5185 error ("fallthru edge after a control statement in bb %d",
5186 bb->index);
5187 err = 1;
5191 if (gimple_code (stmt) != GIMPLE_COND)
5193 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5194 after anything else but if statement. */
5195 FOR_EACH_EDGE (e, ei, bb->succs)
5196 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5198 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5199 bb->index);
5200 err = 1;
5204 switch (gimple_code (stmt))
5206 case GIMPLE_COND:
5208 edge true_edge;
5209 edge false_edge;
5211 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5213 if (!true_edge
5214 || !false_edge
5215 || !(true_edge->flags & EDGE_TRUE_VALUE)
5216 || !(false_edge->flags & EDGE_FALSE_VALUE)
5217 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5218 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5219 || EDGE_COUNT (bb->succs) >= 3)
5221 error ("wrong outgoing edge flags at end of bb %d",
5222 bb->index);
5223 err = 1;
5226 break;
5228 case GIMPLE_GOTO:
5229 if (simple_goto_p (stmt))
5231 error ("explicit goto at end of bb %d", bb->index);
5232 err = 1;
5234 else
5236 /* FIXME. We should double check that the labels in the
5237 destination blocks have their address taken. */
5238 FOR_EACH_EDGE (e, ei, bb->succs)
5239 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5240 | EDGE_FALSE_VALUE))
5241 || !(e->flags & EDGE_ABNORMAL))
5243 error ("wrong outgoing edge flags at end of bb %d",
5244 bb->index);
5245 err = 1;
5248 break;
5250 case GIMPLE_CALL:
5251 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5252 break;
5253 /* ... fallthru ... */
5254 case GIMPLE_RETURN:
5255 if (!single_succ_p (bb)
5256 || (single_succ_edge (bb)->flags
5257 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5258 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5260 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5261 err = 1;
5263 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5265 error ("return edge does not point to exit in bb %d",
5266 bb->index);
5267 err = 1;
5269 break;
5271 case GIMPLE_SWITCH:
5273 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5274 tree prev;
5275 edge e;
5276 size_t i, n;
5278 n = gimple_switch_num_labels (switch_stmt);
5280 /* Mark all the destination basic blocks. */
5281 for (i = 0; i < n; ++i)
5283 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5284 basic_block label_bb = label_to_block (lab);
5285 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5286 label_bb->aux = (void *)1;
5289 /* Verify that the case labels are sorted. */
5290 prev = gimple_switch_label (switch_stmt, 0);
5291 for (i = 1; i < n; ++i)
5293 tree c = gimple_switch_label (switch_stmt, i);
5294 if (!CASE_LOW (c))
5296 error ("found default case not at the start of "
5297 "case vector");
5298 err = 1;
5299 continue;
5301 if (CASE_LOW (prev)
5302 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5304 error ("case labels not sorted: ");
5305 print_generic_expr (stderr, prev, 0);
5306 fprintf (stderr," is greater than ");
5307 print_generic_expr (stderr, c, 0);
5308 fprintf (stderr," but comes before it.\n");
5309 err = 1;
5311 prev = c;
5313 /* VRP will remove the default case if it can prove it will
5314 never be executed. So do not verify there always exists
5315 a default case here. */
5317 FOR_EACH_EDGE (e, ei, bb->succs)
5319 if (!e->dest->aux)
5321 error ("extra outgoing edge %d->%d",
5322 bb->index, e->dest->index);
5323 err = 1;
5326 e->dest->aux = (void *)2;
5327 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5328 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5330 error ("wrong outgoing edge flags at end of bb %d",
5331 bb->index);
5332 err = 1;
5336 /* Check that we have all of them. */
5337 for (i = 0; i < n; ++i)
5339 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5340 basic_block label_bb = label_to_block (lab);
5342 if (label_bb->aux != (void *)2)
5344 error ("missing edge %i->%i", bb->index, label_bb->index);
5345 err = 1;
5349 FOR_EACH_EDGE (e, ei, bb->succs)
5350 e->dest->aux = (void *)0;
5352 break;
5354 case GIMPLE_EH_DISPATCH:
5355 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5356 break;
5358 default:
5359 break;
5363 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5364 verify_dominators (CDI_DOMINATORS);
5366 return err;
5370 /* Updates phi nodes after creating a forwarder block joined
5371 by edge FALLTHRU. */
5373 static void
5374 gimple_make_forwarder_block (edge fallthru)
5376 edge e;
5377 edge_iterator ei;
5378 basic_block dummy, bb;
5379 tree var;
5380 gphi_iterator gsi;
5382 dummy = fallthru->src;
5383 bb = fallthru->dest;
5385 if (single_pred_p (bb))
5386 return;
5388 /* If we redirected a branch we must create new PHI nodes at the
5389 start of BB. */
5390 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5392 gphi *phi, *new_phi;
5394 phi = gsi.phi ();
5395 var = gimple_phi_result (phi);
5396 new_phi = create_phi_node (var, bb);
5397 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5398 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5399 UNKNOWN_LOCATION);
5402 /* Add the arguments we have stored on edges. */
5403 FOR_EACH_EDGE (e, ei, bb->preds)
5405 if (e == fallthru)
5406 continue;
5408 flush_pending_stmts (e);
5413 /* Return a non-special label in the head of basic block BLOCK.
5414 Create one if it doesn't exist. */
5416 tree
5417 gimple_block_label (basic_block bb)
5419 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5420 bool first = true;
5421 tree label;
5422 glabel *stmt;
5424 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5426 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5427 if (!stmt)
5428 break;
5429 label = gimple_label_label (stmt);
5430 if (!DECL_NONLOCAL (label))
5432 if (!first)
5433 gsi_move_before (&i, &s);
5434 return label;
5438 label = create_artificial_label (UNKNOWN_LOCATION);
5439 stmt = gimple_build_label (label);
5440 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5441 return label;
5445 /* Attempt to perform edge redirection by replacing a possibly complex
5446 jump instruction by a goto or by removing the jump completely.
5447 This can apply only if all edges now point to the same block. The
5448 parameters and return values are equivalent to
5449 redirect_edge_and_branch. */
5451 static edge
5452 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5454 basic_block src = e->src;
5455 gimple_stmt_iterator i;
5456 gimple stmt;
5458 /* We can replace or remove a complex jump only when we have exactly
5459 two edges. */
5460 if (EDGE_COUNT (src->succs) != 2
5461 /* Verify that all targets will be TARGET. Specifically, the
5462 edge that is not E must also go to TARGET. */
5463 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5464 return NULL;
5466 i = gsi_last_bb (src);
5467 if (gsi_end_p (i))
5468 return NULL;
5470 stmt = gsi_stmt (i);
5472 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5474 gsi_remove (&i, true);
5475 e = ssa_redirect_edge (e, target);
5476 e->flags = EDGE_FALLTHRU;
5477 return e;
5480 return NULL;
5484 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5485 edge representing the redirected branch. */
5487 static edge
5488 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5490 basic_block bb = e->src;
5491 gimple_stmt_iterator gsi;
5492 edge ret;
5493 gimple stmt;
5495 if (e->flags & EDGE_ABNORMAL)
5496 return NULL;
5498 if (e->dest == dest)
5499 return NULL;
5501 if (e->flags & EDGE_EH)
5502 return redirect_eh_edge (e, dest);
5504 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5506 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5507 if (ret)
5508 return ret;
5511 gsi = gsi_last_bb (bb);
5512 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5514 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5516 case GIMPLE_COND:
5517 /* For COND_EXPR, we only need to redirect the edge. */
5518 break;
5520 case GIMPLE_GOTO:
5521 /* No non-abnormal edges should lead from a non-simple goto, and
5522 simple ones should be represented implicitly. */
5523 gcc_unreachable ();
5525 case GIMPLE_SWITCH:
5527 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5528 tree label = gimple_block_label (dest);
5529 tree cases = get_cases_for_edge (e, switch_stmt);
5531 /* If we have a list of cases associated with E, then use it
5532 as it's a lot faster than walking the entire case vector. */
5533 if (cases)
5535 edge e2 = find_edge (e->src, dest);
5536 tree last, first;
5538 first = cases;
5539 while (cases)
5541 last = cases;
5542 CASE_LABEL (cases) = label;
5543 cases = CASE_CHAIN (cases);
5546 /* If there was already an edge in the CFG, then we need
5547 to move all the cases associated with E to E2. */
5548 if (e2)
5550 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5552 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5553 CASE_CHAIN (cases2) = first;
5555 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5557 else
5559 size_t i, n = gimple_switch_num_labels (switch_stmt);
5561 for (i = 0; i < n; i++)
5563 tree elt = gimple_switch_label (switch_stmt, i);
5564 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5565 CASE_LABEL (elt) = label;
5569 break;
5571 case GIMPLE_ASM:
5573 gasm *asm_stmt = as_a <gasm *> (stmt);
5574 int i, n = gimple_asm_nlabels (asm_stmt);
5575 tree label = NULL;
5577 for (i = 0; i < n; ++i)
5579 tree cons = gimple_asm_label_op (asm_stmt, i);
5580 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5582 if (!label)
5583 label = gimple_block_label (dest);
5584 TREE_VALUE (cons) = label;
5588 /* If we didn't find any label matching the former edge in the
5589 asm labels, we must be redirecting the fallthrough
5590 edge. */
5591 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5593 break;
5595 case GIMPLE_RETURN:
5596 gsi_remove (&gsi, true);
5597 e->flags |= EDGE_FALLTHRU;
5598 break;
5600 case GIMPLE_OMP_RETURN:
5601 case GIMPLE_OMP_CONTINUE:
5602 case GIMPLE_OMP_SECTIONS_SWITCH:
5603 case GIMPLE_OMP_FOR:
5604 /* The edges from OMP constructs can be simply redirected. */
5605 break;
5607 case GIMPLE_EH_DISPATCH:
5608 if (!(e->flags & EDGE_FALLTHRU))
5609 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5610 break;
5612 case GIMPLE_TRANSACTION:
5613 /* The ABORT edge has a stored label associated with it, otherwise
5614 the edges are simply redirectable. */
5615 if (e->flags == 0)
5616 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5617 gimple_block_label (dest));
5618 break;
5620 default:
5621 /* Otherwise it must be a fallthru edge, and we don't need to
5622 do anything besides redirecting it. */
5623 gcc_assert (e->flags & EDGE_FALLTHRU);
5624 break;
5627 /* Update/insert PHI nodes as necessary. */
5629 /* Now update the edges in the CFG. */
5630 e = ssa_redirect_edge (e, dest);
5632 return e;
5635 /* Returns true if it is possible to remove edge E by redirecting
5636 it to the destination of the other edge from E->src. */
5638 static bool
5639 gimple_can_remove_branch_p (const_edge e)
5641 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5642 return false;
5644 return true;
5647 /* Simple wrapper, as we can always redirect fallthru edges. */
5649 static basic_block
5650 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5652 e = gimple_redirect_edge_and_branch (e, dest);
5653 gcc_assert (e);
5655 return NULL;
5659 /* Splits basic block BB after statement STMT (but at least after the
5660 labels). If STMT is NULL, BB is split just after the labels. */
5662 static basic_block
5663 gimple_split_block (basic_block bb, void *stmt)
5665 gimple_stmt_iterator gsi;
5666 gimple_stmt_iterator gsi_tgt;
5667 gimple act;
5668 gimple_seq list;
5669 basic_block new_bb;
5670 edge e;
5671 edge_iterator ei;
5673 new_bb = create_empty_bb (bb);
5675 /* Redirect the outgoing edges. */
5676 new_bb->succs = bb->succs;
5677 bb->succs = NULL;
5678 FOR_EACH_EDGE (e, ei, new_bb->succs)
5679 e->src = new_bb;
5681 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5682 stmt = NULL;
5684 /* Move everything from GSI to the new basic block. */
5685 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5687 act = gsi_stmt (gsi);
5688 if (gimple_code (act) == GIMPLE_LABEL)
5689 continue;
5691 if (!stmt)
5692 break;
5694 if (stmt == act)
5696 gsi_next (&gsi);
5697 break;
5701 if (gsi_end_p (gsi))
5702 return new_bb;
5704 /* Split the statement list - avoid re-creating new containers as this
5705 brings ugly quadratic memory consumption in the inliner.
5706 (We are still quadratic since we need to update stmt BB pointers,
5707 sadly.) */
5708 gsi_split_seq_before (&gsi, &list);
5709 set_bb_seq (new_bb, list);
5710 for (gsi_tgt = gsi_start (list);
5711 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5712 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5714 return new_bb;
5718 /* Moves basic block BB after block AFTER. */
5720 static bool
5721 gimple_move_block_after (basic_block bb, basic_block after)
5723 if (bb->prev_bb == after)
5724 return true;
5726 unlink_block (bb);
5727 link_block (bb, after);
5729 return true;
5733 /* Return TRUE if block BB has no executable statements, otherwise return
5734 FALSE. */
5736 static bool
5737 gimple_empty_block_p (basic_block bb)
5739 /* BB must have no executable statements. */
5740 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5741 if (phi_nodes (bb))
5742 return false;
5743 if (gsi_end_p (gsi))
5744 return true;
5745 if (is_gimple_debug (gsi_stmt (gsi)))
5746 gsi_next_nondebug (&gsi);
5747 return gsi_end_p (gsi);
5751 /* Split a basic block if it ends with a conditional branch and if the
5752 other part of the block is not empty. */
5754 static basic_block
5755 gimple_split_block_before_cond_jump (basic_block bb)
5757 gimple last, split_point;
5758 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5759 if (gsi_end_p (gsi))
5760 return NULL;
5761 last = gsi_stmt (gsi);
5762 if (gimple_code (last) != GIMPLE_COND
5763 && gimple_code (last) != GIMPLE_SWITCH)
5764 return NULL;
5765 gsi_prev_nondebug (&gsi);
5766 split_point = gsi_stmt (gsi);
5767 return split_block (bb, split_point)->dest;
5771 /* Return true if basic_block can be duplicated. */
5773 static bool
5774 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5776 return true;
5779 /* Create a duplicate of the basic block BB. NOTE: This does not
5780 preserve SSA form. */
5782 static basic_block
5783 gimple_duplicate_bb (basic_block bb)
5785 basic_block new_bb;
5786 gimple_stmt_iterator gsi_tgt;
5788 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5790 /* Copy the PHI nodes. We ignore PHI node arguments here because
5791 the incoming edges have not been setup yet. */
5792 for (gphi_iterator gpi = gsi_start_phis (bb);
5793 !gsi_end_p (gpi);
5794 gsi_next (&gpi))
5796 gphi *phi, *copy;
5797 phi = gpi.phi ();
5798 copy = create_phi_node (NULL_TREE, new_bb);
5799 create_new_def_for (gimple_phi_result (phi), copy,
5800 gimple_phi_result_ptr (copy));
5801 gimple_set_uid (copy, gimple_uid (phi));
5804 gsi_tgt = gsi_start_bb (new_bb);
5805 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5806 !gsi_end_p (gsi);
5807 gsi_next (&gsi))
5809 def_operand_p def_p;
5810 ssa_op_iter op_iter;
5811 tree lhs;
5812 gimple stmt, copy;
5814 stmt = gsi_stmt (gsi);
5815 if (gimple_code (stmt) == GIMPLE_LABEL)
5816 continue;
5818 /* Don't duplicate label debug stmts. */
5819 if (gimple_debug_bind_p (stmt)
5820 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5821 == LABEL_DECL)
5822 continue;
5824 /* Create a new copy of STMT and duplicate STMT's virtual
5825 operands. */
5826 copy = gimple_copy (stmt);
5827 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5829 maybe_duplicate_eh_stmt (copy, stmt);
5830 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5832 /* When copying around a stmt writing into a local non-user
5833 aggregate, make sure it won't share stack slot with other
5834 vars. */
5835 lhs = gimple_get_lhs (stmt);
5836 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5838 tree base = get_base_address (lhs);
5839 if (base
5840 && (TREE_CODE (base) == VAR_DECL
5841 || TREE_CODE (base) == RESULT_DECL)
5842 && DECL_IGNORED_P (base)
5843 && !TREE_STATIC (base)
5844 && !DECL_EXTERNAL (base)
5845 && (TREE_CODE (base) != VAR_DECL
5846 || !DECL_HAS_VALUE_EXPR_P (base)))
5847 DECL_NONSHAREABLE (base) = 1;
5850 /* Create new names for all the definitions created by COPY and
5851 add replacement mappings for each new name. */
5852 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5853 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5856 return new_bb;
5859 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5861 static void
5862 add_phi_args_after_copy_edge (edge e_copy)
5864 basic_block bb, bb_copy = e_copy->src, dest;
5865 edge e;
5866 edge_iterator ei;
5867 gphi *phi, *phi_copy;
5868 tree def;
5869 gphi_iterator psi, psi_copy;
5871 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5872 return;
5874 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5876 if (e_copy->dest->flags & BB_DUPLICATED)
5877 dest = get_bb_original (e_copy->dest);
5878 else
5879 dest = e_copy->dest;
5881 e = find_edge (bb, dest);
5882 if (!e)
5884 /* During loop unrolling the target of the latch edge is copied.
5885 In this case we are not looking for edge to dest, but to
5886 duplicated block whose original was dest. */
5887 FOR_EACH_EDGE (e, ei, bb->succs)
5889 if ((e->dest->flags & BB_DUPLICATED)
5890 && get_bb_original (e->dest) == dest)
5891 break;
5894 gcc_assert (e != NULL);
5897 for (psi = gsi_start_phis (e->dest),
5898 psi_copy = gsi_start_phis (e_copy->dest);
5899 !gsi_end_p (psi);
5900 gsi_next (&psi), gsi_next (&psi_copy))
5902 phi = psi.phi ();
5903 phi_copy = psi_copy.phi ();
5904 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5905 add_phi_arg (phi_copy, def, e_copy,
5906 gimple_phi_arg_location_from_edge (phi, e));
5911 /* Basic block BB_COPY was created by code duplication. Add phi node
5912 arguments for edges going out of BB_COPY. The blocks that were
5913 duplicated have BB_DUPLICATED set. */
5915 void
5916 add_phi_args_after_copy_bb (basic_block bb_copy)
5918 edge e_copy;
5919 edge_iterator ei;
5921 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5923 add_phi_args_after_copy_edge (e_copy);
5927 /* Blocks in REGION_COPY array of length N_REGION were created by
5928 duplication of basic blocks. Add phi node arguments for edges
5929 going from these blocks. If E_COPY is not NULL, also add
5930 phi node arguments for its destination.*/
5932 void
5933 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5934 edge e_copy)
5936 unsigned i;
5938 for (i = 0; i < n_region; i++)
5939 region_copy[i]->flags |= BB_DUPLICATED;
5941 for (i = 0; i < n_region; i++)
5942 add_phi_args_after_copy_bb (region_copy[i]);
5943 if (e_copy)
5944 add_phi_args_after_copy_edge (e_copy);
5946 for (i = 0; i < n_region; i++)
5947 region_copy[i]->flags &= ~BB_DUPLICATED;
5950 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5951 important exit edge EXIT. By important we mean that no SSA name defined
5952 inside region is live over the other exit edges of the region. All entry
5953 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5954 to the duplicate of the region. Dominance and loop information is
5955 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5956 UPDATE_DOMINANCE is false then we assume that the caller will update the
5957 dominance information after calling this function. The new basic
5958 blocks are stored to REGION_COPY in the same order as they had in REGION,
5959 provided that REGION_COPY is not NULL.
5960 The function returns false if it is unable to copy the region,
5961 true otherwise. */
5963 bool
5964 gimple_duplicate_sese_region (edge entry, edge exit,
5965 basic_block *region, unsigned n_region,
5966 basic_block *region_copy,
5967 bool update_dominance)
5969 unsigned i;
5970 bool free_region_copy = false, copying_header = false;
5971 struct loop *loop = entry->dest->loop_father;
5972 edge exit_copy;
5973 vec<basic_block> doms;
5974 edge redirected;
5975 int total_freq = 0, entry_freq = 0;
5976 gcov_type total_count = 0, entry_count = 0;
/* Refuse regions copy_bbs cannot handle (e.g. blocks with abnormal
   edges).  */
5978 if (!can_copy_bbs_p (region, n_region))
5979 return false;
5981 /* Some sanity checking. Note that we do not check for all possible
5982 misuses of the functions. I.e. if you ask to copy something weird,
5983 it will work, but the state of structures probably will not be
5984 correct. */
5985 for (i = 0; i < n_region; i++)
5987 /* We do not handle subloops, i.e. all the blocks must belong to the
5988 same loop. */
5989 if (region[i]->loop_father != loop)
5990 return false;
/* The loop header may only appear as the region entry.  */
5992 if (region[i] != entry->dest
5993 && region[i] == loop->header)
5994 return false;
5997 /* In case the function is used for loop header copying (which is the primary
5998 use), ensure that EXIT and its copy will be new latch and entry edges. */
5999 if (loop->header == entry->dest)
6001 copying_header = true;
6003 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6004 return false;
6006 for (i = 0; i < n_region; i++)
6007 if (region[i] != exit->src
6008 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6009 return false;
6012 initialize_original_copy_tables ();
/* When copying the header, the copy belongs to the enclosing loop;
   otherwise the copy stays in LOOP itself.  */
6014 if (copying_header)
6015 set_loop_copy (loop, loop_outer (loop));
6016 else
6017 set_loop_copy (loop, loop);
6019 if (!region_copy)
6021 region_copy = XNEWVEC (basic_block, n_region);
6022 free_region_copy = true;
6025 /* Record blocks outside the region that are dominated by something
6026 inside. */
6027 if (update_dominance)
6029 doms.create (0);
6030 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Profile scaling: prefer real profile counts when present, else fall
   back to static frequencies.  */
6033 if (entry->dest->count)
6035 total_count = entry->dest->count;
6036 entry_count = entry->count;
6037 /* Fix up corner cases, to avoid division by zero or creation of negative
6038 frequencies. */
6039 if (entry_count > total_count)
6040 entry_count = total_count;
6042 else
6044 total_freq = entry->dest->frequency;
6045 entry_freq = EDGE_FREQUENCY (entry);
6046 /* Fix up corner cases, to avoid division by zero or creation of negative
6047 frequencies. */
6048 if (total_freq == 0)
6049 total_freq = 1;
6050 else if (entry_freq > total_freq)
6051 entry_freq = total_freq;
6054 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6055 split_edge_bb_loc (entry), update_dominance);
/* Split the incoming profile between the original region and its copy
   in proportion to how often ENTRY is taken.  */
6056 if (total_count)
6058 scale_bbs_frequencies_gcov_type (region, n_region,
6059 total_count - entry_count,
6060 total_count);
6061 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6062 total_count);
6064 else
6066 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6067 total_freq);
6068 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* For header copying, the original region becomes the new loop body:
   EXIT's destination is the new header and its source the new latch.  */
6071 if (copying_header)
6073 loop->header = exit->dest;
6074 loop->latch = exit->src;
6077 /* Redirect the entry and add the phi node arguments. */
6078 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6079 gcc_assert (redirected != NULL);
6080 flush_pending_stmts (entry);
6082 /* Concerning updating of dominators: We must recount dominators
6083 for entry block and its copy. Anything that is outside of the
6084 region, but was dominated by something inside needs recounting as
6085 well. */
6086 if (update_dominance)
6088 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6089 doms.safe_push (get_bb_original (entry->dest));
6090 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6091 doms.release ();
6094 /* Add the other PHI node arguments. */
6095 add_phi_args_after_copy (region_copy, n_region, NULL);
6097 if (free_region_copy)
6098 free (region_copy);
6100 free_original_copy_tables ();
6101 return true;
6104 /* Checks if BB is part of the region defined by N_REGION BBS. */
6105 static bool
6106 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6108 unsigned int n;
6110 for (n = 0; n < n_region; n++)
6112 if (bb == bbs[n])
6113 return true;
6115 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

     some_code;
     if (cond)
       A;
     else
       B;

   is transformed to

     if (cond)
       {
         some_code;
         A;
       }
     else
       {
         some_code;
         B;
       }

   NOTE(review): every parameter below carries ATTRIBUTE_UNUSED even
   though all of them are used in the body; the attributes look stale —
   confirm and remove.  */
6147 bool
6148 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6149 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6150 basic_block *region_copy ATTRIBUTE_UNUSED)
6152 unsigned i;
6153 bool free_region_copy = false;
6154 struct loop *loop = exit->dest->loop_father;
6155 struct loop *orig_loop = entry->dest->loop_father;
6156 basic_block switch_bb, entry_bb, nentry_bb;
6157 vec<basic_block> doms;
6158 int total_freq = 0, exit_freq = 0;
6159 gcov_type total_count = 0, exit_count = 0;
6160 edge exits[2], nexits[2], e;
6161 gimple_stmt_iterator gsi;
6162 gimple cond_stmt;
6163 edge sorig, snew;
6164 basic_block exit_bb;
6165 gphi_iterator psi;
6166 gphi *phi;
6167 tree def;
6168 struct loop *target, *aloop, *cloop;
/* EXIT's source must end in a two-way condition; EXITS holds EXIT and
   its sibling successor edge.  */
6170 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6171 exits[0] = exit;
6172 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6174 if (!can_copy_bbs_p (region, n_region))
6175 return false;
6177 initialize_original_copy_tables ();
6178 set_loop_copy (orig_loop, loop);
/* Duplicate any loop of ORIG_LOOP whose header lies in the region,
   together with its subloops, placing the copies under TARGET.  */
6180 target= loop;
6181 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6183 if (bb_part_of_region_p (aloop->header, region, n_region))
6185 cloop = duplicate_loop (aloop, target);
6186 duplicate_subloops (aloop, cloop);
6190 if (!region_copy)
6192 region_copy = XNEWVEC (basic_block, n_region);
6193 free_region_copy = true;
6196 gcc_assert (!need_ssa_update_p (cfun));
6198 /* Record blocks outside the region that are dominated by something
6199 inside. */
6200 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Profile scaling: prefer real profile counts when present, else fall
   back to static frequencies.  */
6202 if (exit->src->count)
6204 total_count = exit->src->count;
6205 exit_count = exit->count;
6206 /* Fix up corner cases, to avoid division by zero or creation of negative
6207 frequencies. */
6208 if (exit_count > total_count)
6209 exit_count = total_count;
6211 else
6213 total_freq = exit->src->frequency;
6214 exit_freq = EDGE_FREQUENCY (exit);
6215 /* Fix up corner cases, to avoid division by zero or creation of negative
6216 frequencies. */
6217 if (total_freq == 0)
6218 total_freq = 1;
6219 if (exit_freq > total_freq)
6220 exit_freq = total_freq;
6223 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6224 split_edge_bb_loc (exit), true);
6225 if (total_count)
6227 scale_bbs_frequencies_gcov_type (region, n_region,
6228 total_count - exit_count,
6229 total_count);
6230 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6231 total_count);
6233 else
6235 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6236 total_freq);
6237 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6240 /* Create the switch block, and put the exit condition to it. */
6241 entry_bb = entry->dest;
6242 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY's source when it can take the copied condition, otherwise
   split ENTRY to make room for it.  */
6243 if (!last_stmt (entry->src)
6244 || !stmt_ends_bb_p (last_stmt (entry->src)))
6245 switch_bb = entry->src;
6246 else
6247 switch_bb = split_edge (entry);
6248 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6250 gsi = gsi_last_bb (switch_bb);
6251 cond_stmt = last_stmt (exit->src);
6252 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6253 cond_stmt = gimple_copy (cond_stmt);
6255 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6257 sorig = single_succ_edge (switch_bb);
6258 sorig->flags = exits[1]->flags;
6259 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6261 /* Register the new edge from SWITCH_BB in loop exit lists. */
6262 rescan_loop_exit (snew, true, false);
6264 /* Add the PHI node arguments. */
6265 add_phi_args_after_copy (region_copy, n_region, snew);
6267 /* Get rid of now superfluous conditions and associated edges (and phi node
6268 arguments). */
6269 exit_bb = exit->dest;
6271 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6272 PENDING_STMT (e) = NULL;
6274 /* The latch of ORIG_LOOP was copied, and so was the backedge
6275 to the original header. We redirect this backedge to EXIT_BB. */
6276 for (i = 0; i < n_region; i++)
6277 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6279 gcc_assert (single_succ_edge (region_copy[i]));
6280 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6281 PENDING_STMT (e) = NULL;
/* Extend EXIT_BB's PHIs with an argument for the redirected backedge,
   reusing the value that flowed in through the copied exit.  */
6282 for (psi = gsi_start_phis (exit_bb);
6283 !gsi_end_p (psi);
6284 gsi_next (&psi))
6286 phi = psi.phi ();
6287 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6288 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6291 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6292 PENDING_STMT (e) = NULL;
6294 /* Anything that is outside of the region, but was dominated by something
6295 inside needs to update dominance info. */
6296 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6297 doms.release ();
6298 /* Update the SSA web. */
6299 update_ssa (TODO_update_ssa);
6301 if (free_region_copy)
6302 free (region_copy);
6304 free_original_copy_tables ();
6305 return true;
6308 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6309 adding blocks when the dominator traversal reaches EXIT. This
6310 function silently assumes that ENTRY strictly dominates EXIT. */
6312 void
6313 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6314 vec<basic_block> *bbs_p)
6316 basic_block son;
6318 for (son = first_dom_son (CDI_DOMINATORS, entry);
6319 son;
6320 son = next_dom_son (CDI_DOMINATORS, son))
6322 bbs_p->safe_push (son);
6323 if (son != exit)
6324 gather_blocks_in_sese_region (son, exit, bbs_p);
6328 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6329 The duplicates are recorded in VARS_MAP. */
6331 static void
6332 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6333 tree to_context)
6335 tree t = *tp, new_t;
6336 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6338 if (DECL_CONTEXT (t) == to_context)
6339 return;
6341 bool existed;
6342 tree &loc = vars_map->get_or_insert (t, &existed);
6344 if (!existed)
6346 if (SSA_VAR_P (t))
6348 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6349 add_local_decl (f, new_t);
6351 else
6353 gcc_assert (TREE_CODE (t) == CONST_DECL);
6354 new_t = copy_node (t);
6356 DECL_CONTEXT (new_t) = to_context;
6358 loc = new_t;
6360 else
6361 new_t = loc;
6363 *tp = new_t;
6367 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6368 VARS_MAP maps old ssa names and var_decls to the new ones. */
6370 static tree
6371 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6372 tree to_context)
6374 tree new_name;
6376 gcc_assert (!virtual_operand_p (name));
6378 tree *loc = vars_map->get (name);
6380 if (!loc)
6382 tree decl = SSA_NAME_VAR (name);
6383 if (decl)
6385 replace_by_duplicate_decl (&decl, vars_map, to_context);
6386 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6387 decl, SSA_NAME_DEF_STMT (name));
6388 if (SSA_NAME_IS_DEFAULT_DEF (name))
6389 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6390 decl, new_name);
6392 else
6393 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6394 name, SSA_NAME_DEF_STMT (name));
6396 vars_map->put (name, new_name);
6398 else
6399 new_name = *loc;
6401 return new_name;
/* Bookkeeping passed through the move_stmt_r/move_stmt_op walkers when
   moving statements between functions.  */
6404 struct move_stmt_d
/* Statements whose TREE_BLOCK is ORIG_BLOCK (or any block when
   ORIG_BLOCK is NULL) are re-parented to NEW_BLOCK.  */
6406 tree orig_block;
6407 tree new_block;
/* Source and destination FUNCTION_DECLs of the move.  */
6408 tree from_context;
6409 tree to_context;
/* Cache mapping old decls/SSA names to their duplicates.  */
6410 hash_map<tree, tree> *vars_map;
/* Optional tree_map hash table mapping old labels to new ones.  */
6411 htab_t new_label_map;
/* Optional map from source EH regions to their duplicates.  */
6412 hash_map<void *, void *> *eh_map;
/* Whether local decls should be remapped; cleared while walking inside
   OMP directives (see move_stmt_r).  */
6413 bool remap_decls_p;
6416 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6417 contained in *TP if it has been ORIG_BLOCK previously and change the
6418 DECL_CONTEXT of every local variable referenced in *TP. */
6420 static tree
6421 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6423 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6424 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6425 tree t = *tp;
6427 if (EXPR_P (t))
6429 tree block = TREE_BLOCK (t);
/* Re-parent the expression's block when it matches ORIG_BLOCK, or
   unconditionally when no specific ORIG_BLOCK was requested.  */
6430 if (block == p->orig_block
6431 || (p->orig_block == NULL_TREE
6432 && block != NULL_TREE))
6433 TREE_SET_BLOCK (t, p->new_block);
6434 #ifdef ENABLE_CHECKING
/* With checking enabled, verify that any other block is nested
   somewhere below ORIG_BLOCK.  */
6435 else if (block != NULL_TREE)
6437 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6438 block = BLOCK_SUPERCONTEXT (block);
6439 gcc_assert (block == p->orig_block);
6441 #endif
6443 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6445 if (TREE_CODE (t) == SSA_NAME)
6446 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6447 else if (TREE_CODE (t) == LABEL_DECL)
6449 if (p->new_label_map)
/* Labels may have been duplicated by EH region copying; look up the
   replacement in the label map.  */
6451 struct tree_map in, *out;
6452 in.base.from = t;
6453 out = (struct tree_map *)
6454 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6455 if (out)
6456 *tp = t = out->to;
6459 DECL_CONTEXT (t) = p->to_context;
6461 else if (p->remap_decls_p)
6463 /* Replace T with its duplicate. T should no longer appear in the
6464 parent function, so this looks wasteful; however, it may appear
6465 in referenced_vars, and more importantly, as virtual operands of
6466 statements, and in alias lists of other variables. It would be
6467 quite difficult to expunge it from all those places. ??? It might
6468 suffice to do this for addressable variables. */
6469 if ((TREE_CODE (t) == VAR_DECL
6470 && !is_global_var (t))
6471 || TREE_CODE (t) == CONST_DECL)
6472 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Decls and SSA names have no interesting subtrees to walk.  */
6474 *walk_subtrees = 0;
6476 else if (TYPE_P (t))
6477 *walk_subtrees = 0;
6479 return NULL_TREE;
6482 /* Helper for move_stmt_r. Given an EH region number for the source
6483 function, map that to the duplicate EH regio number in the dest. */
6485 static int
6486 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6488 eh_region old_r, new_r;
6490 old_r = get_eh_region_from_number (old_nr);
6491 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6493 return new_r->index;
6496 /* Similar, but operate on INTEGER_CSTs. */
6498 static tree
6499 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6501 int old_nr, new_nr;
6503 old_nr = tree_to_shwi (old_t_nr);
6504 new_nr = move_stmt_eh_region_nr (old_nr, p);
6506 return build_int_cst (integer_type_node, new_nr);
6509 /* Like move_stmt_op, but for gimple statements.
6511 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6512 contained in the current statement in *GSI_P and change the
6513 DECL_CONTEXT of every local variable referenced in the current
6514 statement. */
6516 static tree
6517 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6518 struct walk_stmt_info *wi)
6520 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6521 gimple stmt = gsi_stmt (*gsi_p);
6522 tree block = gimple_block (stmt);
/* Re-parent the statement's block, mirroring move_stmt_op.  */
6524 if (block == p->orig_block
6525 || (p->orig_block == NULL_TREE
6526 && block != NULL_TREE))
6527 gimple_set_block (stmt, p->new_block);
6529 switch (gimple_code (stmt))
6531 case GIMPLE_CALL:
6532 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6534 tree r, fndecl = gimple_call_fndecl (stmt);
6535 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6536 switch (DECL_FUNCTION_CODE (fndecl))
6538 case BUILT_IN_EH_COPY_VALUES:
/* __builtin_eh_copy_values takes two region numbers; remap the second
   here and fall through to remap the first.  */
6539 r = gimple_call_arg (stmt, 1);
6540 r = move_stmt_eh_region_tree_nr (r, p);
6541 gimple_call_set_arg (stmt, 1, r);
6542 /* FALLTHRU */
6544 case BUILT_IN_EH_POINTER:
6545 case BUILT_IN_EH_FILTER:
6546 r = gimple_call_arg (stmt, 0);
6547 r = move_stmt_eh_region_tree_nr (r, p);
6548 gimple_call_set_arg (stmt, 0, r);
6549 break;
6551 default:
6552 break;
6555 break;
6557 case GIMPLE_RESX:
6559 gresx *resx_stmt = as_a <gresx *> (stmt);
6560 int r = gimple_resx_region (resx_stmt);
6561 r = move_stmt_eh_region_nr (r, p);
6562 gimple_resx_set_region (resx_stmt, r);
6564 break;
6566 case GIMPLE_EH_DISPATCH:
6568 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6569 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6570 r = move_stmt_eh_region_nr (r, p);
6571 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6573 break;
6575 case GIMPLE_OMP_RETURN:
6576 case GIMPLE_OMP_CONTINUE:
6577 break;
6578 default:
6579 if (is_gimple_omp (stmt))
6581 /* Do not remap variables inside OMP directives. Variables
6582 referenced in clauses and directive header belong to the
6583 parent function and should not be moved into the child
6584 function. */
6585 bool save_remap_decls_p = p->remap_decls_p;
6586 p->remap_decls_p = false;
6587 *handled_ops_p = true;
6589 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6590 move_stmt_op, wi);
/* Restore decl remapping once the OMP body has been walked.  */
6592 p->remap_decls_p = save_remap_decls_p;
6594 break;
6597 return NULL_TREE;
6600 /* Move basic block BB from function CFUN to function DEST_FN. The
6601 block is moved out of the original linked list and placed after
6602 block AFTER in the new list. Also, the block is removed from the
6603 original array of blocks and placed in DEST_FN's array of blocks.
6604 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6605 updated to reflect the moved edges.
6607 The local variables are remapped to new instances, VARS_MAP is used
6608 to record the mapping. */
6610 static void
6611 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6612 basic_block after, bool update_edge_count_p,
6613 struct move_stmt_d *d)
6615 struct control_flow_graph *cfg;
6616 edge_iterator ei;
6617 edge e;
6618 gimple_stmt_iterator si;
6619 unsigned old_len, new_len;
6621 /* Remove BB from dominance structures. */
6622 delete_from_dominance_info (CDI_DOMINATORS, bb);
6624 /* Move BB from its current loop to the copy in the new function. */
6625 if (current_loops)
/* The caller stashed the destination loop in loop_father->aux (see
   move_sese_region_to_fn).  */
6627 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6628 if (new_loop)
6629 bb->loop_father = new_loop;
6632 /* Link BB to the new linked list. */
6633 move_block_after (bb, after);
6635 /* Update the edge count in the corresponding flowgraphs. */
6636 if (update_edge_count_p)
6637 FOR_EACH_EDGE (e, ei, bb->succs)
6639 cfun->cfg->x_n_edges--;
6640 dest_cfun->cfg->x_n_edges++;
6643 /* Remove BB from the original basic block array. */
6644 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6645 cfun->cfg->x_n_basic_blocks--;
6647 /* Grow DEST_CFUN's basic block array if needed. */
6648 cfg = dest_cfun->cfg;
6649 cfg->x_n_basic_blocks++;
6650 if (bb->index >= cfg->x_last_basic_block)
6651 cfg->x_last_basic_block = bb->index + 1;
6653 old_len = vec_safe_length (cfg->x_basic_block_info);
6654 if ((unsigned) cfg->x_last_basic_block >= old_len)
/* Grow with some headroom (25%) to amortize repeated moves.  */
6656 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6657 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6660 (*cfg->x_basic_block_info)[bb->index] = bb;
6662 /* Remap the variables in phi nodes. */
6663 for (gphi_iterator psi = gsi_start_phis (bb);
6664 !gsi_end_p (psi); )
6666 gphi *phi = psi.phi ();
6667 use_operand_p use;
6668 tree op = PHI_RESULT (phi);
6669 ssa_op_iter oi;
6670 unsigned i;
6672 if (virtual_operand_p (op))
6674 /* Remove the phi nodes for virtual operands (alias analysis will be
6675 run for the new function, anyway). */
6676 remove_phi_node (&psi, true);
6677 continue;
/* Remap the PHI result and all real SSA uses into DEST_CFUN.  */
6680 SET_PHI_RESULT (phi,
6681 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6682 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6684 op = USE_FROM_PTR (use);
6685 if (TREE_CODE (op) == SSA_NAME)
6686 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Re-parent the BLOCK recorded in each PHI argument location.  */
6689 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6691 location_t locus = gimple_phi_arg_location (phi, i);
6692 tree block = LOCATION_BLOCK (locus);
6694 if (locus == UNKNOWN_LOCATION)
6695 continue;
6696 if (d->orig_block == NULL_TREE || block == d->orig_block)
6698 if (d->new_block == NULL_TREE)
6699 locus = LOCATION_LOCUS (locus);
6700 else
6701 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6702 gimple_phi_arg_set_location (phi, i, locus);
6706 gsi_next (&psi);
/* Walk all remaining statements, remapping blocks, decls, SSA names,
   labels, EH regions and histograms into DEST_CFUN.  */
6709 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6711 gimple stmt = gsi_stmt (si);
6712 struct walk_stmt_info wi;
6714 memset (&wi, 0, sizeof (wi));
6715 wi.info = d;
6716 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6718 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6720 tree label = gimple_label_label (label_stmt);
6721 int uid = LABEL_DECL_UID (label);
6723 gcc_assert (uid > -1);
/* Move the label's entry from CFUN's label map to DEST_CFUN's,
   growing the destination map if necessary.  */
6725 old_len = vec_safe_length (cfg->x_label_to_block_map);
6726 if (old_len <= (unsigned) uid)
6728 new_len = 3 * uid / 2 + 1;
6729 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6732 (*cfg->x_label_to_block_map)[uid] = bb;
6733 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6735 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6737 if (uid >= dest_cfun->cfg->last_label_uid)
6738 dest_cfun->cfg->last_label_uid = uid + 1;
6741 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6742 remove_stmt_from_eh_lp_fn (cfun, stmt);
6744 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6745 gimple_remove_stmt_histograms (cfun, stmt);
6747 /* We cannot leave any operands allocated from the operand caches of
6748 the current function. */
6749 free_stmt_operands (cfun, stmt);
6750 push_cfun (dest_cfun);
6751 update_stmt (stmt);
6752 pop_cfun ();
/* Re-parent the BLOCK recorded in each outgoing edge's goto locus.  */
6755 FOR_EACH_EDGE (e, ei, bb->succs)
6756 if (e->goto_locus != UNKNOWN_LOCATION)
6758 tree block = LOCATION_BLOCK (e->goto_locus);
6759 if (d->orig_block == NULL_TREE
6760 || block == d->orig_block)
6761 e->goto_locus = d->new_block ?
6762 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6763 LOCATION_LOCUS (e->goto_locus);
6767 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6768 the outermost EH region. Use REGION as the incoming base EH region. */
6770 static eh_region
6771 find_outermost_region_in_block (struct function *src_cfun,
6772 basic_block bb, eh_region region)
6774 gimple_stmt_iterator si;
6776 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6778 gimple stmt = gsi_stmt (si);
6779 eh_region stmt_region;
6780 int lp_nr;
6782 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6783 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6784 if (stmt_region)
6786 if (region == NULL)
6787 region = stmt_region;
6788 else if (stmt_region != region)
6790 region = eh_region_outermost (src_cfun, stmt_region, region);
6791 gcc_assert (region != NULL);
6796 return region;
6799 static tree
6800 new_label_mapper (tree decl, void *data)
6802 htab_t hash = (htab_t) data;
6803 struct tree_map *m;
6804 void **slot;
6806 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6808 m = XNEW (struct tree_map);
6809 m->hash = DECL_UID (decl);
6810 m->base.from = decl;
6811 m->to = create_artificial_label (UNKNOWN_LOCATION);
6812 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6813 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6814 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6816 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6817 gcc_assert (*slot == NULL);
6819 *slot = m;
6821 return m->to;
6824 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6825 subblocks. */
6827 static void
6828 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6829 tree to_context)
6831 tree *tp, t;
6833 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6835 t = *tp;
6836 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6837 continue;
6838 replace_by_duplicate_decl (&t, vars_map, to_context);
6839 if (t != *tp)
6841 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6843 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6844 DECL_HAS_VALUE_EXPR_P (t) = 1;
6846 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6847 *tp = t;
6851 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6852 replace_block_vars_by_duplicates (block, vars_map, to_context);
6855 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6856 from FN1 to FN2. */
6858 static void
6859 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6860 struct loop *loop)
6862 /* Discard it from the old loop array. */
6863 (*get_loops (fn1))[loop->num] = NULL;
6865 /* Place it in the new loop array, assigning it a new number. */
6866 loop->num = number_of_loops (fn2);
6867 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6869 /* Recurse to children. */
6870 for (loop = loop->inner; loop; loop = loop->next)
6871 fixup_loop_arrays_after_move (fn1, fn2, loop);
6874 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6875 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6877 DEBUG_FUNCTION void
6878 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6880 basic_block bb;
6881 edge_iterator ei;
6882 edge e;
6883 bitmap bbs = BITMAP_ALLOC (NULL);
6884 int i;
6886 gcc_assert (entry != NULL);
6887 gcc_assert (entry != exit);
6888 gcc_assert (bbs_p != NULL);
6890 gcc_assert (bbs_p->length () > 0);
6892 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6893 bitmap_set_bit (bbs, bb->index);
6895 gcc_assert (bitmap_bit_p (bbs, entry->index));
6896 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6898 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6900 if (bb == entry)
6902 gcc_assert (single_pred_p (entry));
6903 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6905 else
6906 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6908 e = ei_edge (ei);
6909 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6912 if (bb == exit)
6914 gcc_assert (single_succ_p (exit));
6915 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6917 else
6918 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6920 e = ei_edge (ei);
6921 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6925 BITMAP_FREE (bbs);
6929 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6930 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6931 single basic block in the original CFG and the new basic block is
6932 returned. DEST_CFUN must not have a CFG yet.
6934 Note that the region need not be a pure SESE region. Blocks inside
6935 the region may contain calls to abort/exit. The only restriction
6936 is that ENTRY_BB should be the only entry point and it must
6937 dominate EXIT_BB.
6939 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6940 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6941 to the new function.
6943 All local variables referenced in the region are assumed to be in
6944 the corresponding BLOCK_VARS and unexpanded variable lists
6945 associated with DEST_CFUN. */
6947 basic_block
6948 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6949 basic_block exit_bb, tree orig_block)
6951 vec<basic_block> bbs, dom_bbs;
6952 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6953 basic_block after, bb, *entry_pred, *exit_succ, abb;
6954 struct function *saved_cfun = cfun;
6955 int *entry_flag, *exit_flag;
6956 unsigned *entry_prob, *exit_prob;
6957 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6958 edge e;
6959 edge_iterator ei;
6960 htab_t new_label_map;
6961 hash_map<void *, void *> *eh_map;
6962 struct loop *loop = entry_bb->loop_father;
6963 struct loop *loop0 = get_loop (saved_cfun, 0);
6964 struct move_stmt_d d;
6966 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6967 region. */
6968 gcc_assert (entry_bb != exit_bb
6969 && (!exit_bb
6970 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6972 /* Collect all the blocks in the region. Manually add ENTRY_BB
6973 because it won't be added by dfs_enumerate_from. */
6974 bbs.create (0);
6975 bbs.safe_push (entry_bb);
6976 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6977 #ifdef ENABLE_CHECKING
6978 verify_sese (entry_bb, exit_bb, &bbs);
6979 #endif
6981 /* The blocks that used to be dominated by something in BBS will now be
6982 dominated by the new block. */
6983 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6984 bbs.address (),
6985 bbs.length ());
6987 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6988 the predecessor edges to ENTRY_BB and the successor edges to
6989 EXIT_BB so that we can re-attach them to the new basic block that
6990 will replace the region. */
6991 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6992 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6993 entry_flag = XNEWVEC (int, num_entry_edges);
6994 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6995 i = 0;
6996 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6998 entry_prob[i] = e->probability;
6999 entry_flag[i] = e->flags;
7000 entry_pred[i++] = e->src;
7001 remove_edge (e);
7004 if (exit_bb)
7006 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7007 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7008 exit_flag = XNEWVEC (int, num_exit_edges);
7009 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7010 i = 0;
7011 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7013 exit_prob[i] = e->probability;
7014 exit_flag[i] = e->flags;
7015 exit_succ[i++] = e->dest;
7016 remove_edge (e);
7019 else
7021 num_exit_edges = 0;
7022 exit_succ = NULL;
7023 exit_flag = NULL;
7024 exit_prob = NULL;
7027 /* Switch context to the child function to initialize DEST_FN's CFG. */
7028 gcc_assert (dest_cfun->cfg == NULL);
7029 push_cfun (dest_cfun);
7031 init_empty_tree_cfg ();
7033 /* Initialize EH information for the new function. */
7034 eh_map = NULL;
7035 new_label_map = NULL;
7036 if (saved_cfun->eh)
7038 eh_region region = NULL;
7040 FOR_EACH_VEC_ELT (bbs, i, bb)
7041 region = find_outermost_region_in_block (saved_cfun, bb, region);
7043 init_eh_for_function ();
7044 if (region != NULL)
7046 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7047 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7048 new_label_mapper, new_label_map);
7052 /* Initialize an empty loop tree. */
7053 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7054 init_loops_structure (dest_cfun, loops, 1);
7055 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7056 set_loops_for_fn (dest_cfun, loops);
7058 /* Move the outlined loop tree part. */
7059 num_nodes = bbs.length ();
7060 FOR_EACH_VEC_ELT (bbs, i, bb)
7062 if (bb->loop_father->header == bb)
7064 struct loop *this_loop = bb->loop_father;
7065 struct loop *outer = loop_outer (this_loop);
7066 if (outer == loop
7067 /* If the SESE region contains some bbs ending with
7068 a noreturn call, those are considered to belong
7069 to the outermost loop in saved_cfun, rather than
7070 the entry_bb's loop_father. */
7071 || outer == loop0)
7073 if (outer != loop)
7074 num_nodes -= this_loop->num_nodes;
7075 flow_loop_tree_node_remove (bb->loop_father);
7076 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7077 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7080 else if (bb->loop_father == loop0 && loop0 != loop)
7081 num_nodes--;
7083 /* Remove loop exits from the outlined region. */
7084 if (loops_for_fn (saved_cfun)->exits)
7085 FOR_EACH_EDGE (e, ei, bb->succs)
7087 struct loops *l = loops_for_fn (saved_cfun);
7088 loop_exit **slot
7089 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7090 NO_INSERT);
7091 if (slot)
7092 l->exits->clear_slot (slot);
7097 /* Adjust the number of blocks in the tree root of the outlined part. */
7098 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7100 /* Setup a mapping to be used by move_block_to_fn. */
7101 loop->aux = current_loops->tree_root;
7102 loop0->aux = current_loops->tree_root;
7104 pop_cfun ();
7106 /* Move blocks from BBS into DEST_CFUN. */
7107 gcc_assert (bbs.length () >= 2);
7108 after = dest_cfun->cfg->x_entry_block_ptr;
7109 hash_map<tree, tree> vars_map;
7111 memset (&d, 0, sizeof (d));
7112 d.orig_block = orig_block;
7113 d.new_block = DECL_INITIAL (dest_cfun->decl);
7114 d.from_context = cfun->decl;
7115 d.to_context = dest_cfun->decl;
7116 d.vars_map = &vars_map;
7117 d.new_label_map = new_label_map;
7118 d.eh_map = eh_map;
7119 d.remap_decls_p = true;
7121 FOR_EACH_VEC_ELT (bbs, i, bb)
7123 /* No need to update edge counts on the last block. It has
7124 already been updated earlier when we detached the region from
7125 the original CFG. */
7126 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7127 after = bb;
7130 loop->aux = NULL;
7131 loop0->aux = NULL;
7132 /* Loop sizes are no longer correct, fix them up. */
7133 loop->num_nodes -= num_nodes;
7134 for (struct loop *outer = loop_outer (loop);
7135 outer; outer = loop_outer (outer))
7136 outer->num_nodes -= num_nodes;
7137 loop0->num_nodes -= bbs.length () - num_nodes;
7139 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7141 struct loop *aloop;
7142 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7143 if (aloop != NULL)
7145 if (aloop->simduid)
7147 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7148 d.to_context);
7149 dest_cfun->has_simduid_loops = true;
7151 if (aloop->force_vectorize)
7152 dest_cfun->has_force_vectorize_loops = true;
7156 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7157 if (orig_block)
7159 tree block;
7160 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7161 == NULL_TREE);
7162 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7163 = BLOCK_SUBBLOCKS (orig_block);
7164 for (block = BLOCK_SUBBLOCKS (orig_block);
7165 block; block = BLOCK_CHAIN (block))
7166 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7167 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7170 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7171 &vars_map, dest_cfun->decl);
7173 if (new_label_map)
7174 htab_delete (new_label_map);
7175 if (eh_map)
7176 delete eh_map;
7178 /* Rewire the entry and exit blocks. The successor to the entry
7179 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7180 the child function. Similarly, the predecessor of DEST_FN's
7181 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7182 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7183 various CFG manipulation function get to the right CFG.
7185 FIXME, this is silly. The CFG ought to become a parameter to
7186 these helpers. */
7187 push_cfun (dest_cfun);
7188 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7189 if (exit_bb)
7190 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7191 pop_cfun ();
7193 /* Back in the original function, the SESE region has disappeared,
7194 create a new basic block in its place. */
7195 bb = create_empty_bb (entry_pred[0]);
7196 if (current_loops)
7197 add_bb_to_loop (bb, loop);
7198 for (i = 0; i < num_entry_edges; i++)
7200 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7201 e->probability = entry_prob[i];
7204 for (i = 0; i < num_exit_edges; i++)
7206 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7207 e->probability = exit_prob[i];
7210 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7211 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7212 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7213 dom_bbs.release ();
7215 if (exit_bb)
7217 free (exit_prob);
7218 free (exit_flag);
7219 free (exit_succ);
7221 free (entry_prob);
7222 free (entry_flag);
7223 free (entry_pred);
7224 bbs.release ();
7226 return bb;
7230 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   The flags select verbosity and which representation is dumped.  */
7233 void
7234 dump_function_to_file (tree fndecl, FILE *file, int flags)
7236 tree arg, var, old_current_fndecl = current_function_decl;
7237 struct function *dsf;
7238 bool ignore_topmost_bind = false, any_var = false;
7239 basic_block bb;
7240 tree chain;
7241 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7242 && decl_is_tm_clone (fndecl));
7243 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7245 current_function_decl = fndecl;
7246 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
   /* Print the parameter list: "type name, type name, ...".  */
7248 arg = DECL_ARGUMENTS (fndecl);
7249 while (arg)
7251 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7252 fprintf (file, " ");
7253 print_generic_expr (file, arg, dump_flags);
7254 if (flags & TDF_VERBOSE)
7255 print_node (file, "", arg, 4);
7256 if (DECL_CHAIN (arg))
7257 fprintf (file, ", ");
7258 arg = DECL_CHAIN (arg);
7260 fprintf (file, ")\n");
7262 if (flags & TDF_VERBOSE)
7263 print_node (file, "", fndecl, 2);
7265 dsf = DECL_STRUCT_FUNCTION (fndecl);
7266 if (dsf && (flags & TDF_EH))
7267 dump_eh_tree (file, dsf);
   /* TDF_RAW without a gimple body: dump the raw tree node and bail out.  */
7269 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7271 dump_node (fndecl, TDF_SLIM | flags, file);
7272 current_function_decl = old_current_fndecl;
7273 return;
7276 /* When GIMPLE is lowered, the variables are no longer available in
7277 BIND_EXPRs, so display them separately. */
7278 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7280 unsigned ix;
7281 ignore_topmost_bind = true;
7283 fprintf (file, "{\n");
7284 if (!vec_safe_is_empty (fun->local_decls))
7285 FOR_EACH_LOCAL_DECL (fun, ix, var)
7287 print_generic_decl (file, var, flags);
7288 if (flags & TDF_VERBOSE)
7289 print_node (file, "", var, 4);
7290 fprintf (file, "\n");
7292 any_var = true;
   /* In SSA form, also list anonymous SSA names (those with no
      underlying user variable).  */
7294 if (gimple_in_ssa_p (cfun))
7295 for (ix = 1; ix < num_ssa_names; ++ix)
7297 tree name = ssa_name (ix);
7298 if (name && !SSA_NAME_VAR (name))
7300 fprintf (file, " ");
7301 print_generic_expr (file, TREE_TYPE (name), flags);
7302 fprintf (file, " ");
7303 print_generic_expr (file, name, flags);
7304 fprintf (file, ";\n");
7306 any_var = true;
   /* Pick the dump strategy: CFG-based when a CFG exists, plain GIMPLE
      sequence when lowered but no CFG yet, else a GENERIC tree dump.  */
7311 if (fun && fun->decl == fndecl
7312 && fun->cfg
7313 && basic_block_info_for_fn (fun))
7315 /* If the CFG has been built, emit a CFG-based dump. */
7316 if (!ignore_topmost_bind)
7317 fprintf (file, "{\n");
7319 if (any_var && n_basic_blocks_for_fn (fun))
7320 fprintf (file, "\n");
7322 FOR_EACH_BB_FN (bb, fun)
7323 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7325 fprintf (file, "}\n");
7327 else if (DECL_SAVED_TREE (fndecl) == NULL)
7329 /* The function is now in GIMPLE form but the CFG has not been
7330 built yet. Emit the single sequence of GIMPLE statements
7331 that make up its body. */
7332 gimple_seq body = gimple_body (fndecl);
7334 if (gimple_seq_first_stmt (body)
7335 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7336 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7337 print_gimple_seq (file, body, 0, flags)
7338 else
7340 if (!ignore_topmost_bind)
7341 fprintf (file, "{\n");
7343 if (any_var)
7344 fprintf (file, "\n");
7346 print_gimple_seq (file, body, 2, flags);
7347 fprintf (file, "}\n");
7350 else
7352 int indent;
7354 /* Make a tree based dump. */
7355 chain = DECL_SAVED_TREE (fndecl);
7356 if (chain && TREE_CODE (chain) == BIND_EXPR)
7358 if (ignore_topmost_bind)
7360 chain = BIND_EXPR_BODY (chain);
7361 indent = 2;
7363 else
7364 indent = 0;
7366 else
7368 if (!ignore_topmost_bind)
7369 fprintf (file, "{\n");
7370 indent = 2;
7373 if (any_var)
7374 fprintf (file, "\n");
7376 print_generic_stmt_indented (file, chain, flags, indent);
7377 if (ignore_topmost_bind)
7378 fprintf (file, "}\n");
7381 if (flags & TDF_ENUMERATE_LOCALS)
7382 dump_enumerated_decls (file, flags);
7383 fprintf (file, "\n\n");
   /* Restore the caller's notion of the current function.  */
7385 current_function_decl = old_current_fndecl;
7388 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7390 DEBUG_FUNCTION void
7391 debug_function (tree fn, int flags)
7393 dump_function_to_file (fn, stderr, flags);
7397 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7399 static void
7400 print_pred_bbs (FILE *file, basic_block bb)
7402 edge e;
7403 edge_iterator ei;
7405 FOR_EACH_EDGE (e, ei, bb->preds)
7406 fprintf (file, "bb_%d ", e->src->index);
7410 /* Print on FILE the indexes for the successors of basic_block BB. */
7412 static void
7413 print_succ_bbs (FILE *file, basic_block bb)
7415 edge e;
7416 edge_iterator ei;
7418 FOR_EACH_EDGE (e, ei, bb->succs)
7419 fprintf (file, "bb_%d ", e->dest->index);
7422 /* Print to FILE the basic block BB following the VERBOSITY level. */
7424 void
7425 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7427 char *s_indent = (char *) alloca ((size_t) indent + 1);
7428 memset ((void *) s_indent, ' ', (size_t) indent);
7429 s_indent[indent] = '\0';
7431 /* Print basic_block's header. */
7432 if (verbosity >= 2)
7434 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7435 print_pred_bbs (file, bb);
7436 fprintf (file, "}, succs = {");
7437 print_succ_bbs (file, bb);
7438 fprintf (file, "})\n");
7441 /* Print basic_block's body. */
7442 if (verbosity >= 3)
7444 fprintf (file, "%s {\n", s_indent);
7445 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7446 fprintf (file, "%s }\n", s_indent);
7450 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7452 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7453 VERBOSITY level this outputs the contents of the loop, or just its
7454 structure. */
7456 static void
7457 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7459 char *s_indent;
7460 basic_block bb;
7462 if (loop == NULL)
7463 return;
7465 s_indent = (char *) alloca ((size_t) indent + 1);
7466 memset ((void *) s_indent, ' ', (size_t) indent);
7467 s_indent[indent] = '\0';
7469 /* Print loop's header. */
7470 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7471 if (loop->header)
7472 fprintf (file, "header = %d", loop->header->index);
7473 else
7475 fprintf (file, "deleted)\n");
7476 return;
7478 if (loop->latch)
7479 fprintf (file, ", latch = %d", loop->latch->index);
7480 else
7481 fprintf (file, ", multiple latches");
7482 fprintf (file, ", niter = ");
7483 print_generic_expr (file, loop->nb_iterations, 0);
7485 if (loop->any_upper_bound)
7487 fprintf (file, ", upper_bound = ");
7488 print_decu (loop->nb_iterations_upper_bound, file);
7491 if (loop->any_estimate)
7493 fprintf (file, ", estimate = ");
7494 print_decu (loop->nb_iterations_estimate, file);
7496 fprintf (file, ")\n");
7498 /* Print loop's body. */
7499 if (verbosity >= 1)
7501 fprintf (file, "%s{\n", s_indent);
7502 FOR_EACH_BB_FN (bb, cfun)
7503 if (bb->loop_father == loop)
7504 print_loops_bb (file, bb, indent, verbosity);
7506 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7507 fprintf (file, "%s}\n", s_indent);
7511 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7512 spaces. Following VERBOSITY level this outputs the contents of the
7513 loop, or just its structure. */
7515 static void
7516 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7517 int verbosity)
7519 if (loop == NULL)
7520 return;
7522 print_loop (file, loop, indent, verbosity);
7523 print_loop_and_siblings (file, loop->next, indent, verbosity);
7526 /* Follow a CFG edge from the entry point of the program, and on entry
7527 of a loop, pretty print the loop structure on FILE. */
7529 void
7530 print_loops (FILE *file, int verbosity)
7532 basic_block bb;
7534 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7535 if (bb && bb->loop_father)
7536 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7539 /* Dump a loop. */
7541 DEBUG_FUNCTION void
7542 debug (struct loop &ref)
7544 print_loop (stderr, &ref, 0, /*verbosity*/0);
7547 DEBUG_FUNCTION void
7548 debug (struct loop *ptr)
7550 if (ptr)
7551 debug (*ptr);
7552 else
7553 fprintf (stderr, "<nil>\n");
7556 /* Dump a loop verbosely. */
7558 DEBUG_FUNCTION void
7559 debug_verbose (struct loop &ref)
7561 print_loop (stderr, &ref, 0, /*verbosity*/3);
7564 DEBUG_FUNCTION void
7565 debug_verbose (struct loop *ptr)
7567 if (ptr)
7568 debug (*ptr);
7569 else
7570 fprintf (stderr, "<nil>\n");
7574 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7576 DEBUG_FUNCTION void
7577 debug_loops (int verbosity)
7579 print_loops (stderr, verbosity);
7582 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7584 DEBUG_FUNCTION void
7585 debug_loop (struct loop *loop, int verbosity)
7587 print_loop (stderr, loop, 0, verbosity);
7590 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7591 level. */
7593 DEBUG_FUNCTION void
7594 debug_loop_num (unsigned num, int verbosity)
7596 debug_loop (get_loop (cfun, num), verbosity);
7599 /* Return true if BB ends with a call, possibly followed by some
7600 instructions that must stay with the call. Return false,
7601 otherwise. */
7603 static bool
7604 gimple_block_ends_with_call_p (basic_block bb)
7606 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7607 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7611 /* Return true if BB ends with a conditional branch. Return false,
7612 otherwise. */
7614 static bool
7615 gimple_block_ends_with_condjump_p (const_basic_block bb)
7617 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7618 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7622 /* Return true if we need to add fake edge to exit at statement T.
7623 Helper function for gimple_flow_call_edges_add. */
7625 static bool
7626 need_fake_edge_p (gimple t)
7628 tree fndecl = NULL_TREE;
7629 int call_flags = 0;
7631 /* NORETURN and LONGJMP calls already have an edge to exit.
7632 CONST and PURE calls do not need one.
7633 We don't currently check for CONST and PURE here, although
7634 it would be a good idea, because those attributes are
7635 figured out from the RTL in mark_constant_function, and
7636 the counter incrementation code from -fprofile-arcs
7637 leads to different results from -fbranch-probabilities. */
7638 if (is_gimple_call (t))
7640 fndecl = gimple_call_fndecl (t);
7641 call_flags = gimple_call_flags (t);
7644 if (is_gimple_call (t)
7645 && fndecl
7646 && DECL_BUILT_IN (fndecl)
7647 && (call_flags & ECF_NOTHROW)
7648 && !(call_flags & ECF_RETURNS_TWICE)
7649 /* fork() doesn't really return twice, but the effect of
7650 wrapping it in __gcov_fork() which calls __gcov_flush()
7651 and clears the counters before forking has the same
7652 effect as returning twice. Force a fake edge. */
7653 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7654 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7655 return false;
7657 if (is_gimple_call (t))
7659 edge_iterator ei;
7660 edge e;
7661 basic_block bb;
7663 if (!(call_flags & ECF_NORETURN))
7664 return true;
7666 bb = gimple_bb (t);
7667 FOR_EACH_EDGE (e, ei, bb->succs)
7668 if ((e->flags & EDGE_FAKE) == 0)
7669 return true;
7672 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7673 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7674 return true;
7676 return false;
7680 /* Add fake edges to the function exit for any non constant and non
7681 noreturn calls (or noreturn calls with EH/abnormal edges),
7682 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7683 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7684 that were split.
7686 The goal is to expose cases in which entering a basic block does
7687 not imply that all subsequent instructions must be executed. */
7689 static int
7690 gimple_flow_call_edges_add (sbitmap blocks)
7692 int i;
7693 int blocks_split = 0;
7694 int last_bb = last_basic_block_for_fn (cfun);
7695 bool check_last_block = false;
   /* Nothing to do when only the fixed ENTRY/EXIT blocks exist.  */
7697 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7698 return 0;
7700 if (! blocks)
7701 check_last_block = true;
7702 else
7703 check_last_block = bitmap_bit_p (blocks,
7704 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7706 /* In the last basic block, before epilogue generation, there will be
7707 a fallthru edge to EXIT. Special care is required if the last insn
7708 of the last basic block is a call because make_edge folds duplicate
7709 edges, which would result in the fallthru edge also being marked
7710 fake, which would result in the fallthru edge being removed by
7711 remove_fake_edges, which would result in an invalid CFG.
7713 Moreover, we can't elide the outgoing fake edge, since the block
7714 profiler needs to take this into account in order to solve the minimal
7715 spanning tree in the case that the call doesn't return.
7717 Handle this by adding a dummy instruction in a new last basic block. */
7718 if (check_last_block)
7720 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7721 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7722 gimple t = NULL;
7724 if (!gsi_end_p (gsi))
7725 t = gsi_stmt (gsi);
7727 if (t && need_fake_edge_p (t))
7729 edge e;
7731 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7732 if (e)
   /* Inserting a nop on the edge splits it, giving the call its
      own block so the fake edge won't clash with the fallthru.  */
7734 gsi_insert_on_edge (e, gimple_build_nop ());
7735 gsi_commit_edge_inserts ();
7740 /* Now add fake edges to the function exit for any non constant
7741 calls since there is no way that we can determine if they will
7742 return or not... */
7743 for (i = 0; i < last_bb; i++)
7745 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7746 gimple_stmt_iterator gsi;
7747 gimple stmt, last_stmt;
7749 if (!bb)
7750 continue;
7752 if (blocks && !bitmap_bit_p (blocks, i))
7753 continue;
   /* Scan the statements of BB from last to first (the gsi_prev /
      while pair below forms the backward do-while walk).  */
7755 gsi = gsi_last_nondebug_bb (bb);
7756 if (!gsi_end_p (gsi))
7758 last_stmt = gsi_stmt (gsi);
7761 stmt = gsi_stmt (gsi);
7762 if (need_fake_edge_p (stmt))
7764 edge e;
7766 /* The handling above of the final block before the
7767 epilogue should be enough to verify that there is
7768 no edge to the exit block in CFG already.
7769 Calling make_edge in such case would cause us to
7770 mark that edge as fake and remove it later. */
7771 #ifdef ENABLE_CHECKING
7772 if (stmt == last_stmt)
7774 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7775 gcc_assert (e == NULL);
7777 #endif
7779 /* Note that the following may create a new basic block
7780 and renumber the existing basic blocks. */
7781 if (stmt != last_stmt)
7783 e = split_block (bb, stmt);
7784 if (e)
7785 blocks_split++;
7787 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7789 gsi_prev (&gsi);
7791 while (!gsi_end_p (gsi));
7795 if (blocks_split)
7796 verify_flow_info ();
7798 return blocks_split;
7801 /* Removes edge E and all the blocks dominated by it, and updates dominance
7802 information. The IL in E->src needs to be updated separately.
7803 If dominance info is not available, only the edge E is removed.  */
7805 void
7806 remove_edge_and_dominated_blocks (edge e)
7808 vec<basic_block> bbs_to_remove = vNULL;
7809 vec<basic_block> bbs_to_fix_dom = vNULL;
7810 bitmap df, df_idom;
7811 edge f;
7812 edge_iterator ei;
7813 bool none_removed = false;
7814 unsigned i;
7815 basic_block bb, dbb;
7816 bitmap_iterator bi;
7818 if (!dom_info_available_p (CDI_DOMINATORS))
7820 remove_edge (e);
7821 return;
7824 /* No updating is needed for edges to exit. */
7825 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7827 if (cfgcleanup_altered_bbs)
7828 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7829 remove_edge (e);
7830 return;
7833 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7834 that is not dominated by E->dest, then this set is empty. Otherwise,
7835 all the basic blocks dominated by E->dest are removed.
7837 Also, to DF_IDOM we store the immediate dominators of the blocks in
7838 the dominance frontier of E (i.e., of the successors of the
7839 removed blocks, if there are any, and of E->dest otherwise). */
7840 FOR_EACH_EDGE (f, ei, e->dest->preds)
7842 if (f == e)
7843 continue;
7845 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7847 none_removed = true;
7848 break;
   /* DF collects successors of the removed region (its dominance
      frontier); DF_IDOM collects their immediate dominators.  */
7852 df = BITMAP_ALLOC (NULL);
7853 df_idom = BITMAP_ALLOC (NULL);
7855 if (none_removed)
7856 bitmap_set_bit (df_idom,
7857 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7858 else
7860 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7861 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7863 FOR_EACH_EDGE (f, ei, bb->succs)
7865 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7866 bitmap_set_bit (df, f->dest->index);
7869 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7870 bitmap_clear_bit (df, bb->index);
7872 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7874 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7875 bitmap_set_bit (df_idom,
7876 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7880 if (cfgcleanup_altered_bbs)
7882 /* Record the set of the altered basic blocks. */
7883 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7884 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7887 /* Remove E and the cancelled blocks. */
7888 if (none_removed)
7889 remove_edge (e);
7890 else
7892 /* Walk backwards so as to get a chance to substitute all
7893 released DEFs into debug stmts. See
7894 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7895 details. */
7896 for (i = bbs_to_remove.length (); i-- > 0; )
7897 delete_basic_block (bbs_to_remove[i]);
7900 /* Update the dominance information. The immediate dominator may change only
7901 for blocks whose immediate dominator belongs to DF_IDOM:
7903 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7904 removal. Let Z the arbitrary block such that idom(Z) = Y and
7905 Z dominates X after the removal. Before removal, there exists a path P
7906 from Y to X that avoids Z. Let F be the last edge on P that is
7907 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7908 dominates W, and because of P, Z does not dominate W), and W belongs to
7909 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7910 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7912 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7913 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7914 dbb;
7915 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7916 bbs_to_fix_dom.safe_push (dbb);
7919 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7921 BITMAP_FREE (df);
7922 BITMAP_FREE (df_idom);
7923 bbs_to_remove.release ();
7924 bbs_to_fix_dom.release ();
7927 /* Purge dead EH edges from basic block BB. */
7929 bool
7930 gimple_purge_dead_eh_edges (basic_block bb)
7932 bool changed = false;
7933 edge e;
7934 edge_iterator ei;
7935 gimple stmt = last_stmt (bb);
7937 if (stmt && stmt_can_throw_internal (stmt))
7938 return false;
7940 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7942 if (e->flags & EDGE_EH)
7944 remove_edge_and_dominated_blocks (e);
7945 changed = true;
7947 else
7948 ei_next (&ei);
7951 return changed;
7954 /* Purge dead EH edges from basic block listed in BLOCKS. */
7956 bool
7957 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7959 bool changed = false;
7960 unsigned i;
7961 bitmap_iterator bi;
7963 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7965 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7967 /* Earlier gimple_purge_dead_eh_edges could have removed
7968 this basic block already. */
7969 gcc_assert (bb || changed);
7970 if (bb != NULL)
7971 changed |= gimple_purge_dead_eh_edges (bb);
7974 return changed;
7977 /* Purge dead abnormal call edges from basic block BB. */
7979 bool
7980 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7982 bool changed = false;
7983 edge e;
7984 edge_iterator ei;
7985 gimple stmt = last_stmt (bb);
7987 if (!cfun->has_nonlocal_label
7988 && !cfun->calls_setjmp)
7989 return false;
7991 if (stmt && stmt_can_make_abnormal_goto (stmt))
7992 return false;
7994 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7996 if (e->flags & EDGE_ABNORMAL)
7998 if (e->flags & EDGE_FALLTHRU)
7999 e->flags &= ~EDGE_ABNORMAL;
8000 else
8001 remove_edge_and_dominated_blocks (e);
8002 changed = true;
8004 else
8005 ei_next (&ei);
8008 return changed;
8011 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8013 bool
8014 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8016 bool changed = false;
8017 unsigned i;
8018 bitmap_iterator bi;
8020 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8022 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8024 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8025 this basic block already. */
8026 gcc_assert (bb || changed);
8027 if (bb != NULL)
8028 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8031 return changed;
8034 /* This function is called whenever a new edge is created or
8035 redirected. */
8037 static void
8038 gimple_execute_on_growing_pred (edge e)
8040 basic_block bb = e->dest;
8042 if (!gimple_seq_empty_p (phi_nodes (bb)))
8043 reserve_phi_args_for_new_edge (bb);
8046 /* This function is called immediately before edge E is removed from
8047 the edge vector E->dest->preds. */
8049 static void
8050 gimple_execute_on_shrinking_pred (edge e)
8052 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8053 remove_phi_args (e);
8056 /*---------------------------------------------------------------------------
8057 Helper functions for Loop versioning
8058 ---------------------------------------------------------------------------*/
8060 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8061 of 'first'. Both of them are dominated by 'new_head' basic block. When
8062 'new_head' was created by 'second's incoming edge it received phi arguments
8063 on the edge by split_edge(). Later, additional edge 'e' was created to
8064 connect 'new_head' and 'first'. Now this routine adds phi args on this
8065 additional edge 'e' that new_head to second edge received as part of edge
8066 splitting. */
8068 static void
8069 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8070 basic_block new_head, edge e)
8072 gphi *phi1, *phi2;
8073 gphi_iterator psi1, psi2;
8074 tree def;
8075 edge e2 = find_edge (new_head, second);
8077 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8078 edge, we should always have an edge from NEW_HEAD to SECOND. */
8079 gcc_assert (e2 != NULL);
8081 /* Browse all 'second' basic block phi nodes and add phi args to
8082 edge 'e' for 'first' head. PHI args are always in correct order. */
8084 for (psi2 = gsi_start_phis (second),
8085 psi1 = gsi_start_phis (first);
8086 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8087 gsi_next (&psi2), gsi_next (&psi1))
8089 phi1 = psi1.phi ();
8090 phi2 = psi2.phi ();
8091 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8092 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8097 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8098 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8099 the destination of the ELSE part. */
8101 static void
8102 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8103 basic_block second_head ATTRIBUTE_UNUSED,
8104 basic_block cond_bb, void *cond_e)
8106 gimple_stmt_iterator gsi;
8107 gimple new_cond_expr;
8108 tree cond_expr = (tree) cond_e;
8109 edge e0;
8111 /* Build new conditional expr */
8112 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8113 NULL_TREE, NULL_TREE);
8115 /* Add new cond in cond_bb. */
8116 gsi = gsi_last_bb (cond_bb);
8117 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8119 /* Adjust edges appropriately to connect new head with first head
8120 as well as second head. */
8121 e0 = single_succ_edge (cond_bb);
8122 e0->flags &= ~EDGE_FALLTHRU;
8123 e0->flags |= EDGE_FALSE_VALUE;
8127 /* Do book-keeping of basic block BB for the profile consistency checker.
8128 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8129 then do post-pass accounting. Store the counting in RECORD. */
8130 static void
8131 gimple_account_profile_record (basic_block bb, int after_pass,
8132 struct profile_record *record)
8134 gimple_stmt_iterator i;
8135 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8137 record->size[after_pass]
8138 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8139 if (profile_status_for_fn (cfun) == PROFILE_READ)
8140 record->time[after_pass]
8141 += estimate_num_insns (gsi_stmt (i),
8142 &eni_time_weights) * bb->count;
8143 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8144 record->time[after_pass]
8145 += estimate_num_insns (gsi_stmt (i),
8146 &eni_time_weights) * bb->frequency;
/* CFG hooks vtable for functions in GIMPLE form; slot order must match
   struct cfg_hooks in cfghooks.h.  */
8150 struct cfg_hooks gimple_cfg_hooks = {
8151 "gimple",
8152 gimple_verify_flow_info, /* verify_flow_info */
8153 gimple_dump_bb, /* dump_bb */
8154 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8155 create_bb, /* create_basic_block */
8156 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8157 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8158 gimple_can_remove_branch_p, /* can_remove_branch_p */
8159 remove_bb, /* delete_basic_block */
8160 gimple_split_block, /* split_block */
8161 gimple_move_block_after, /* move_block_after */
8162 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8163 gimple_merge_blocks, /* merge_blocks */
8164 gimple_predict_edge, /* predict_edge */
8165 gimple_predicted_by_p, /* predicted_by_p */
8166 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8167 gimple_duplicate_bb, /* duplicate_block */
8168 gimple_split_edge, /* split_edge */
8169 gimple_make_forwarder_block, /* make_forward_block */
8170 NULL, /* tidy_fallthru_edge */
8171 NULL, /* force_nonfallthru */
8172 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8173 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8174 gimple_flow_call_edges_add, /* flow_call_edges_add */
8175 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8176 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8177 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8178 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8179 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8180 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8181 flush_pending_stmts, /* flush_pending_stmts */
8182 gimple_empty_block_p, /* block_empty_p */
8183 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8184 gimple_account_profile_record, /* account_profile_record */
8188 /* Split all critical edges. */
8190 unsigned int
8191 split_critical_edges (void)
8193 basic_block bb;
8194 edge e;
8195 edge_iterator ei;
8197 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8198 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8199 mappings around the calls to split_edge. */
8200 start_recording_case_labels ();
8201 FOR_ALL_BB_FN (bb, cfun)
8203 FOR_EACH_EDGE (e, ei, bb->succs)
8205 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8206 split_edge (e);
8207 /* PRE inserts statements to edges and expects that
8208 since split_critical_edges was done beforehand, committing edge
8209 insertions will not split more edges. In addition to critical
8210 edges we must split edges that have multiple successors and
8211 end by control flow statements, such as RESX.
8212 Go ahead and split them too. This matches the logic in
8213 gimple_find_edge_insert_loc. */
8214 else if ((!single_pred_p (e->dest)
8215 || !gimple_seq_empty_p (phi_nodes (e->dest))
8216 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8217 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8218 && !(e->flags & EDGE_ABNORMAL))
8220 gimple_stmt_iterator gsi;
8222 gsi = gsi_last_bb (e->src);
8223 if (!gsi_end_p (gsi)
8224 && stmt_ends_bb_p (gsi_stmt (gsi))
8225 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8226 && !gimple_call_builtin_p (gsi_stmt (gsi),
8227 BUILT_IN_RETURN)))
8228 split_edge (e);
8232 end_recording_case_labels ();
8233 return 0;
8236 namespace {
/* Pass descriptor for the "crited" edge-splitting pass: requires a CFG
   and provides the no-critical-edges property.  */
8238 const pass_data pass_data_split_crit_edges =
8240 GIMPLE_PASS, /* type */
8241 "crited", /* name */
8242 OPTGROUP_NONE, /* optinfo_flags */
8243 TV_TREE_SPLIT_EDGES, /* tv_id */
8244 PROP_cfg, /* properties_required */
8245 PROP_no_crit_edges, /* properties_provided */
8246 0, /* properties_destroyed */
8247 0, /* todo_flags_start */
8248 0, /* todo_flags_finish */
/* Thin gimple_opt_pass wrapper whose execute delegates to
   split_critical_edges.  */
8251 class pass_split_crit_edges : public gimple_opt_pass
8253 public:
8254 pass_split_crit_edges (gcc::context *ctxt)
8255 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8258 /* opt_pass methods: */
8259 virtual unsigned int execute (function *) { return split_critical_edges (); }
8261 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8262 }; // class pass_split_crit_edges
8264 } // anon namespace
/* Pass factory: allocate a fresh instance of the crited pass.  */
8266 gimple_opt_pass *
8267 make_pass_split_crit_edges (gcc::context *ctxt)
8269 return new pass_split_crit_edges (ctxt);
8273 /* Insert COND expression which is GIMPLE_COND after STMT
8274 in basic block BB with appropriate basic block split
8275 and creation of a new conditionally executed basic block.
8276 Return created basic block. */
8277 basic_block
8278 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8280 edge fall = split_block (bb, stmt);
8281 gimple_stmt_iterator iter = gsi_last_bb (bb);
8282 basic_block new_bb;
8284 /* Insert cond statement. */
8285 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8286 if (gsi_end_p (iter))
8287 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8288 else
8289 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8291 /* Create conditionally executed block. */
8292 new_bb = create_empty_bb (bb);
8293 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8294 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8296 /* Fix edge for split bb. */
8297 fall->flags = EDGE_FALSE_VALUE;
8299 /* Update dominance info. */
8300 if (dom_info_available_p (CDI_DOMINATORS))
8302 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8303 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8306 /* Update loop info. */
8307 if (current_loops)
8308 add_bb_to_loop (new_bb, bb->loop_father);
8310 return new_bb;
8313 /* Build a ternary operation and gimplify it. Emit code before GSI.
8314 Return the gimple_val holding the result. */
8316 tree
8317 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8318 tree type, tree a, tree b, tree c)
8320 tree ret;
8321 location_t loc = gimple_location (gsi_stmt (*gsi));
8323 ret = fold_build3_loc (loc, code, type, a, b, c);
8324 STRIP_NOPS (ret);
8326 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8327 GSI_SAME_STMT);
8330 /* Build a binary operation and gimplify it. Emit code before GSI.
8331 Return the gimple_val holding the result. */
8333 tree
8334 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8335 tree type, tree a, tree b)
8337 tree ret;
8339 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8340 STRIP_NOPS (ret);
8342 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8343 GSI_SAME_STMT);
8346 /* Build a unary operation and gimplify it. Emit code before GSI.
8347 Return the gimple_val holding the result. */
8349 tree
8350 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8351 tree a)
8353 tree ret;
8355 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8356 STRIP_NOPS (ret);
8358 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8359 GSI_SAME_STMT);
8364 /* Given a basic block B which ends with a conditional and has
8365 precisely two successors, determine which of the edges is taken if
8366 the conditional is true and which is taken if the conditional is
8367 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8369 void
8370 extract_true_false_edges_from_block (basic_block b,
8371 edge *true_edge,
8372 edge *false_edge)
8374 edge e = EDGE_SUCC (b, 0);
8376 if (e->flags & EDGE_TRUE_VALUE)
8378 *true_edge = e;
8379 *false_edge = EDGE_SUCC (b, 1);
8381 else
8383 *false_edge = e;
8384 *true_edge = EDGE_SUCC (b, 1);
8388 /* Emit return warnings. */

8390 namespace {

/* Pass metadata: requires only a built CFG, provides nothing, and is
   not timed separately (TV_NONE).  */
8392 const pass_data pass_data_warn_function_return =
8394 GIMPLE_PASS, /* type */
8395 "*warn_function_return", /* name */
8396 OPTGROUP_NONE, /* optinfo_flags */
8397 TV_NONE, /* tv_id */
8398 PROP_cfg, /* properties_required */
8399 0, /* properties_provided */
8400 0, /* properties_destroyed */
8401 0, /* todo_flags_start */
8402 0, /* todo_flags_finish */

/* Diagnostic-only pass; the execute method is defined out of line
   below.  */
8405 class pass_warn_function_return : public gimple_opt_pass
8407 public:
8408 pass_warn_function_return (gcc::context *ctxt)
8409 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8412 /* opt_pass methods: */
8413 virtual unsigned int execute (function *);
8415 }; // class pass_warn_function_return
/* Warn when a function declared noreturn can in fact return, and when
   control can reach the end of a non-void function without returning a
   value.  Returns 0 (no TODO flags).  */
8417 unsigned int
8418 pass_warn_function_return::execute (function *fun)
8420 source_location location;
8421 gimple last;
8422 edge e;
8423 edge_iterator ei;

  /* Let the target veto these warnings (e.g. for naked functions).  */
8425 if (!targetm.warn_func_return (fun->decl))
8426 return 0;

8428 /* If we have a path to EXIT, then we do return. */
  /* TREE_THIS_VOLATILE on a FUNCTION_DECL flags a noreturn function;
     any predecessor of the exit block contradicts that declaration.  */
8429 if (TREE_THIS_VOLATILE (fun->decl)
8430 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8432 location = UNKNOWN_LOCATION;
  /* Prefer pointing the warning at an actual return statement; note the
     assignment to LOCATION inside the condition below.  */
8433 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8435 last = last_stmt (e->src);
8436 if ((gimple_code (last) == GIMPLE_RETURN
8437 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8438 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8439 break;
  /* No usable statement location found: fall back to the closing brace.  */
8441 if (location == UNKNOWN_LOCATION)
8442 location = cfun->function_end_locus;
8443 warning_at (location, 0, "%<noreturn%> function does return");

8446 /* If we see "return;" in some basic block, then we do reach the end
8447 without returning a value. */
8448 else if (warn_return_type
8449 && !TREE_NO_WARNING (fun->decl)
8450 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8451 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8453 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8455 gimple last = last_stmt (e->src);
8456 greturn *return_stmt = dyn_cast <greturn *> (last);
  /* A valueless return reaching the exit of a non-void function; skip
     statements already marked no-warning.  */
8457 if (return_stmt
8458 && gimple_return_retval (return_stmt) == NULL
8459 && !gimple_no_warning_p (last))
8461 location = gimple_location (last);
8462 if (location == UNKNOWN_LOCATION)
8463 location = fun->function_end_locus;
8464 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
  /* Warn at most once per function.  */
8465 TREE_NO_WARNING (fun->decl) = 1;
8466 break;

8470 return 0;

8473 } // anon namespace
/* Factory used by the pass manager to create the return-warning pass
   in context CTXT.  */
8475 gimple_opt_pass *
8476 make_pass_warn_function_return (gcc::context *ctxt)
8478 return new pass_warn_function_return (ctxt);
8481 /* Walk a gimplified function and warn for functions whose return value is
8482 ignored and attribute((warn_unused_result)) is set. This is done before
8483 inlining, so we don't have to worry about that. */
8485 static void
8486 do_warn_unused_result (gimple_seq seq)
8488 tree fdecl, ftype;
8489 gimple_stmt_iterator i;

8491 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8493 gimple g = gsi_stmt (i);

8495 switch (gimple_code (g))
  /* Container statements: recurse into every nested sequence so no
     call is missed.  */
8497 case GIMPLE_BIND:
8498 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8499 break;
8500 case GIMPLE_TRY:
8501 do_warn_unused_result (gimple_try_eval (g));
8502 do_warn_unused_result (gimple_try_cleanup (g));
8503 break;
8504 case GIMPLE_CATCH:
8505 do_warn_unused_result (gimple_catch_handler (
8506 as_a <gcatch *> (g)));
8507 break;
8508 case GIMPLE_EH_FILTER:
8509 do_warn_unused_result (gimple_eh_filter_failure (g));
8510 break;

8512 case GIMPLE_CALL:
  /* A call with an LHS uses its value; internal calls have no user
     attributes.  Neither can warrant a warning.  */
8513 if (gimple_call_lhs (g))
8514 break;
8515 if (gimple_call_internal_p (g))
8516 break;

8518 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8519 LHS. All calls whose value is ignored should be
8520 represented like this. Look for the attribute. */
8521 fdecl = gimple_call_fndecl (g);
8522 ftype = gimple_call_fntype (g);

  /* The attribute lives on the function type; FDECL may be NULL for
     indirect calls, hence the two warning texts below.  */
8524 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8526 location_t loc = gimple_location (g);

8528 if (fdecl)
8529 warning_at (loc, OPT_Wunused_result,
8530 "ignoring return value of %qD, "
8531 "declared with attribute warn_unused_result",
8532 fdecl);
8533 else
8534 warning_at (loc, OPT_Wunused_result,
8535 "ignoring return value of function "
8536 "declared with attribute warn_unused_result");
8538 break;

8540 default:
8541 /* Not a container, not a call, or a call whose value is used. */
8542 break;
8547 namespace {

/* Pass metadata: only needs gimplified IL (PROP_gimple_any) since it
   runs on the gimple body before the CFG is built.  */
8549 const pass_data pass_data_warn_unused_result =
8551 GIMPLE_PASS, /* type */
8552 "*warn_unused_result", /* name */
8553 OPTGROUP_NONE, /* optinfo_flags */
8554 TV_NONE, /* tv_id */
8555 PROP_gimple_any, /* properties_required */
8556 0, /* properties_provided */
8557 0, /* properties_destroyed */
8558 0, /* todo_flags_start */
8559 0, /* todo_flags_finish */

/* Gated on -Wunused-result; execute walks the whole gimple body via
   do_warn_unused_result.  */
8562 class pass_warn_unused_result : public gimple_opt_pass
8564 public:
8565 pass_warn_unused_result (gcc::context *ctxt)
8566 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8569 /* opt_pass methods: */
8570 virtual bool gate (function *) { return flag_warn_unused_result; }
8571 virtual unsigned int execute (function *)
8573 do_warn_unused_result (gimple_body (current_function_decl));
8574 return 0;
8577 }; // class pass_warn_unused_result

8579 } // anon namespace
/* Factory used by the pass manager to create the warn_unused_result
   pass in context CTXT.  */
8581 gimple_opt_pass *
8582 make_pass_warn_unused_result (gcc::context *ctxt)
8584 return new pass_warn_unused_result (ctxt);
8587 /* IPA passes, compilation of earlier functions or inlining
8588 might have changed some properties, such as marked functions nothrow,
8589 pure, const or noreturn.
8590 Remove redundant edges and basic blocks, and create new ones if necessary.
8592 This pass can't be executed as stand alone pass from pass manager, because
8593 in between inlining and this fixup the verify_flow_info would fail. */
8595 unsigned int
8596 execute_fixup_cfg (void)
8598 basic_block bb;
8599 gimple_stmt_iterator gsi;
8600 int todo = 0;
8601 gcov_type count_scale;
8602 edge e;
8603 edge_iterator ei;

  /* Rescale profile counts so they match the count the cgraph node now
     records for this function (inlining may have changed it).  */
8605 count_scale
8606 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8607 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

8609 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8610 cgraph_node::get (current_function_decl)->count;
8611 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8612 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8613 count_scale);

8615 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8616 e->count = apply_scale (e->count, count_scale);

8618 FOR_EACH_BB_FN (bb, cfun)
8620 bb->count = apply_scale (bb->count, count_scale);
  /* Note: the iterator is advanced manually at the bottom of the loop
     because removed statements must not be stepped over twice.  */
8621 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8623 gimple stmt = gsi_stmt (gsi);
8624 tree decl = is_gimple_call (stmt)
8625 ? gimple_call_fndecl (stmt)
8626 : NULL;
8627 if (decl)
8629 int flags = gimple_call_flags (stmt);
  /* A call newly discovered const/pure cannot take abnormal edges and,
     in SSA form, needs its virtual operands recomputed.  */
8630 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8632 if (gimple_purge_dead_abnormal_call_edges (bb))
8633 todo |= TODO_cleanup_cfg;
8635 if (gimple_in_ssa_p (cfun))
8637 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8638 update_stmt (stmt);

  /* A call newly discovered noreturn makes everything after it dead.  */
8642 if (flags & ECF_NORETURN
8643 && fixup_noreturn_call (stmt))
8644 todo |= TODO_cleanup_cfg;

8647 /* Remove stores to variables we marked write-only.
8648 Keep access when store has side effect, i.e. in case when source
8649 is volatile. */
8650 if (gimple_store_p (stmt)
8651 && !gimple_has_side_effects (stmt))
8653 tree lhs = get_base_address (gimple_get_lhs (stmt));

8655 if (TREE_CODE (lhs) == VAR_DECL
8656 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8657 && varpool_node::get (lhs)->writeonly)
8659 unlink_stmt_vdef (stmt);
8660 gsi_remove (&gsi, true);
8661 release_defs (stmt);
8662 todo |= TODO_update_ssa | TODO_cleanup_cfg;
  /* gsi_remove already advanced GSI; skip the gsi_next below.  */
8663 continue;

8666 /* For calls we can simply remove LHS when it is known
8667 to be write-only. */
8668 if (is_gimple_call (stmt)
8669 && gimple_get_lhs (stmt))
8671 tree lhs = get_base_address (gimple_get_lhs (stmt));

8673 if (TREE_CODE (lhs) == VAR_DECL
8674 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8675 && varpool_node::get (lhs)->writeonly)
8677 gimple_call_set_lhs (stmt, NULL);
8678 update_stmt (stmt);
8679 todo |= TODO_update_ssa | TODO_cleanup_cfg;

  /* Drop EH side effects (and edges) that are no longer possible.  */
8683 if (maybe_clean_eh_stmt (stmt)
8684 && gimple_purge_dead_eh_edges (bb))
8685 todo |= TODO_cleanup_cfg;
8686 gsi_next (&gsi);

8689 FOR_EACH_EDGE (e, ei, bb->succs)
8690 e->count = apply_scale (e->count, count_scale);

8692 /* If we have a basic block with no successors that does not
8693 end with a control statement or a noreturn call end it with
8694 a call to __builtin_unreachable. This situation can occur
8695 when inlining a noreturn call that does in fact return. */
8696 if (EDGE_COUNT (bb->succs) == 0)
8698 gimple stmt = last_stmt (bb);
8699 if (!stmt
8700 || (!is_ctrl_stmt (stmt)
8701 && (!is_gimple_call (stmt)
8702 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8704 if (stmt && is_gimple_call (stmt))
8705 gimple_call_set_ctrl_altering (stmt, false);
8706 stmt = gimple_build_call
8707 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8708 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8709 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  /* If counts actually changed, frequencies must be recomputed too.  */
8713 if (count_scale != REG_BR_PROB_BASE)
8714 compute_function_frequency ();

8716 /* Dump a textual representation of the flowgraph. */
8717 if (dump_file)
8718 gimple_dump_cfg (dump_file, dump_flags);

8720 if (current_loops
8721 && (todo & TODO_cleanup_cfg))
8722 loops_state_set (LOOPS_NEED_FIXUP);

8724 return todo;
8727 namespace {

/* Pass metadata for the CFG fixup pass; requires a CFG and runs under
   no timevar of its own.  */
8729 const pass_data pass_data_fixup_cfg =
8731 GIMPLE_PASS, /* type */
8732 "*free_cfg_annotations", /* name */
8733 OPTGROUP_NONE, /* optinfo_flags */
8734 TV_NONE, /* tv_id */
8735 PROP_cfg, /* properties_required */
8736 0, /* properties_provided */
8737 0, /* properties_destroyed */
8738 0, /* todo_flags_start */
8739 0, /* todo_flags_finish */

/* Thin wrapper around execute_fixup_cfg; clone lets the pass manager
   schedule this pass more than once.  */
8742 class pass_fixup_cfg : public gimple_opt_pass
8744 public:
8745 pass_fixup_cfg (gcc::context *ctxt)
8746 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8749 /* opt_pass methods: */
8750 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8751 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8753 }; // class pass_fixup_cfg

8755 } // anon namespace
/* Factory used by the pass manager to create the CFG fixup pass in
   context CTXT.  */
8757 gimple_opt_pass *
8758 make_pass_fixup_cfg (gcc::context *ctxt)
8760 return new pass_fixup_cfg (ctxt);
8763 /* Garbage collection support for edge_def. */
8765 extern void gt_ggc_mx (tree&);
8766 extern void gt_ggc_mx (gimple&);
8767 extern void gt_ggc_mx (rtx&);
8768 extern void gt_ggc_mx (basic_block&);
8770 static void
8771 gt_ggc_mx (rtx_insn *& x)
8773 if (x)
8774 gt_ggc_mx_rtx_def ((void *) x);
8777 void
8778 gt_ggc_mx (edge_def *e)
8780 tree block = LOCATION_BLOCK (e->goto_locus);
8781 gt_ggc_mx (e->src);
8782 gt_ggc_mx (e->dest);
8783 if (current_ir_type () == IR_GIMPLE)
8784 gt_ggc_mx (e->insns.g);
8785 else
8786 gt_ggc_mx (e->insns.r);
8787 gt_ggc_mx (block);
8790 /* PCH support for edge_def. */
8792 extern void gt_pch_nx (tree&);
8793 extern void gt_pch_nx (gimple&);
8794 extern void gt_pch_nx (rtx&);
8795 extern void gt_pch_nx (basic_block&);
8797 static void
8798 gt_pch_nx (rtx_insn *& x)
8800 if (x)
8801 gt_pch_nx_rtx_def ((void *) x);
8804 void
8805 gt_pch_nx (edge_def *e)
8807 tree block = LOCATION_BLOCK (e->goto_locus);
8808 gt_pch_nx (e->src);
8809 gt_pch_nx (e->dest);
8810 if (current_ir_type () == IR_GIMPLE)
8811 gt_pch_nx (e->insns.g);
8812 else
8813 gt_pch_nx (e->insns.r);
8814 gt_pch_nx (block);
8817 void
8818 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8820 tree block = LOCATION_BLOCK (e->goto_locus);
8821 op (&(e->src), cookie);
8822 op (&(e->dest), cookie);
8823 if (current_ir_type () == IR_GIMPLE)
8824 op (&(e->insns.g), cookie);
8825 else
8826 op (&(e->insns.r), cookie);
8827 op (&(block), cookie);