/cp
[official-gcc.git] / gcc / tree-cfg.c
blobd97b8240f228969e7133e5828fe3dcc9eb43376a
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "rtl.h"
29 #include "ssa.h"
30 #include "alias.h"
31 #include "fold-const.h"
32 #include "trans-mem.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "cfganal.h"
37 #include "flags.h"
38 #include "gimple-pretty-print.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "cgraph.h"
46 #include "tree-cfg.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "tree-dump.h"
62 #include "tree-pass.h"
63 #include "diagnostic-core.h"
64 #include "except.h"
65 #include "cfgloop.h"
66 #include "tree-ssa-propagate.h"
67 #include "value-prof.h"
68 #include "tree-inline.h"
69 #include "target.h"
70 #include "tree-ssa-live.h"
71 #include "omp-low.h"
72 #include "tree-cfgcleanup.h"
73 #include "wide-int-print.h"
75 /* This file contains functions for building the Control Flow Graph (CFG)
76 for a function tree. */
/* Local declarations.  */

/* Initial capacity for the basic block array.  Grown on demand by
   create_bb.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */
static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their basic block's leading label.  */
  long num_merged_labels;
};

/* Accumulated statistics for the current CFG build; zeroed at the start
   of build_gimple_cfg.  */
static struct cfg_stats_d cfg_stats;
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* Source location this entry describes (hashed by its line number).  */
  location_t locus;
  /* Last discriminator value handed out for this line.  */
  int discriminator;
};
/* Hashtable helpers: hashing and equality for locus_discrim_map entries,
   both keyed on the location's line number only.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
128 /* Trivial hash function for a location_t. ITEM is a pointer to
129 a hash table entry that maps a location_t to a discriminator. */
131 inline hashval_t
132 locus_discrim_hasher::hash (const locus_discrim_map *item)
134 return LOCATION_LINE (item->locus);
137 /* Equality function for the locus-to-discriminator map. A and B
138 point to the two hash table entries to compare. */
140 inline bool
141 locus_discrim_hasher::equal (const locus_discrim_map *a,
142 const locus_discrim_map *b)
144 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
/* Maps a source line to the last discriminator handed out for it.
   Allocated in build_gimple_cfg and freed there once edges are built.  */
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
/* Reset FN's CFG to the minimal valid state: just the fixed ENTRY and
   EXIT blocks, chained to each other, with no real blocks or edges.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  /* Register the two fixed blocks in the block array.  */
  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no intervening blocks yet, ENTRY and EXIT are adjacent in the
     block chain.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
/* Convenience wrapper: reset the CFG of the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge creation and discriminator
     assignment.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  /* Annotations are only processed when the block ends in a condition.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards over the run of IFN_ANNOTATE calls that immediately
     precedes the condition; stop at the first other statement.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* The second call argument encodes the annotation kind.  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Once consumed, turn the annotation into a plain copy of its
	 first argument so the LHS keeps its value.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation still present here could not be attributed to a loop,
     so warn and strip it.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  /* Only the known annotation kinds are expected here.  */
	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  /* Replace the call with a copy of its first argument so the
	     LHS keeps its value.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Main worker of the CFG-building pass: build the CFG for the current
   function's gimple body, then clean it up and process loop annotations.
   Always returns 0 (no additional TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the body sequence.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

/* Pass descriptor for the CFG construction pass ("cfg").  Requires
   lowered EH gimple and provides the CFG and loop structures.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Gimple pass wrapper that delegates to execute_build_cfg.  */
class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
/* Factory for the CFG construction pass; caller owns the returned pass.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
408 /* Return true if T is a computed goto. */
410 bool
411 computed_goto_p (gimple t)
413 return (gimple_code (t) == GIMPLE_GOTO
414 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Pick whichever successor of the condition E does not go to.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  /* Skip debug statements and clobbers; only a real statement
	     can be the __builtin_unreachable call we look for.  */
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
462 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
463 could alter control flow except via eh. We initialize the flag at
464 CFG build time and only ever clear it later. */
466 static void
467 gimple_call_initialize_ctrl_altering (gimple stmt)
469 int flags = gimple_call_flags (stmt);
471 /* A call alters control flow if it can make an abnormal goto. */
472 if (call_can_make_abnormal_goto (stmt)
473 /* A call also alters control flow if it does not return. */
474 || flags & ECF_NORETURN
475 /* TM ending statements have backedges out of the transaction.
476 Return true so we split the basic block containing them.
477 Note that the TM_BUILTIN test is merely an optimization. */
478 || ((flags & ECF_TM_BUILTIN)
479 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
480 /* BUILT_IN_RETURN call is same as return statement. */
481 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
482 gimple_call_set_ctrl_altering (stmt, true);
483 else
484 gimple_call_set_ctrl_altering (stmt, false);
/* Insert SEQ after BB and build a flowgraph.  Returns the last basic
   block created from SEQ.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      /* Insert "lhs = tmp" after STMT and make STMT assign to
		 the temporary instead.  */
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ, starting after the
   function's ENTRY block.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-null, is the gimple_seq for the new block; E must be null (this
   CFG hook signature carries an edge parameter the gimple CFG does not
   use).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed, by 25% plus a small slack.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR: if the condition folds to a
   constant, rewrite the GIMPLE_COND to an always-false or always-true
   form.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings so they are only emitted when the
	     folded result is actually used (see the undefer call).  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* Only consider purely abnormal successors (abnormal but not EH).  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   with ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.

   DISPATCHER_BBS holds one dispatcher slot pair (goto, call) per OMP
   region when BB_TO_OMP_IDX is non-null, otherwise a single pair.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      /* Select the dispatcher pair for FOR_BB's OMP region; region 0 is
	 the outermost one.  */
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  /* NOTE: relies on FOR_EACH_VEC_ELT leaving BB as NULL when the
	     vector is exhausted without a match.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      /* Skip blocks belonging to a different OMP region.  */
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      /* The original goto is replaced by a fallthru into the
		 dispatcher block.  */
	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  /* The argument tells the dispatcher whether it serves an inner
	     (non-outermost) OMP region.  */
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with an
   computed goto, returns 2 when it ends with a statement that
   might return to this function via an nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  /* An empty block cannot end in a control statement; its (fallthru)
     edge is created by the caller's handling of the previous block.  */
  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	/* A transaction with an abort label gets an extra edge to the
	   block holding that label.  */
	tree abort_label
	  = gimple_transaction_label (as_a <gtransaction *> (last));
	if (abort_label)
	  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
	fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  /* Lazily allocated: maps each bb index to its OMP region index, only
     once the first OMP region is seen.  */
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      /* With OMP regions we need one (goto, call) dispatcher pair per
	 region; otherwise the stack pair suffices.  */
      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* Scan the block's leading labels for abnormal-goto targets.  */
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      /* Only the heap-allocated variant needs freeing.  */
      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  /* If SEQ contributed no new blocks, nothing else to do.  */
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  /* Create edges for each of the freshly inserted blocks; none of them
     may end in a computed goto, nonlocal goto or OMP statement.  */
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  The first value returned for a given line is 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Probe the table with a stack key; only allocate a real entry when
     the line is seen for the first time.  */
  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1072 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1074 static bool
1075 same_line_p (location_t locus1, location_t locus2)
1077 expanded_location from, to;
1079 if (locus1 == locus2)
1080 return true;
1082 from = expand_location (locus1);
1083 to = expand_location (locus2);
1085 if (from.line != to.line)
1086 return false;
1087 if (from.file == to.file)
1088 return true;
1089 return (from.file != NULL
1090 && to.file != NULL
1091 && filename_cmp (from.file, to.file) == 0);
1094 /* Assign discriminators to each basic block. */
1096 static void
1097 assign_discriminators (void)
1099 basic_block bb;
1101 FOR_EACH_BB_FN (bb, cfun)
1103 edge e;
1104 edge_iterator ei;
1105 gimple last = last_stmt (bb);
1106 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1108 if (locus == UNKNOWN_LOCATION)
1109 continue;
1111 FOR_EACH_EDGE (e, ei, bb->succs)
1113 gimple first = first_non_label_stmt (e->dest);
1114 gimple last = last_stmt (e->dest);
1115 if ((first && same_line_p (locus, gimple_location (first)))
1116 || (last && same_line_p (locus, gimple_location (last))))
1118 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1119 bb->discriminator = next_discriminator_for_locus (locus);
1120 else
1121 e->dest->discriminator = next_discriminator_for_locus (locus);
1127 /* Create the edges for a GIMPLE_COND starting at block BB. */
1129 static void
1130 make_cond_expr_edges (basic_block bb)
1132 gcond *entry = as_a <gcond *> (last_stmt (bb));
1133 gimple then_stmt, else_stmt;
1134 basic_block then_bb, else_bb;
1135 tree then_label, else_label;
1136 edge e;
1138 gcc_assert (entry);
1139 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1141 /* Entry basic blocks for each component. */
1142 then_label = gimple_cond_true_label (entry);
1143 else_label = gimple_cond_false_label (entry);
1144 then_bb = label_to_block (then_label);
1145 else_bb = label_to_block (else_label);
1146 then_stmt = first_stmt (then_bb);
1147 else_stmt = first_stmt (else_bb);
1149 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1150 e->goto_locus = gimple_location (then_stmt);
1151 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1152 if (e)
1153 e->goto_locus = gimple_location (else_stmt);
1155 /* We do not need the labels anymore. */
1156 gimple_cond_set_true_label (entry, NULL_TREE);
1157 gimple_cond_set_false_label (entry, NULL_TREE);
1161 /* Called for each element in the hash table (P) as we delete the
1162 edge to cases hash table.
1164 Clear all the TREE_CHAINs to prevent problems with copying of
1165 SWITCH_EXPRs and structure sharing rules, then free the hash table
1166 element. */
1168 bool
1169 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1171 tree t, next;
1173 for (t = value; t; t = next)
1175 next = CASE_CHAIN (t);
1176 CASE_CHAIN (t) = NULL;
1179 return true;
1182 /* Start recording information mapping edges to case labels. */
1184 void
1185 start_recording_case_labels (void)
1187 gcc_assert (edge_to_cases == NULL);
1188 edge_to_cases = new hash_map<edge, tree>;
1189 touched_switch_bbs = BITMAP_ALLOC (NULL);
1192 /* Return nonzero if we are recording information for case labels. */
1194 static bool
1195 recording_case_labels_p (void)
1197 return (edge_to_cases != NULL);
1200 /* Stop recording information mapping edges to case labels and
1201 remove any information we have recorded. */
1202 void
1203 end_recording_case_labels (void)
1205 bitmap_iterator bi;
1206 unsigned i;
1207 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1208 delete edge_to_cases;
1209 edge_to_cases = NULL;
1210 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1212 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1213 if (bb)
1215 gimple stmt = last_stmt (bb);
1216 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1217 group_case_labels_stmt (as_a <gswitch *> (stmt));
1220 BITMAP_FREE (touched_switch_bbs);
1223 /* If we are inside a {start,end}_recording_cases block, then return
1224 a chain of CASE_LABEL_EXPRs from T which reference E.
1226 Otherwise return NULL. */
1228 static tree
1229 get_cases_for_edge (edge e, gswitch *t)
1231 tree *slot;
1232 size_t i, n;
1234 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1235 chains available. Return NULL so the caller can detect this case. */
1236 if (!recording_case_labels_p ())
1237 return NULL;
1239 slot = edge_to_cases->get (e);
1240 if (slot)
1241 return *slot;
1243 /* If we did not find E in the hash table, then this must be the first
1244 time we have been queried for information about E & T. Add all the
1245 elements from T to the hash table then perform the query again. */
1247 n = gimple_switch_num_labels (t);
1248 for (i = 0; i < n; i++)
1250 tree elt = gimple_switch_label (t, i);
1251 tree lab = CASE_LABEL (elt);
1252 basic_block label_bb = label_to_block (lab);
1253 edge this_edge = find_edge (e->src, label_bb);
1255 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1256 a new chain. */
1257 tree &s = edge_to_cases->get_or_insert (this_edge);
1258 CASE_CHAIN (elt) = s;
1259 s = elt;
1262 return *edge_to_cases->get (e);
1265 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1267 static void
1268 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1270 size_t i, n;
1272 n = gimple_switch_num_labels (entry);
1274 for (i = 0; i < n; ++i)
1276 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1277 basic_block label_bb = label_to_block (lab);
1278 make_edge (bb, label_bb, 0);
1283 /* Return the basic block holding label DEST. */
1285 basic_block
1286 label_to_block_fn (struct function *ifun, tree dest)
1288 int uid = LABEL_DECL_UID (dest);
1290 /* We would die hard when faced by an undefined label. Emit a label to
1291 the very first basic block. This will hopefully make even the dataflow
1292 and undefined variable warnings quite right. */
1293 if (seen_error () && uid < 0)
1295 gimple_stmt_iterator gsi =
1296 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1297 gimple stmt;
1299 stmt = gimple_build_label (dest);
1300 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1301 uid = LABEL_DECL_UID (dest);
1303 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1304 return NULL;
1305 return (*ifun->cfg->x_label_to_block_map)[uid];
1308 /* Create edges for a goto statement at block BB. Returns true
1309 if abnormal edges should be created. */
1311 static bool
1312 make_goto_expr_edges (basic_block bb)
1314 gimple_stmt_iterator last = gsi_last_bb (bb);
1315 gimple goto_t = gsi_stmt (last);
1317 /* A simple GOTO creates normal edges. */
1318 if (simple_goto_p (goto_t))
1320 tree dest = gimple_goto_dest (goto_t);
1321 basic_block label_bb = label_to_block (dest);
1322 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1323 e->goto_locus = gimple_location (goto_t);
1324 gsi_remove (&last, true);
1325 return false;
1328 /* A computed GOTO creates abnormal edges. */
1329 return true;
1332 /* Create edges for an asm statement with labels at block BB. */
1334 static void
1335 make_gimple_asm_edges (basic_block bb)
1337 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1338 int i, n = gimple_asm_nlabels (stmt);
1340 for (i = 0; i < n; ++i)
1342 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1343 basic_block label_bb = label_to_block (label);
1344 make_edge (bb, label_bb, 0);
1348 /*---------------------------------------------------------------------------
1349 Flowgraph analysis
1350 ---------------------------------------------------------------------------*/
1352 /* Cleanup useless labels in basic blocks. This is something we wish
1353 to do early because it allows us to group case labels before creating
1354 the edges for the CFG, and it speeds up block statement iterators in
1355 all passes later on.
1356 We rerun this pass after CFG is created, to get rid of the labels that
1357 are no longer referenced. After then we do not run it any more, since
1358 (almost) no new labels should be created. */
1360 /* A map from basic block index to the leading label of that block. */
/* NOTE: the array is allocated (XCNEWVEC) and freed by
   cleanup_dead_labels; entries are indexed by bb->index.  */
1361 static struct label_record
1363 /* The label. */
1364 tree label;
1366 /* True if the label is referenced from somewhere. */
1367 bool used;
1368 } *label_for_bb;
1370 /* Given LABEL return the first label in the same basic block. */
1372 static tree
1373 main_block_label (tree label)
1375 basic_block bb = label_to_block (label);
1376 tree main_label = label_for_bb[bb->index].label;
1378 /* label_to_block possibly inserted undefined label into the chain. */
1379 if (!main_label)
1381 label_for_bb[bb->index].label = label;
1382 main_label = label;
1385 label_for_bb[bb->index].used = true;
1386 return main_label;
1389 /* Clean up redundant labels within the exception tree. */
1391 static void
1392 cleanup_dead_labels_eh (void)
1394 eh_landing_pad lp;
1395 eh_region r;
1396 tree lab;
1397 int i;
1399 if (cfun->eh == NULL)
1400 return;
1402 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1403 if (lp && lp->post_landing_pad)
1405 lab = main_block_label (lp->post_landing_pad);
1406 if (lab != lp->post_landing_pad)
1408 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1409 EH_LANDING_PAD_NR (lab) = lp->index;
1413 FOR_ALL_EH_REGION (r)
1414 switch (r->type)
1416 case ERT_CLEANUP:
1417 case ERT_MUST_NOT_THROW:
1418 break;
1420 case ERT_TRY:
1422 eh_catch c;
1423 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1425 lab = c->label;
1426 if (lab)
1427 c->label = main_block_label (lab);
1430 break;
1432 case ERT_ALLOWED_EXCEPTIONS:
1433 lab = r->u.allowed.label;
1434 if (lab)
1435 r->u.allowed.label = main_block_label (lab);
1436 break;
1441 /* Cleanup redundant labels. This is a three-step process:
1442 1) Find the leading label for each block.
1443 2) Redirect all references to labels to the leading labels.
1444 3) Cleanup all useless labels. */
1446 void
1447 cleanup_dead_labels (void)
1449 basic_block bb;
1450 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1452 /* Find a suitable label for each block. We use the first user-defined
1453 label if there is one, or otherwise just the first label we see. */
1454 FOR_EACH_BB_FN (bb, cfun)
1456 gimple_stmt_iterator i;
1458 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1460 tree label;
1461 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels only appear as a leading run in a block; stop at the first
   non-label statement.  */
1463 if (!label_stmt)
1464 break;
1466 label = gimple_label_label (label_stmt);
1468 /* If we have not yet seen a label for the current block,
1469 remember this one and see if there are more labels. */
1470 if (!label_for_bb[bb->index].label)
1472 label_for_bb[bb->index].label = label;
1473 continue;
1476 /* If we did see a label for the current block already, but it
1477 is an artificially created label, replace it if the current
1478 label is a user defined label. */
1479 if (!DECL_ARTIFICIAL (label)
1480 && DECL_ARTIFICIAL (label_for_bb[bb->index].label)
1482 label_for_bb[bb->index].label = label;
1483 break;
1488 /* Now redirect all jumps/branches to the selected label.
1489 First do so for each block ending in a control statement. */
1490 FOR_EACH_BB_FN (bb, cfun)
1492 gimple stmt = last_stmt (bb);
1493 tree label, new_label;
1495 if (!stmt)
1496 continue;
1498 switch (gimple_code (stmt))
/* Both arms of a conditional may reference labels.  */
1500 case GIMPLE_COND:
1502 gcond *cond_stmt = as_a <gcond *> (stmt);
1503 label = gimple_cond_true_label (cond_stmt);
1504 if (label)
1506 new_label = main_block_label (label);
1507 if (new_label != label)
1508 gimple_cond_set_true_label (cond_stmt, new_label);
1511 label = gimple_cond_false_label (cond_stmt);
1512 if (label)
1514 new_label = main_block_label (label);
1515 if (new_label != label)
1516 gimple_cond_set_false_label (cond_stmt, new_label);
1519 break;
1521 case GIMPLE_SWITCH:
1523 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1524 size_t i, n = gimple_switch_num_labels (switch_stmt);
1526 /* Replace all destination labels. */
1527 for (i = 0; i < n; ++i)
1529 tree case_label = gimple_switch_label (switch_stmt, i);
1530 label = CASE_LABEL (case_label);
1531 new_label = main_block_label (label);
1532 if (new_label != label)
1533 CASE_LABEL (case_label) = new_label;
1535 break;
1538 case GIMPLE_ASM:
1540 gasm *asm_stmt = as_a <gasm *> (stmt);
1541 int i, n = gimple_asm_nlabels (asm_stmt);
1543 for (i = 0; i < n; ++i)
1545 tree cons = gimple_asm_label_op (asm_stmt, i);
1546 tree label = main_block_label (TREE_VALUE (cons));
1547 TREE_VALUE (cons) = label;
1549 break;
1552 /* We have to handle gotos until they're removed, and we don't
1553 remove them until after we've created the CFG edges. */
1554 case GIMPLE_GOTO:
1555 if (!computed_goto_p (stmt))
1557 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1558 label = gimple_goto_dest (goto_stmt);
1559 new_label = main_block_label (label);
1560 if (new_label != label)
1561 gimple_goto_set_dest (goto_stmt, new_label);
1563 break;
/* A transaction statement can also carry a label reference.  */
1565 case GIMPLE_TRANSACTION:
1567 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1568 tree label = gimple_transaction_label (trans_stmt);
1569 if (label)
1571 tree new_label = main_block_label (label);
1572 if (new_label != label)
1573 gimple_transaction_set_label (trans_stmt, new_label);
1576 break;
1578 default:
1579 break;
1583 /* Do the same for the exception region tree labels. */
1584 cleanup_dead_labels_eh ();
1586 /* Finally, purge dead labels. All user-defined labels and labels that
1587 can be the target of non-local gotos and labels which have their
1588 address taken are preserved. */
1589 FOR_EACH_BB_FN (bb, cfun)
1591 gimple_stmt_iterator i;
1592 tree label_for_this_bb = label_for_bb[bb->index].label;
1594 if (!label_for_this_bb)
1595 continue;
1597 /* If the main label of the block is unused, we may still remove it. */
1598 if (!label_for_bb[bb->index].used)
1599 label_for_this_bb = NULL;
1601 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1603 tree label;
1604 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1606 if (!label_stmt)
1607 break;
1609 label = gimple_label_label (label_stmt);
/* Keep the leading label plus anything that must survive: user
   labels, non-local goto targets, address-taken labels.  */
1611 if (label == label_for_this_bb
1612 || !DECL_ARTIFICIAL (label)
1613 || DECL_NONLOCAL (label)
1614 || FORCED_LABEL (label))
1615 gsi_next (&i);
1616 else
1617 gsi_remove (&i, true);
1621 free (label_for_bb);
1624 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1625 the ones jumping to the same label.
1626 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1628 void
1629 group_case_labels_stmt (gswitch *stmt)
1631 int old_size = gimple_switch_num_labels (stmt);
1632 int i, j, new_size = old_size;
1633 basic_block default_bb = NULL;
1635 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1637 /* Look for possible opportunities to merge cases. */
/* Label 0 is the default label, so scanning starts at index 1.  */
1638 i = 1;
1639 while (i < old_size)
1641 tree base_case, base_high;
1642 basic_block base_bb;
1644 base_case = gimple_switch_label (stmt, i);
1646 gcc_assert (base_case);
1647 base_bb = label_to_block (CASE_LABEL (base_case));
1649 /* Discard cases that have the same destination as the
1650 default case. */
1651 if (base_bb == default_bb)
/* A NULL label marks the slot for removal by the compression
   pass below.  */
1653 gimple_switch_set_label (stmt, i, NULL_TREE);
1654 i++;
1655 new_size--;
1656 continue;
1659 base_high = CASE_HIGH (base_case)
1660 ? CASE_HIGH (base_case)
1661 : CASE_LOW (base_case);
1662 i++;
1664 /* Try to merge case labels. Break out when we reach the end
1665 of the label vector or when we cannot merge the next case
1666 label with the current one. */
1667 while (i < old_size)
1669 tree merge_case = gimple_switch_label (stmt, i);
1670 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
/* BHP1 is BASE_HIGH + 1, the only CASE_LOW value that can extend
   the current range contiguously.  */
1671 wide_int bhp1 = wi::add (base_high, 1);
1673 /* Merge the cases if they jump to the same place,
1674 and their ranges are consecutive. */
1675 if (merge_bb == base_bb
1676 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1678 base_high = CASE_HIGH (merge_case) ?
1679 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1680 CASE_HIGH (base_case) = base_high;
1681 gimple_switch_set_label (stmt, i, NULL_TREE);
1682 new_size--;
1683 i++;
1685 else
1686 break;
1690 /* Compress the case labels in the label vector, and adjust the
1691 length of the vector. */
1692 for (i = 0, j = 0; i < new_size; i++)
/* Skip slots nulled out above; J trails I over surviving labels.  */
1694 while (! gimple_switch_label (stmt, j))
1695 j++;
1696 gimple_switch_set_label (stmt, i,
1697 gimple_switch_label (stmt, j++));
1700 gcc_assert (new_size <= old_size);
1701 gimple_switch_set_num_labels (stmt, new_size);
1704 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1705 and scan the sorted vector of cases. Combine the ones jumping to the
1706 same label. */
1708 void
1709 group_case_labels (void)
1711 basic_block bb;
1713 FOR_EACH_BB_FN (bb, cfun)
1715 gimple stmt = last_stmt (bb);
1716 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1717 group_case_labels_stmt (as_a <gswitch *> (stmt));
1721 /* Checks whether we can merge block B into block A. */
1723 static bool
1724 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1726 gimple stmt;
/* A must have exactly one successor, reached over a non-complex edge,
   and that successor must be B.  */
1728 if (!single_succ_p (a))
1729 return false;
1731 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1732 return false;
1734 if (single_succ (a) != b)
1735 return false;
/* ...and B must be reachable only from A.  */
1737 if (!single_pred_p (b))
1738 return false;
/* Never merge with the ENTRY or EXIT blocks.  */
1740 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1741 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1742 return false;
1744 /* If A ends by a statement causing exceptions or something similar, we
1745 cannot merge the blocks. */
1746 stmt = last_stmt (a);
1747 if (stmt && stmt_ends_bb_p (stmt))
1748 return false;
1750 /* Do not allow a block with only a non-local label to be merged. */
1751 if (stmt)
1752 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1753 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1754 return false;
1756 /* Examine the labels at the beginning of B. */
1757 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1758 gsi_next (&gsi))
1760 tree lab;
1761 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1762 if (!label_stmt)
1763 break;
1764 lab = gimple_label_label (label_stmt);
1766 /* Do not remove user forced labels or for -O0 any user labels. */
1767 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1768 return false;
1771 /* Protect simple loop latches. We only want to avoid merging
1772 the latch with the loop header or with a block in another
1773 loop in this case. */
1774 if (current_loops
1775 && b->loop_father->latch == b
1776 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1777 && (b->loop_father->header == a
1778 || b->loop_father != a->loop_father))
1779 return false;
1781 /* It must be possible to eliminate all phi nodes in B. If ssa form
1782 is not up-to-date and a name-mapping is registered, we cannot eliminate
1783 any phis. Symbols marked for renaming are never a problem though. */
1784 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1785 gsi_next (&gsi))
1787 gphi *phi = gsi.phi ();
1788 /* Technically only new names matter. */
1789 if (name_registered_for_update_p (PHI_RESULT (phi)))
1790 return false;
1793 /* When not optimizing, don't merge if we'd lose goto_locus. */
1794 if (!optimize
1795 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1797 location_t goto_locus = single_succ_edge (a)->goto_locus;
1798 gimple_stmt_iterator prev, next;
1799 prev = gsi_last_nondebug_bb (a);
1800 next = gsi_after_labels (b);
1801 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1802 gsi_next_nondebug (&next);
/* The locus survives only if the statement adjacent to the edge on
   either side already carries it.  */
1803 if ((gsi_end_p (prev)
1804 || gimple_location (gsi_stmt (prev)) != goto_locus)
1805 && (gsi_end_p (next)
1806 || gimple_location (gsi_stmt (next)) != goto_locus))
1807 return false;
1810 return true;
1813 /* Replaces all uses of NAME by VAL. */
1815 void
1816 replace_uses_by (tree name, tree val)
1818 imm_use_iterator imm_iter;
1819 use_operand_p use;
1820 gimple stmt;
1821 edge e;
/* Walk every statement that uses NAME and rewrite each use to VAL.  */
1823 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1825 /* Mark the block if we change the last stmt in it. */
1826 if (cfgcleanup_altered_bbs
1827 && stmt_ends_bb_p (stmt))
1828 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1830 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1832 replace_exp (use, val);
/* A use in a PHI arg on an abnormal edge forces VAL to be marked as
   occurring in an abnormal PHI.  */
1834 if (gimple_code (stmt) == GIMPLE_PHI)
1836 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1837 PHI_ARG_INDEX_FROM_USE (use));
1838 if (e->flags & EDGE_ABNORMAL
1839 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1841 /* This can only occur for virtual operands, since
1842 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1843 would prevent replacement. */
1844 gcc_checking_assert (virtual_operand_p (name));
1845 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* For non-PHI statements, refresh invariants and try to fold and
   clean up after the substitution.  */
1850 if (gimple_code (stmt) != GIMPLE_PHI)
1852 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1853 gimple orig_stmt = stmt;
1854 size_t i;
1856 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1857 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1858 only change sth from non-invariant to invariant, and only
1859 when propagating constants. */
1860 if (is_gimple_min_invariant (val))
1861 for (i = 0; i < gimple_num_ops (stmt); i++)
1863 tree op = gimple_op (stmt, i);
1864 /* Operands may be empty here. For example, the labels
1865 of a GIMPLE_COND are nulled out following the creation
1866 of the corresponding CFG edges. */
1867 if (op && TREE_CODE (op) == ADDR_EXPR)
1868 recompute_tree_invariant_for_addr_expr (op);
1871 if (fold_stmt (&gsi))
1872 stmt = gsi_stmt (gsi);
1874 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1875 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1877 update_stmt (stmt);
/* After the walk NAME must be fully dead.  */
1881 gcc_checking_assert (has_zero_uses (name));
1883 /* Also update the trees stored in loop structures. */
1884 if (current_loops)
1886 struct loop *loop;
1888 FOR_EACH_LOOP (loop, 0)
1890 substitute_in_loop_info (loop, name, val);
1895 /* Merge block B into block A. */
1897 static void
1898 gimple_merge_blocks (basic_block a, basic_block b)
1900 gimple_stmt_iterator last, gsi;
1901 gphi_iterator psi;
1903 if (dump_file)
1904 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1906 /* Remove all single-valued PHI nodes from block B of the form
1907 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1908 gsi = gsi_last_bb (a);
1909 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1911 gimple phi = gsi_stmt (psi);
1912 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1913 gimple copy;
1914 bool may_replace_uses = (virtual_operand_p (def)
1915 || may_propagate_copy (def, use));
1917 /* In case we maintain loop closed ssa form, do not propagate arguments
1918 of loop exit phi nodes. */
1919 if (current_loops
1920 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1921 && !virtual_operand_p (def)
1922 && TREE_CODE (use) == SSA_NAME
1923 && a->loop_father != b->loop_father)
1924 may_replace_uses = false;
1926 if (!may_replace_uses)
1928 gcc_assert (!virtual_operand_p (def));
1930 /* Note that just emitting the copies is fine -- there is no problem
1931 with ordering of phi nodes. This is because A is the single
1932 predecessor of B, therefore results of the phi nodes cannot
1933 appear as arguments of the phi nodes. */
1934 copy = gimple_build_assign (def, use);
1935 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1936 remove_phi_node (&psi, false);
1938 else
1940 /* If we deal with a PHI for virtual operands, we can simply
1941 propagate these without fussing with folding or updating
1942 the stmt. */
1943 if (virtual_operand_p (def))
1945 imm_use_iterator iter;
1946 use_operand_p use_p;
1947 gimple stmt;
1949 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1950 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1951 SET_USE (use_p, use);
1953 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1954 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1956 else
1957 replace_uses_by (def, use);
1959 remove_phi_node (&psi, true);
1963 /* Ensure that B follows A. */
1964 move_block_after (b, a);
1966 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1967 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1969 /* Remove labels from B and set gimple_bb to A for other statements. */
1970 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1972 gimple stmt = gsi_stmt (gsi);
1973 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1975 tree label = gimple_label_label (label_stmt);
1976 int lp_nr;
1978 gsi_remove (&gsi, false);
1980 /* Now that we can thread computed gotos, we might have
1981 a situation where we have a forced label in block B
1982 However, the label at the start of block B might still be
1983 used in other ways (think about the runtime checking for
1984 Fortran assigned gotos). So we can not just delete the
1985 label. Instead we move the label to the start of block A. */
1986 if (FORCED_LABEL (label))
1988 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1989 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1991 /* Other user labels keep around in a form of a debug stmt. */
1992 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1994 gimple dbg = gimple_build_debug_bind (label,
1995 integer_zero_node,
1996 stmt);
1997 gimple_debug_bind_reset_value (dbg);
1998 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* Dissolve any EH landing-pad association the label carried.  */
2001 lp_nr = EH_LANDING_PAD_NR (label);
2002 if (lp_nr)
2004 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2005 lp->post_landing_pad = NULL;
2008 else
2010 gimple_set_bb (stmt, a);
2011 gsi_next (&gsi);
2015 /* When merging two BBs, if their counts are different, the larger count
2016 is selected as the new bb count. This is to handle inconsistent
2017 profiles. */
2018 if (a->loop_father == b->loop_father)
2020 a->count = MAX (a->count, b->count);
2021 a->frequency = MAX (a->frequency, b->frequency);
2024 /* Merge the sequences. */
2025 last = gsi_last_bb (a);
2026 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2027 set_bb_seq (b, NULL);
2029 if (cfgcleanup_altered_bbs)
2030 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2034 /* Return the one of two successors of BB that is not reachable by a
2035 complex edge, if there is one. Else, return BB. We use
2036 this in optimizations that use post-dominators for their heuristics,
2037 to catch the cases in C++ where function calls are involved. */
2039 basic_block
2040 single_noncomplex_succ (basic_block bb)
2042 edge e0, e1;
2043 if (EDGE_COUNT (bb->succs) != 2)
2044 return bb;
2046 e0 = EDGE_SUCC (bb, 0);
2047 e1 = EDGE_SUCC (bb, 1);
2048 if (e0->flags & EDGE_COMPLEX)
2049 return e1->dest;
2050 if (e1->flags & EDGE_COMPLEX)
2051 return e0->dest;
2053 return bb;
2056 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2058 void
2059 notice_special_calls (gcall *call)
2061 int flags = gimple_call_flags (call);
2063 if (flags & ECF_MAY_BE_ALLOCA)
2064 cfun->calls_alloca = true;
2065 if (flags & ECF_RETURNS_TWICE)
2066 cfun->calls_setjmp = true;
2070 /* Clear flags set by notice_special_calls. Used by dead code removal
2071 to update the flags. */
2073 void
2074 clear_special_calls (void)
2076 cfun->calls_alloca = false;
2077 cfun->calls_setjmp = false;
2080 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2082 static void
2083 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2085 /* Since this block is no longer reachable, we can just delete all
2086 of its PHI nodes. */
2087 remove_phi_nodes (bb);
2089 /* Remove edges to BB's successors. */
2090 while (EDGE_COUNT (bb->succs) > 0)
2091 remove_edge (EDGE_SUCC (bb, 0));
2095 /* Remove statements of basic block BB. */
2097 static void
2098 remove_bb (basic_block bb)
2100 gimple_stmt_iterator i;
2102 if (dump_file)
2104 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2105 if (dump_flags & TDF_DETAILS)
2107 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2108 fprintf (dump_file, "\n");
2112 if (current_loops)
2114 struct loop *loop = bb->loop_father;
2116 /* If a loop gets removed, clean up the information associated
2117 with it. */
2118 if (loop->latch == bb
2119 || loop->header == bb)
2120 free_numbers_of_iterations_estimates_loop (loop);
2123 /* Remove all the instructions in the block. */
2124 if (bb_seq (bb) != NULL)
2126 /* Walk backwards so as to get a chance to substitute all
2127 released DEFs into debug stmts. See
2128 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2129 details. */
2130 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2132 gimple stmt = gsi_stmt (i);
2133 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2134 if (label_stmt
2135 && (FORCED_LABEL (gimple_label_label (label_stmt))
2136 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2138 basic_block new_bb;
2139 gimple_stmt_iterator new_gsi;
2141 /* A non-reachable non-local label may still be referenced.
2142 But it no longer needs to carry the extra semantics of
2143 non-locality. */
2144 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2146 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2147 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
/* Rescue the label into the preceding block.  */
2150 new_bb = bb->prev_bb;
2151 new_gsi = gsi_start_bb (new_bb);
2152 gsi_remove (&i, false);
2153 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2155 else
2157 /* Release SSA definitions if we are in SSA. Note that we
2158 may be called when not in SSA. For example,
2159 final_cleanup calls this function via
2160 cleanup_tree_cfg. */
2161 if (gimple_in_ssa_p (cfun))
2162 release_defs (stmt);
2164 gsi_remove (&i, true);
/* gsi_remove advanced/invalidated the iterator; re-anchor at the
   (new) last statement, otherwise step to the previous one.  */
2167 if (gsi_end_p (i))
2168 i = gsi_last_bb (bb);
2169 else
2170 gsi_prev (&i);
2174 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2175 bb->il.gimple.seq = NULL;
2176 bb->il.gimple.phi_nodes = NULL;
2180 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2181 predicate VAL, return the edge that will be taken out of the block.
2182 If VAL does not match a unique edge, NULL is returned. */
2184 edge
2185 find_taken_edge (basic_block bb, tree val)
2187 gimple stmt;
2189 stmt = last_stmt (bb);
2191 gcc_assert (stmt);
2192 gcc_assert (is_ctrl_stmt (stmt));
2194 if (val == NULL)
2195 return NULL;
2197 if (!is_gimple_min_invariant (val))
2198 return NULL;
2200 if (gimple_code (stmt) == GIMPLE_COND)
2201 return find_taken_edge_cond_expr (bb, val);
2203 if (gimple_code (stmt) == GIMPLE_SWITCH)
2204 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2206 if (computed_goto_p (stmt))
2208 /* Only optimize if the argument is a label, if the argument is
2209 not a label then we can not construct a proper CFG.
2211 It may be the case that we only need to allow the LABEL_REF to
2212 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2213 appear inside a LABEL_EXPR just to be safe. */
2214 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2215 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2216 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2217 return NULL;
2220 gcc_unreachable ();
2223 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2224 statement, determine which of the outgoing edges will be taken out of the
2225 block. Return NULL if either edge may be taken. */
2227 static edge
2228 find_taken_edge_computed_goto (basic_block bb, tree val)
2230 basic_block dest;
2231 edge e = NULL;
2233 dest = label_to_block (val);
2234 if (dest)
2236 e = find_edge (bb, dest);
2237 gcc_assert (e != NULL);
2240 return e;
2243 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2244 statement, determine which of the two edges will be taken out of the
2245 block. Return NULL if either edge may be taken. */
2247 static edge
2248 find_taken_edge_cond_expr (basic_block bb, tree val)
2250 edge true_edge, false_edge;
2252 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2254 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2255 return (integer_zerop (val) ? false_edge : true_edge);
2258 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2259 statement, determine which edge will be taken out of the block. Return
2260 NULL if any edge may be taken. */
2262 static edge
2263 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2264 tree val)
2266 basic_block dest_bb;
2267 edge e;
2268 tree taken_case;
2270 taken_case = find_case_label_for_value (switch_stmt, val);
2271 dest_bb = label_to_block (CASE_LABEL (taken_case));
2273 e = find_edge (bb, dest_bb);
2274 gcc_assert (e);
2275 return e;
2279 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2280 We can make optimal use here of the fact that the case labels are
2281 sorted: We can do a binary search for a case matching VAL. */
2283 static tree
2284 find_case_label_for_value (gswitch *switch_stmt, tree val)
2286 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2287 tree default_case = gimple_switch_default_label (switch_stmt);
2289 for (low = 0, high = n; high - low > 1; )
2291 size_t i = (high + low) / 2;
2292 tree t = gimple_switch_label (switch_stmt, i);
2293 int cmp;
2295 /* Cache the result of comparing CASE_LOW and val. */
2296 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2298 if (cmp > 0)
2299 high = i;
2300 else
2301 low = i;
2303 if (CASE_HIGH (t) == NULL)
2305 /* A singe-valued case label. */
2306 if (cmp == 0)
2307 return t;
2309 else
2311 /* A case range. We can only handle integer ranges. */
2312 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2313 return t;
2317 return default_case;
2321 /* Dump a basic block on stderr. */
2323 void
2324 gimple_debug_bb (basic_block bb)
2326 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2330 /* Dump basic block with index N on stderr. */
2332 basic_block
2333 gimple_debug_bb_n (int n)
2335 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2336 return BASIC_BLOCK_FOR_FN (cfun, n);
2340 /* Dump the CFG on stderr.
2342 FLAGS are the same used by the tree dumping functions
2343 (see TDF_* in dumpfile.h). */
2345 void
2346 gimple_debug_cfg (int flags)
2348 gimple_dump_cfg (stderr, flags);
2352 /* Dump the program showing basic block boundaries on the given FILE.
2354 FLAGS are the same used by the tree dumping functions (see TDF_* in
2355 tree.h). */
2357 void
2358 gimple_dump_cfg (FILE *file, int flags)
2360 if (flags & TDF_DETAILS)
2362 dump_function_header (file, current_function_decl, flags);
2363 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2364 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2365 last_basic_block_for_fn (cfun));
2367 brief_dump_cfg (file, flags | TDF_COMMENT);
2368 fprintf (file, "\n");
2371 if (flags & TDF_STATS)
2372 dump_cfg_stats (file);
2374 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2378 /* Dump CFG statistics on FILE. */
2380 void
2381 dump_cfg_stats (FILE *file)
2383 static long max_num_merged_labels = 0;
2384 unsigned long size, total = 0;
2385 long num_edges;
2386 basic_block bb;
2387 const char * const fmt_str = "%-30s%-13s%12s\n";
2388 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2389 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2390 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2391 const char *funcname = current_function_name ();
2393 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2395 fprintf (file, "---------------------------------------------------------\n");
2396 fprintf (file, fmt_str, "", " Number of ", "Memory");
2397 fprintf (file, fmt_str, "", " instances ", "used ");
2398 fprintf (file, "---------------------------------------------------------\n");
2400 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2401 total += size;
2402 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2403 SCALE (size), LABEL (size));
2405 num_edges = 0;
2406 FOR_EACH_BB_FN (bb, cfun)
2407 num_edges += EDGE_COUNT (bb->succs);
2408 size = num_edges * sizeof (struct edge_def);
2409 total += size;
2410 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2412 fprintf (file, "---------------------------------------------------------\n");
2413 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2414 LABEL (total));
2415 fprintf (file, "---------------------------------------------------------\n");
2416 fprintf (file, "\n");
2418 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2419 max_num_merged_labels = cfg_stats.num_merged_labels;
2421 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2422 cfg_stats.num_merged_labels, max_num_merged_labels);
2424 fprintf (file, "\n");
2428 /* Dump CFG statistics on stderr. Keep extern so that it's always
2429 linked in the final executable. */
2431 DEBUG_FUNCTION void
2432 debug_cfg_stats (void)
2434 dump_cfg_stats (stderr);
2437 /*---------------------------------------------------------------------------
2438 Miscellaneous helpers
2439 ---------------------------------------------------------------------------*/
2441 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2442 flow. Transfers of control flow associated with EH are excluded. */
2444 static bool
2445 call_can_make_abnormal_goto (gimple t)
2447 /* If the function has no non-local labels, then a call cannot make an
2448 abnormal transfer of control. */
2449 if (!cfun->has_nonlocal_label
2450 && !cfun->calls_setjmp)
2451 return false;
2453 /* Likewise if the call has no side effects. */
2454 if (!gimple_has_side_effects (t))
2455 return false;
2457 /* Likewise if the called function is leaf. */
2458 if (gimple_call_flags (t) & ECF_LEAF)
2459 return false;
2461 return true;
2465 /* Return true if T can make an abnormal transfer of control flow.
2466 Transfers of control flow associated with EH are excluded. */
2468 bool
2469 stmt_can_make_abnormal_goto (gimple t)
2471 if (computed_goto_p (t))
2472 return true;
2473 if (is_gimple_call (t))
2474 return call_can_make_abnormal_goto (t);
2475 return false;
2479 /* Return true if T represents a stmt that always transfers control. */
2481 bool
2482 is_ctrl_stmt (gimple t)
2484 switch (gimple_code (t))
2486 case GIMPLE_COND:
2487 case GIMPLE_SWITCH:
2488 case GIMPLE_GOTO:
2489 case GIMPLE_RETURN:
2490 case GIMPLE_RESX:
2491 return true;
2492 default:
2493 return false;
2498 /* Return true if T is a statement that may alter the flow of control
2499 (e.g., a call to a non-returning function). */
2501 bool
2502 is_ctrl_altering_stmt (gimple t)
2504 gcc_assert (t);
2506 switch (gimple_code (t))
2508 case GIMPLE_CALL:
2509 /* Per stmt call flag indicates whether the call could alter
2510 controlflow. */
2511 if (gimple_call_ctrl_altering_p (t))
2512 return true;
2513 break;
2515 case GIMPLE_EH_DISPATCH:
2516 /* EH_DISPATCH branches to the individual catch handlers at
2517 this level of a try or allowed-exceptions region. It can
2518 fallthru to the next statement as well. */
2519 return true;
2521 case GIMPLE_ASM:
2522 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2523 return true;
2524 break;
2526 CASE_GIMPLE_OMP:
2527 /* OpenMP directives alter control flow. */
2528 return true;
2530 case GIMPLE_TRANSACTION:
2531 /* A transaction start alters control flow. */
2532 return true;
2534 default:
2535 break;
2538 /* If a statement can throw, it alters control flow. */
2539 return stmt_can_throw_internal (t);
2543 /* Return true if T is a simple local goto. */
2545 bool
2546 simple_goto_p (gimple t)
2548 return (gimple_code (t) == GIMPLE_GOTO
2549 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2553 /* Return true if STMT should start a new basic block. PREV_STMT is
2554 the statement preceding STMT. It is used when STMT is a label or a
2555 case label. Labels should only start a new basic block if their
2556 previous statement wasn't a label. Otherwise, sequence of labels
2557 would generate unnecessary basic blocks that only contain a single
2558 label. */
2560 static inline bool
2561 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2563 if (stmt == NULL)
2564 return false;
2566 /* Labels start a new basic block only if the preceding statement
2567 wasn't a label of the same type. This prevents the creation of
2568 consecutive blocks that have nothing but a single label. */
2569 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2571 /* Nonlocal and computed GOTO targets always start a new block. */
2572 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2573 || FORCED_LABEL (gimple_label_label (label_stmt)))
2574 return true;
2576 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2578 if (DECL_NONLOCAL (gimple_label_label (
2579 as_a <glabel *> (prev_stmt))))
2580 return true;
2582 cfg_stats.num_merged_labels++;
2583 return false;
2585 else
2586 return true;
2588 else if (gimple_code (stmt) == GIMPLE_CALL
2589 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2590 /* setjmp acts similar to a nonlocal GOTO target and thus should
2591 start a new block. */
2592 return true;
2594 return false;
2598 /* Return true if T should end a basic block. */
2600 bool
2601 stmt_ends_bb_p (gimple t)
2603 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2606 /* Remove block annotations and other data structures. */
2608 void
2609 delete_tree_cfg_annotations (void)
2611 vec_free (label_to_block_map_for_fn (cfun));
2614 /* Return the virtual phi in BB. */
2616 gphi *
2617 get_virtual_phi (basic_block bb)
2619 for (gphi_iterator gsi = gsi_start_phis (bb);
2620 !gsi_end_p (gsi);
2621 gsi_next (&gsi))
2623 gphi *phi = gsi.phi ();
2625 if (virtual_operand_p (PHI_RESULT (phi)))
2626 return phi;
2629 return NULL;
2632 /* Return the first statement in basic block BB. */
2634 gimple
2635 first_stmt (basic_block bb)
2637 gimple_stmt_iterator i = gsi_start_bb (bb);
2638 gimple stmt = NULL;
2640 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2642 gsi_next (&i);
2643 stmt = NULL;
2645 return stmt;
2648 /* Return the first non-label statement in basic block BB. */
2650 static gimple
2651 first_non_label_stmt (basic_block bb)
2653 gimple_stmt_iterator i = gsi_start_bb (bb);
2654 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2655 gsi_next (&i);
2656 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2659 /* Return the last statement in basic block BB. */
2661 gimple
2662 last_stmt (basic_block bb)
2664 gimple_stmt_iterator i = gsi_last_bb (bb);
2665 gimple stmt = NULL;
2667 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2669 gsi_prev (&i);
2670 stmt = NULL;
2672 return stmt;
2675 /* Return the last statement of an otherwise empty block. Return NULL
2676 if the block is totally empty, or if it contains more than one
2677 statement. */
2679 gimple
2680 last_and_only_stmt (basic_block bb)
2682 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2683 gimple last, prev;
2685 if (gsi_end_p (i))
2686 return NULL;
2688 last = gsi_stmt (i);
2689 gsi_prev_nondebug (&i);
2690 if (gsi_end_p (i))
2691 return last;
2693 /* Empty statements should no longer appear in the instruction stream.
2694 Everything that might have appeared before should be deleted by
2695 remove_useless_stmts, and the optimizers should just gsi_remove
2696 instead of smashing with build_empty_stmt.
2698 Thus the only thing that should appear here in a block containing
2699 one executable statement is a label. */
2700 prev = gsi_stmt (i);
2701 if (gimple_code (prev) == GIMPLE_LABEL)
2702 return last;
2703 else
2704 return NULL;
2707 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2709 static void
2710 reinstall_phi_args (edge new_edge, edge old_edge)
2712 edge_var_map *vm;
2713 int i;
2714 gphi_iterator phis;
2716 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2717 if (!v)
2718 return;
2720 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2721 v->iterate (i, &vm) && !gsi_end_p (phis);
2722 i++, gsi_next (&phis))
2724 gphi *phi = phis.phi ();
2725 tree result = redirect_edge_var_map_result (vm);
2726 tree arg = redirect_edge_var_map_def (vm);
2728 gcc_assert (result == gimple_phi_result (phi));
2730 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2733 redirect_edge_var_map_clear (old_edge);
2736 /* Returns the basic block after which the new basic block created
2737 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2738 near its "logical" location. This is of most help to humans looking
2739 at debugging dumps. */
2741 basic_block
2742 split_edge_bb_loc (edge edge_in)
2744 basic_block dest = edge_in->dest;
2745 basic_block dest_prev = dest->prev_bb;
2747 if (dest_prev)
2749 edge e = find_edge (dest_prev, dest);
2750 if (e && !(e->flags & EDGE_COMPLEX))
2751 return edge_in->src;
2753 return dest_prev;
2756 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2757 Abort on abnormal edges. */
2759 static basic_block
2760 gimple_split_edge (edge edge_in)
2762 basic_block new_bb, after_bb, dest;
2763 edge new_edge, e;
2765 /* Abnormal edges cannot be split. */
2766 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2768 dest = edge_in->dest;
2770 after_bb = split_edge_bb_loc (edge_in);
2772 new_bb = create_empty_bb (after_bb);
2773 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2774 new_bb->count = edge_in->count;
2775 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2776 new_edge->probability = REG_BR_PROB_BASE;
2777 new_edge->count = edge_in->count;
2779 e = redirect_edge_and_branch (edge_in, new_bb);
2780 gcc_assert (e == edge_in);
2781 reinstall_phi_args (new_edge, e);
2783 return new_bb;
2787 /* Verify properties of the address expression T with base object BASE. */
2789 static tree
2790 verify_address (tree t, tree base)
2792 bool old_constant;
2793 bool old_side_effects;
2794 bool new_constant;
2795 bool new_side_effects;
2797 old_constant = TREE_CONSTANT (t);
2798 old_side_effects = TREE_SIDE_EFFECTS (t);
2800 recompute_tree_invariant_for_addr_expr (t);
2801 new_side_effects = TREE_SIDE_EFFECTS (t);
2802 new_constant = TREE_CONSTANT (t);
2804 if (old_constant != new_constant)
2806 error ("constant not recomputed when ADDR_EXPR changed");
2807 return t;
2809 if (old_side_effects != new_side_effects)
2811 error ("side effects not recomputed when ADDR_EXPR changed");
2812 return t;
2815 if (!(TREE_CODE (base) == VAR_DECL
2816 || TREE_CODE (base) == PARM_DECL
2817 || TREE_CODE (base) == RESULT_DECL))
2818 return NULL_TREE;
2820 if (DECL_GIMPLE_REG_P (base))
2822 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2823 return base;
2826 return NULL_TREE;
2829 /* Callback for walk_tree, check that all elements with address taken are
2830 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2831 inside a PHI node. */
2833 static tree
2834 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2836 tree t = *tp, x;
2838 if (TYPE_P (t))
2839 *walk_subtrees = 0;
2841 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2842 #define CHECK_OP(N, MSG) \
2843 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2844 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2846 switch (TREE_CODE (t))
2848 case SSA_NAME:
2849 if (SSA_NAME_IN_FREE_LIST (t))
2851 error ("SSA name in freelist but still referenced");
2852 return *tp;
2854 break;
2856 case INDIRECT_REF:
2857 error ("INDIRECT_REF in gimple IL");
2858 return t;
2860 case MEM_REF:
2861 x = TREE_OPERAND (t, 0);
2862 if (!POINTER_TYPE_P (TREE_TYPE (x))
2863 || !is_gimple_mem_ref_addr (x))
2865 error ("invalid first operand of MEM_REF");
2866 return x;
2868 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2869 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2871 error ("invalid offset operand of MEM_REF");
2872 return TREE_OPERAND (t, 1);
2874 if (TREE_CODE (x) == ADDR_EXPR
2875 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2876 return x;
2877 *walk_subtrees = 0;
2878 break;
2880 case ASSERT_EXPR:
2881 x = fold (ASSERT_EXPR_COND (t));
2882 if (x == boolean_false_node)
2884 error ("ASSERT_EXPR with an always-false condition");
2885 return *tp;
2887 break;
2889 case MODIFY_EXPR:
2890 error ("MODIFY_EXPR not expected while having tuples");
2891 return *tp;
2893 case ADDR_EXPR:
2895 tree tem;
2897 gcc_assert (is_gimple_address (t));
2899 /* Skip any references (they will be checked when we recurse down the
2900 tree) and ensure that any variable used as a prefix is marked
2901 addressable. */
2902 for (x = TREE_OPERAND (t, 0);
2903 handled_component_p (x);
2904 x = TREE_OPERAND (x, 0))
2907 if ((tem = verify_address (t, x)))
2908 return tem;
2910 if (!(TREE_CODE (x) == VAR_DECL
2911 || TREE_CODE (x) == PARM_DECL
2912 || TREE_CODE (x) == RESULT_DECL))
2913 return NULL;
2915 if (!TREE_ADDRESSABLE (x))
2917 error ("address taken, but ADDRESSABLE bit not set");
2918 return x;
2921 break;
2924 case COND_EXPR:
2925 x = COND_EXPR_COND (t);
2926 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2928 error ("non-integral used in condition");
2929 return x;
2931 if (!is_gimple_condexpr (x))
2933 error ("invalid conditional operand");
2934 return x;
2936 break;
2938 case NON_LVALUE_EXPR:
2939 case TRUTH_NOT_EXPR:
2940 gcc_unreachable ();
2942 CASE_CONVERT:
2943 case FIX_TRUNC_EXPR:
2944 case FLOAT_EXPR:
2945 case NEGATE_EXPR:
2946 case ABS_EXPR:
2947 case BIT_NOT_EXPR:
2948 CHECK_OP (0, "invalid operand to unary operator");
2949 break;
2951 case REALPART_EXPR:
2952 case IMAGPART_EXPR:
2953 case BIT_FIELD_REF:
2954 if (!is_gimple_reg_type (TREE_TYPE (t)))
2956 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2957 return t;
2960 if (TREE_CODE (t) == BIT_FIELD_REF)
2962 tree t0 = TREE_OPERAND (t, 0);
2963 tree t1 = TREE_OPERAND (t, 1);
2964 tree t2 = TREE_OPERAND (t, 2);
2965 if (!tree_fits_uhwi_p (t1)
2966 || !tree_fits_uhwi_p (t2))
2968 error ("invalid position or size operand to BIT_FIELD_REF");
2969 return t;
2971 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2972 && (TYPE_PRECISION (TREE_TYPE (t))
2973 != tree_to_uhwi (t1)))
2975 error ("integral result type precision does not match "
2976 "field size of BIT_FIELD_REF");
2977 return t;
2979 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2980 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2981 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2982 != tree_to_uhwi (t1)))
2984 error ("mode precision of non-integral result does not "
2985 "match field size of BIT_FIELD_REF");
2986 return t;
2988 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2989 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2990 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2992 error ("position plus size exceeds size of referenced object in "
2993 "BIT_FIELD_REF");
2994 return t;
2997 t = TREE_OPERAND (t, 0);
2999 /* Fall-through. */
3000 case COMPONENT_REF:
3001 case ARRAY_REF:
3002 case ARRAY_RANGE_REF:
3003 case VIEW_CONVERT_EXPR:
3004 /* We have a nest of references. Verify that each of the operands
3005 that determine where to reference is either a constant or a variable,
3006 verify that the base is valid, and then show we've already checked
3007 the subtrees. */
3008 while (handled_component_p (t))
3010 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3011 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3012 else if (TREE_CODE (t) == ARRAY_REF
3013 || TREE_CODE (t) == ARRAY_RANGE_REF)
3015 CHECK_OP (1, "invalid array index");
3016 if (TREE_OPERAND (t, 2))
3017 CHECK_OP (2, "invalid array lower bound");
3018 if (TREE_OPERAND (t, 3))
3019 CHECK_OP (3, "invalid array stride");
3021 else if (TREE_CODE (t) == BIT_FIELD_REF
3022 || TREE_CODE (t) == REALPART_EXPR
3023 || TREE_CODE (t) == IMAGPART_EXPR)
3025 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3026 "REALPART_EXPR");
3027 return t;
3030 t = TREE_OPERAND (t, 0);
3033 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3035 error ("invalid reference prefix");
3036 return t;
3038 *walk_subtrees = 0;
3039 break;
3040 case PLUS_EXPR:
3041 case MINUS_EXPR:
3042 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3043 POINTER_PLUS_EXPR. */
3044 if (POINTER_TYPE_P (TREE_TYPE (t)))
3046 error ("invalid operand to plus/minus, type is a pointer");
3047 return t;
3049 CHECK_OP (0, "invalid operand to binary operator");
3050 CHECK_OP (1, "invalid operand to binary operator");
3051 break;
3053 case POINTER_PLUS_EXPR:
3054 /* Check to make sure the first operand is a pointer or reference type. */
3055 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3057 error ("invalid operand to pointer plus, first operand is not a pointer");
3058 return t;
3060 /* Check to make sure the second operand is a ptrofftype. */
3061 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3063 error ("invalid operand to pointer plus, second operand is not an "
3064 "integer type of appropriate width");
3065 return t;
3067 /* FALLTHROUGH */
3068 case LT_EXPR:
3069 case LE_EXPR:
3070 case GT_EXPR:
3071 case GE_EXPR:
3072 case EQ_EXPR:
3073 case NE_EXPR:
3074 case UNORDERED_EXPR:
3075 case ORDERED_EXPR:
3076 case UNLT_EXPR:
3077 case UNLE_EXPR:
3078 case UNGT_EXPR:
3079 case UNGE_EXPR:
3080 case UNEQ_EXPR:
3081 case LTGT_EXPR:
3082 case MULT_EXPR:
3083 case TRUNC_DIV_EXPR:
3084 case CEIL_DIV_EXPR:
3085 case FLOOR_DIV_EXPR:
3086 case ROUND_DIV_EXPR:
3087 case TRUNC_MOD_EXPR:
3088 case CEIL_MOD_EXPR:
3089 case FLOOR_MOD_EXPR:
3090 case ROUND_MOD_EXPR:
3091 case RDIV_EXPR:
3092 case EXACT_DIV_EXPR:
3093 case MIN_EXPR:
3094 case MAX_EXPR:
3095 case LSHIFT_EXPR:
3096 case RSHIFT_EXPR:
3097 case LROTATE_EXPR:
3098 case RROTATE_EXPR:
3099 case BIT_IOR_EXPR:
3100 case BIT_XOR_EXPR:
3101 case BIT_AND_EXPR:
3102 CHECK_OP (0, "invalid operand to binary operator");
3103 CHECK_OP (1, "invalid operand to binary operator");
3104 break;
3106 case CONSTRUCTOR:
3107 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3108 *walk_subtrees = 0;
3109 break;
3111 case CASE_LABEL_EXPR:
3112 if (CASE_CHAIN (t))
3114 error ("invalid CASE_CHAIN");
3115 return t;
3117 break;
3119 default:
3120 break;
3122 return NULL;
3124 #undef CHECK_OP
3128 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3129 Returns true if there is an error, otherwise false. */
3131 static bool
3132 verify_types_in_gimple_min_lval (tree expr)
3134 tree op;
3136 if (is_gimple_id (expr))
3137 return false;
3139 if (TREE_CODE (expr) != TARGET_MEM_REF
3140 && TREE_CODE (expr) != MEM_REF)
3142 error ("invalid expression for min lvalue");
3143 return true;
3146 /* TARGET_MEM_REFs are strange beasts. */
3147 if (TREE_CODE (expr) == TARGET_MEM_REF)
3148 return false;
3150 op = TREE_OPERAND (expr, 0);
3151 if (!is_gimple_val (op))
3153 error ("invalid operand in indirect reference");
3154 debug_generic_stmt (op);
3155 return true;
3157 /* Memory references now generally can involve a value conversion. */
3159 return false;
3162 /* Verify if EXPR is a valid GIMPLE reference expression. If
3163 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3164 if there is an error, otherwise false. */
3166 static bool
3167 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3169 while (handled_component_p (expr))
3171 tree op = TREE_OPERAND (expr, 0);
3173 if (TREE_CODE (expr) == ARRAY_REF
3174 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3176 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3177 || (TREE_OPERAND (expr, 2)
3178 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3179 || (TREE_OPERAND (expr, 3)
3180 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3182 error ("invalid operands to array reference");
3183 debug_generic_stmt (expr);
3184 return true;
3188 /* Verify if the reference array element types are compatible. */
3189 if (TREE_CODE (expr) == ARRAY_REF
3190 && !useless_type_conversion_p (TREE_TYPE (expr),
3191 TREE_TYPE (TREE_TYPE (op))))
3193 error ("type mismatch in array reference");
3194 debug_generic_stmt (TREE_TYPE (expr));
3195 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3196 return true;
3198 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3199 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3200 TREE_TYPE (TREE_TYPE (op))))
3202 error ("type mismatch in array range reference");
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3204 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3205 return true;
3208 if ((TREE_CODE (expr) == REALPART_EXPR
3209 || TREE_CODE (expr) == IMAGPART_EXPR)
3210 && !useless_type_conversion_p (TREE_TYPE (expr),
3211 TREE_TYPE (TREE_TYPE (op))))
3213 error ("type mismatch in real/imagpart reference");
3214 debug_generic_stmt (TREE_TYPE (expr));
3215 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3216 return true;
3219 if (TREE_CODE (expr) == COMPONENT_REF
3220 && !useless_type_conversion_p (TREE_TYPE (expr),
3221 TREE_TYPE (TREE_OPERAND (expr, 1))))
3223 error ("type mismatch in component reference");
3224 debug_generic_stmt (TREE_TYPE (expr));
3225 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3226 return true;
3229 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3231 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3232 that their operand is not an SSA name or an invariant when
3233 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3234 bug). Otherwise there is nothing to verify, gross mismatches at
3235 most invoke undefined behavior. */
3236 if (require_lvalue
3237 && (TREE_CODE (op) == SSA_NAME
3238 || is_gimple_min_invariant (op)))
3240 error ("conversion of an SSA_NAME on the left hand side");
3241 debug_generic_stmt (expr);
3242 return true;
3244 else if (TREE_CODE (op) == SSA_NAME
3245 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3247 error ("conversion of register to a different size");
3248 debug_generic_stmt (expr);
3249 return true;
3251 else if (!handled_component_p (op))
3252 return false;
3255 expr = op;
3258 if (TREE_CODE (expr) == MEM_REF)
3260 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3262 error ("invalid address operand in MEM_REF");
3263 debug_generic_stmt (expr);
3264 return true;
3266 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3267 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3269 error ("invalid offset operand in MEM_REF");
3270 debug_generic_stmt (expr);
3271 return true;
3274 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3276 if (!TMR_BASE (expr)
3277 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3279 error ("invalid address operand in TARGET_MEM_REF");
3280 return true;
3282 if (!TMR_OFFSET (expr)
3283 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3284 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3286 error ("invalid offset operand in TARGET_MEM_REF");
3287 debug_generic_stmt (expr);
3288 return true;
3292 return ((require_lvalue || !is_gimple_min_invariant (expr))
3293 && verify_types_in_gimple_min_lval (expr));
3296 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3297 list of pointer-to types that is trivially convertible to DEST. */
3299 static bool
3300 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3302 tree src;
3304 if (!TYPE_POINTER_TO (src_obj))
3305 return true;
3307 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3308 if (useless_type_conversion_p (dest, src))
3309 return true;
3311 return false;
3314 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3315 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3317 static bool
3318 valid_fixed_convert_types_p (tree type1, tree type2)
3320 return (FIXED_POINT_TYPE_P (type1)
3321 && (INTEGRAL_TYPE_P (type2)
3322 || SCALAR_FLOAT_TYPE_P (type2)
3323 || FIXED_POINT_TYPE_P (type2)));
3326 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3327 is a problem, otherwise false. */
3329 static bool
3330 verify_gimple_call (gcall *stmt)
3332 tree fn = gimple_call_fn (stmt);
3333 tree fntype, fndecl;
3334 unsigned i;
3336 if (gimple_call_internal_p (stmt))
3338 if (fn)
3340 error ("gimple call has two targets");
3341 debug_generic_stmt (fn);
3342 return true;
3345 else
3347 if (!fn)
3349 error ("gimple call has no target");
3350 return true;
3354 if (fn && !is_gimple_call_addr (fn))
3356 error ("invalid function in gimple call");
3357 debug_generic_stmt (fn);
3358 return true;
3361 if (fn
3362 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3363 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3364 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3366 error ("non-function in gimple call");
3367 return true;
3370 fndecl = gimple_call_fndecl (stmt);
3371 if (fndecl
3372 && TREE_CODE (fndecl) == FUNCTION_DECL
3373 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3374 && !DECL_PURE_P (fndecl)
3375 && !TREE_READONLY (fndecl))
3377 error ("invalid pure const state for function");
3378 return true;
3381 tree lhs = gimple_call_lhs (stmt);
3382 if (lhs
3383 && (!is_gimple_lvalue (lhs)
3384 || verify_types_in_gimple_reference (lhs, true)))
3386 error ("invalid LHS in gimple call");
3387 return true;
3390 if (lhs
3391 && gimple_call_ctrl_altering_p (stmt)
3392 && gimple_call_noreturn_p (stmt)
3393 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3395 error ("LHS in noreturn call");
3396 return true;
3399 fntype = gimple_call_fntype (stmt);
3400 if (fntype
3401 && lhs
3402 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3403 /* ??? At least C++ misses conversions at assignments from
3404 void * call results.
3405 ??? Java is completely off. Especially with functions
3406 returning java.lang.Object.
3407 For now simply allow arbitrary pointer type conversions. */
3408 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3409 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3411 error ("invalid conversion in gimple call");
3412 debug_generic_stmt (TREE_TYPE (lhs));
3413 debug_generic_stmt (TREE_TYPE (fntype));
3414 return true;
3417 if (gimple_call_chain (stmt)
3418 && !is_gimple_val (gimple_call_chain (stmt)))
3420 error ("invalid static chain in gimple call");
3421 debug_generic_stmt (gimple_call_chain (stmt));
3422 return true;
3425 /* If there is a static chain argument, the call should either be
3426 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3427 if (gimple_call_chain (stmt)
3428 && fndecl
3429 && !DECL_STATIC_CHAIN (fndecl))
3431 error ("static chain with function that doesn%'t use one");
3432 return true;
3435 /* ??? The C frontend passes unpromoted arguments in case it
3436 didn't see a function declaration before the call. So for now
3437 leave the call arguments mostly unverified. Once we gimplify
3438 unit-at-a-time we have a chance to fix this. */
3440 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3442 tree arg = gimple_call_arg (stmt, i);
3443 if ((is_gimple_reg_type (TREE_TYPE (arg))
3444 && !is_gimple_val (arg))
3445 || (!is_gimple_reg_type (TREE_TYPE (arg))
3446 && !is_gimple_lvalue (arg)))
3448 error ("invalid argument to gimple call");
3449 debug_generic_expr (arg);
3450 return true;
3454 return false;
3457 /* Verifies the gimple comparison with the result type TYPE and
3458 the operands OP0 and OP1. */
3460 static bool
3461 verify_gimple_comparison (tree type, tree op0, tree op1)
3463 tree op0_type = TREE_TYPE (op0);
3464 tree op1_type = TREE_TYPE (op1);
3466 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3468 error ("invalid operands in gimple comparison");
3469 return true;
3472 /* For comparisons we do not have the operations type as the
3473 effective type the comparison is carried out in. Instead
3474 we require that either the first operand is trivially
3475 convertible into the second, or the other way around.
3476 Because we special-case pointers to void we allow
3477 comparisons of pointers with the same mode as well. */
3478 if (!useless_type_conversion_p (op0_type, op1_type)
3479 && !useless_type_conversion_p (op1_type, op0_type)
3480 && (!POINTER_TYPE_P (op0_type)
3481 || !POINTER_TYPE_P (op1_type)
3482 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3484 error ("mismatching comparison operand types");
3485 debug_generic_expr (op0_type);
3486 debug_generic_expr (op1_type);
3487 return true;
3490 /* The resulting type of a comparison may be an effective boolean type. */
3491 if (INTEGRAL_TYPE_P (type)
3492 && (TREE_CODE (type) == BOOLEAN_TYPE
3493 || TYPE_PRECISION (type) == 1))
3495 if (TREE_CODE (op0_type) == VECTOR_TYPE
3496 || TREE_CODE (op1_type) == VECTOR_TYPE)
3498 error ("vector comparison returning a boolean");
3499 debug_generic_expr (op0_type);
3500 debug_generic_expr (op1_type);
3501 return true;
3504 /* Or an integer vector type with the same size and element count
3505 as the comparison operand types. */
3506 else if (TREE_CODE (type) == VECTOR_TYPE
3507 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3509 if (TREE_CODE (op0_type) != VECTOR_TYPE
3510 || TREE_CODE (op1_type) != VECTOR_TYPE)
3512 error ("non-vector operands in vector comparison");
3513 debug_generic_expr (op0_type);
3514 debug_generic_expr (op1_type);
3515 return true;
3518 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3519 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3520 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3521 /* The result of a vector comparison is of signed
3522 integral type. */
3523 || TYPE_UNSIGNED (TREE_TYPE (type)))
3525 error ("invalid vector comparison resulting type");
3526 debug_generic_expr (type);
3527 return true;
3530 else
3532 error ("bogus comparison result type");
3533 debug_generic_expr (type);
3534 return true;
3537 return false;
3540 /* Verify a gimple assignment statement STMT with an unary rhs.
3541 Returns true if anything is wrong. */
3543 static bool
3544 verify_gimple_assign_unary (gassign *stmt)
3546 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3547 tree lhs = gimple_assign_lhs (stmt);
3548 tree lhs_type = TREE_TYPE (lhs);
3549 tree rhs1 = gimple_assign_rhs1 (stmt);
3550 tree rhs1_type = TREE_TYPE (rhs1);
3552 if (!is_gimple_reg (lhs))
3554 error ("non-register as LHS of unary operation");
3555 return true;
3558 if (!is_gimple_val (rhs1))
3560 error ("invalid operand in unary operation");
3561 return true;
3564 /* First handle conversions. */
3565 switch (rhs_code)
3567 CASE_CONVERT:
3569 /* Allow conversions from pointer type to integral type only if
3570 there is no sign or zero extension involved.
3571 For targets were the precision of ptrofftype doesn't match that
3572 of pointers we need to allow arbitrary conversions to ptrofftype. */
3573 if ((POINTER_TYPE_P (lhs_type)
3574 && INTEGRAL_TYPE_P (rhs1_type))
3575 || (POINTER_TYPE_P (rhs1_type)
3576 && INTEGRAL_TYPE_P (lhs_type)
3577 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3578 || ptrofftype_p (sizetype))))
3579 return false;
3581 /* Allow conversion from integral to offset type and vice versa. */
3582 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3583 && INTEGRAL_TYPE_P (rhs1_type))
3584 || (INTEGRAL_TYPE_P (lhs_type)
3585 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3586 return false;
3588 /* Otherwise assert we are converting between types of the
3589 same kind. */
3590 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3592 error ("invalid types in nop conversion");
3593 debug_generic_expr (lhs_type);
3594 debug_generic_expr (rhs1_type);
3595 return true;
3598 return false;
3601 case ADDR_SPACE_CONVERT_EXPR:
3603 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3604 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3605 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3607 error ("invalid types in address space conversion");
3608 debug_generic_expr (lhs_type);
3609 debug_generic_expr (rhs1_type);
3610 return true;
3613 return false;
3616 case FIXED_CONVERT_EXPR:
3618 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3619 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3621 error ("invalid types in fixed-point conversion");
3622 debug_generic_expr (lhs_type);
3623 debug_generic_expr (rhs1_type);
3624 return true;
3627 return false;
3630 case FLOAT_EXPR:
3632 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3633 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3634 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3636 error ("invalid types in conversion to floating point");
3637 debug_generic_expr (lhs_type);
3638 debug_generic_expr (rhs1_type);
3639 return true;
3642 return false;
3645 case FIX_TRUNC_EXPR:
3647 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3648 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3649 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3651 error ("invalid types in conversion to integer");
3652 debug_generic_expr (lhs_type);
3653 debug_generic_expr (rhs1_type);
3654 return true;
3657 return false;
3659 case REDUC_MAX_EXPR:
3660 case REDUC_MIN_EXPR:
3661 case REDUC_PLUS_EXPR:
3662 if (!VECTOR_TYPE_P (rhs1_type)
3663 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3665 error ("reduction should convert from vector to element type");
3666 debug_generic_expr (lhs_type);
3667 debug_generic_expr (rhs1_type);
3668 return true;
3670 return false;
3672 case VEC_UNPACK_HI_EXPR:
3673 case VEC_UNPACK_LO_EXPR:
3674 case VEC_UNPACK_FLOAT_HI_EXPR:
3675 case VEC_UNPACK_FLOAT_LO_EXPR:
3676 /* FIXME. */
3677 return false;
3679 case NEGATE_EXPR:
3680 case ABS_EXPR:
3681 case BIT_NOT_EXPR:
3682 case PAREN_EXPR:
3683 case CONJ_EXPR:
3684 break;
3686 default:
3687 gcc_unreachable ();
3690 /* For the remaining codes assert there is no conversion involved. */
3691 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3693 error ("non-trivial conversion in unary operation");
3694 debug_generic_expr (lhs_type);
3695 debug_generic_expr (rhs1_type);
3696 return true;
3699 return false;
3702 /* Verify a gimple assignment statement STMT with a binary rhs.
3703 Returns true if anything is wrong. */
3705 static bool
3706 verify_gimple_assign_binary (gassign *stmt)
3708 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3709 tree lhs = gimple_assign_lhs (stmt);
3710 tree lhs_type = TREE_TYPE (lhs);
3711 tree rhs1 = gimple_assign_rhs1 (stmt);
3712 tree rhs1_type = TREE_TYPE (rhs1);
3713 tree rhs2 = gimple_assign_rhs2 (stmt);
3714 tree rhs2_type = TREE_TYPE (rhs2);
3716 if (!is_gimple_reg (lhs))
3718 error ("non-register as LHS of binary operation");
3719 return true;
3722 if (!is_gimple_val (rhs1)
3723 || !is_gimple_val (rhs2))
3725 error ("invalid operands in binary operation");
3726 return true;
3729 /* First handle operations that involve different types. */
3730 switch (rhs_code)
3732 case COMPLEX_EXPR:
3734 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3735 || !(INTEGRAL_TYPE_P (rhs1_type)
3736 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3737 || !(INTEGRAL_TYPE_P (rhs2_type)
3738 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3740 error ("type mismatch in complex expression");
3741 debug_generic_expr (lhs_type);
3742 debug_generic_expr (rhs1_type);
3743 debug_generic_expr (rhs2_type);
3744 return true;
3747 return false;
3750 case LSHIFT_EXPR:
3751 case RSHIFT_EXPR:
3752 case LROTATE_EXPR:
3753 case RROTATE_EXPR:
3755 /* Shifts and rotates are ok on integral types, fixed point
3756 types and integer vector types. */
3757 if ((!INTEGRAL_TYPE_P (rhs1_type)
3758 && !FIXED_POINT_TYPE_P (rhs1_type)
3759 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3760 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3761 || (!INTEGRAL_TYPE_P (rhs2_type)
3762 /* Vector shifts of vectors are also ok. */
3763 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3764 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3765 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3766 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3767 || !useless_type_conversion_p (lhs_type, rhs1_type))
3769 error ("type mismatch in shift expression");
3770 debug_generic_expr (lhs_type);
3771 debug_generic_expr (rhs1_type);
3772 debug_generic_expr (rhs2_type);
3773 return true;
3776 return false;
3779 case WIDEN_LSHIFT_EXPR:
3781 if (!INTEGRAL_TYPE_P (lhs_type)
3782 || !INTEGRAL_TYPE_P (rhs1_type)
3783 || TREE_CODE (rhs2) != INTEGER_CST
3784 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3786 error ("type mismatch in widening vector shift expression");
3787 debug_generic_expr (lhs_type);
3788 debug_generic_expr (rhs1_type);
3789 debug_generic_expr (rhs2_type);
3790 return true;
3793 return false;
3796 case VEC_WIDEN_LSHIFT_HI_EXPR:
3797 case VEC_WIDEN_LSHIFT_LO_EXPR:
3799 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3800 || TREE_CODE (lhs_type) != VECTOR_TYPE
3801 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3802 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3803 || TREE_CODE (rhs2) != INTEGER_CST
3804 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3805 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3807 error ("type mismatch in widening vector shift expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 debug_generic_expr (rhs2_type);
3811 return true;
3814 return false;
3817 case PLUS_EXPR:
3818 case MINUS_EXPR:
3820 tree lhs_etype = lhs_type;
3821 tree rhs1_etype = rhs1_type;
3822 tree rhs2_etype = rhs2_type;
3823 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3825 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3826 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3828 error ("invalid non-vector operands to vector valued plus");
3829 return true;
3831 lhs_etype = TREE_TYPE (lhs_type);
3832 rhs1_etype = TREE_TYPE (rhs1_type);
3833 rhs2_etype = TREE_TYPE (rhs2_type);
3835 if (POINTER_TYPE_P (lhs_etype)
3836 || POINTER_TYPE_P (rhs1_etype)
3837 || POINTER_TYPE_P (rhs2_etype))
3839 error ("invalid (pointer) operands to plus/minus");
3840 return true;
3843 /* Continue with generic binary expression handling. */
3844 break;
3847 case POINTER_PLUS_EXPR:
3849 if (!POINTER_TYPE_P (rhs1_type)
3850 || !useless_type_conversion_p (lhs_type, rhs1_type)
3851 || !ptrofftype_p (rhs2_type))
3853 error ("type mismatch in pointer plus expression");
3854 debug_generic_stmt (lhs_type);
3855 debug_generic_stmt (rhs1_type);
3856 debug_generic_stmt (rhs2_type);
3857 return true;
3860 return false;
3863 case TRUTH_ANDIF_EXPR:
3864 case TRUTH_ORIF_EXPR:
3865 case TRUTH_AND_EXPR:
3866 case TRUTH_OR_EXPR:
3867 case TRUTH_XOR_EXPR:
3869 gcc_unreachable ();
3871 case LT_EXPR:
3872 case LE_EXPR:
3873 case GT_EXPR:
3874 case GE_EXPR:
3875 case EQ_EXPR:
3876 case NE_EXPR:
3877 case UNORDERED_EXPR:
3878 case ORDERED_EXPR:
3879 case UNLT_EXPR:
3880 case UNLE_EXPR:
3881 case UNGT_EXPR:
3882 case UNGE_EXPR:
3883 case UNEQ_EXPR:
3884 case LTGT_EXPR:
3885 /* Comparisons are also binary, but the result type is not
3886 connected to the operand types. */
3887 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3889 case WIDEN_MULT_EXPR:
3890 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3891 return true;
3892 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3893 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3895 case WIDEN_SUM_EXPR:
3896 case VEC_WIDEN_MULT_HI_EXPR:
3897 case VEC_WIDEN_MULT_LO_EXPR:
3898 case VEC_WIDEN_MULT_EVEN_EXPR:
3899 case VEC_WIDEN_MULT_ODD_EXPR:
3900 case VEC_PACK_TRUNC_EXPR:
3901 case VEC_PACK_SAT_EXPR:
3902 case VEC_PACK_FIX_TRUNC_EXPR:
3903 /* FIXME. */
3904 return false;
3906 case MULT_EXPR:
3907 case MULT_HIGHPART_EXPR:
3908 case TRUNC_DIV_EXPR:
3909 case CEIL_DIV_EXPR:
3910 case FLOOR_DIV_EXPR:
3911 case ROUND_DIV_EXPR:
3912 case TRUNC_MOD_EXPR:
3913 case CEIL_MOD_EXPR:
3914 case FLOOR_MOD_EXPR:
3915 case ROUND_MOD_EXPR:
3916 case RDIV_EXPR:
3917 case EXACT_DIV_EXPR:
3918 case MIN_EXPR:
3919 case MAX_EXPR:
3920 case BIT_IOR_EXPR:
3921 case BIT_XOR_EXPR:
3922 case BIT_AND_EXPR:
3923 /* Continue with generic binary expression handling. */
3924 break;
3926 default:
3927 gcc_unreachable ();
3930 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3931 || !useless_type_conversion_p (lhs_type, rhs2_type))
3933 error ("type mismatch in binary expression");
3934 debug_generic_stmt (lhs_type);
3935 debug_generic_stmt (rhs1_type);
3936 debug_generic_stmt (rhs2_type);
3937 return true;
3940 return false;
3943 /* Verify a gimple assignment statement STMT with a ternary rhs.
3944 Returns true if anything is wrong. */
3946 static bool
3947 verify_gimple_assign_ternary (gassign *stmt)
3949 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3950 tree lhs = gimple_assign_lhs (stmt);
3951 tree lhs_type = TREE_TYPE (lhs);
3952 tree rhs1 = gimple_assign_rhs1 (stmt);
3953 tree rhs1_type = TREE_TYPE (rhs1);
3954 tree rhs2 = gimple_assign_rhs2 (stmt);
3955 tree rhs2_type = TREE_TYPE (rhs2);
3956 tree rhs3 = gimple_assign_rhs3 (stmt);
3957 tree rhs3_type = TREE_TYPE (rhs3);
3959 if (!is_gimple_reg (lhs))
3961 error ("non-register as LHS of ternary operation");
3962 return true;
3965 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3966 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3967 || !is_gimple_val (rhs2)
3968 || !is_gimple_val (rhs3))
3970 error ("invalid operands in ternary operation");
3971 return true;
3974 /* First handle operations that involve different types. */
3975 switch (rhs_code)
3977 case WIDEN_MULT_PLUS_EXPR:
3978 case WIDEN_MULT_MINUS_EXPR:
3979 if ((!INTEGRAL_TYPE_P (rhs1_type)
3980 && !FIXED_POINT_TYPE_P (rhs1_type))
3981 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3982 || !useless_type_conversion_p (lhs_type, rhs3_type)
3983 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3984 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3986 error ("type mismatch in widening multiply-accumulate expression");
3987 debug_generic_expr (lhs_type);
3988 debug_generic_expr (rhs1_type);
3989 debug_generic_expr (rhs2_type);
3990 debug_generic_expr (rhs3_type);
3991 return true;
3993 break;
3995 case FMA_EXPR:
3996 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3997 || !useless_type_conversion_p (lhs_type, rhs2_type)
3998 || !useless_type_conversion_p (lhs_type, rhs3_type))
4000 error ("type mismatch in fused multiply-add expression");
4001 debug_generic_expr (lhs_type);
4002 debug_generic_expr (rhs1_type);
4003 debug_generic_expr (rhs2_type);
4004 debug_generic_expr (rhs3_type);
4005 return true;
4007 break;
4009 case VEC_COND_EXPR:
4010 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
4011 || TYPE_SIGN (rhs1_type) != SIGNED
4012 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
4013 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4014 != TYPE_VECTOR_SUBPARTS (lhs_type))
4016 error ("the first argument of a VEC_COND_EXPR must be of a signed "
4017 "integral vector type of the same size and number of "
4018 "elements as the result");
4019 debug_generic_expr (lhs_type);
4020 debug_generic_expr (rhs1_type);
4021 return true;
4023 /* Fallthrough. */
4024 case COND_EXPR:
4025 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4026 || !useless_type_conversion_p (lhs_type, rhs3_type))
4028 error ("type mismatch in conditional expression");
4029 debug_generic_expr (lhs_type);
4030 debug_generic_expr (rhs2_type);
4031 debug_generic_expr (rhs3_type);
4032 return true;
4034 break;
4036 case VEC_PERM_EXPR:
4037 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4038 || !useless_type_conversion_p (lhs_type, rhs2_type))
4040 error ("type mismatch in vector permute expression");
4041 debug_generic_expr (lhs_type);
4042 debug_generic_expr (rhs1_type);
4043 debug_generic_expr (rhs2_type);
4044 debug_generic_expr (rhs3_type);
4045 return true;
4048 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4049 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4050 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4052 error ("vector types expected in vector permute expression");
4053 debug_generic_expr (lhs_type);
4054 debug_generic_expr (rhs1_type);
4055 debug_generic_expr (rhs2_type);
4056 debug_generic_expr (rhs3_type);
4057 return true;
4060 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4061 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4062 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4063 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4064 != TYPE_VECTOR_SUBPARTS (lhs_type))
4066 error ("vectors with different element number found "
4067 "in vector permute expression");
4068 debug_generic_expr (lhs_type);
4069 debug_generic_expr (rhs1_type);
4070 debug_generic_expr (rhs2_type);
4071 debug_generic_expr (rhs3_type);
4072 return true;
4075 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4076 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4077 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4079 error ("invalid mask type in vector permute expression");
4080 debug_generic_expr (lhs_type);
4081 debug_generic_expr (rhs1_type);
4082 debug_generic_expr (rhs2_type);
4083 debug_generic_expr (rhs3_type);
4084 return true;
4087 return false;
4089 case SAD_EXPR:
4090 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4091 || !useless_type_conversion_p (lhs_type, rhs3_type)
4092 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4093 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4094 > GET_MODE_BITSIZE (GET_MODE_INNER
4095 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4097 error ("type mismatch in sad expression");
4098 debug_generic_expr (lhs_type);
4099 debug_generic_expr (rhs1_type);
4100 debug_generic_expr (rhs2_type);
4101 debug_generic_expr (rhs3_type);
4102 return true;
4105 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4106 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4107 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4109 error ("vector types expected in sad expression");
4110 debug_generic_expr (lhs_type);
4111 debug_generic_expr (rhs1_type);
4112 debug_generic_expr (rhs2_type);
4113 debug_generic_expr (rhs3_type);
4114 return true;
4117 return false;
4119 case DOT_PROD_EXPR:
4120 case REALIGN_LOAD_EXPR:
4121 /* FIXME. */
4122 return false;
4124 default:
4125 gcc_unreachable ();
4127 return false;
4130 /* Verify a gimple assignment statement STMT with a single rhs.
4131 Returns true if anything is wrong. */
4133 static bool
4134 verify_gimple_assign_single (gassign *stmt)
4136 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4137 tree lhs = gimple_assign_lhs (stmt);
4138 tree lhs_type = TREE_TYPE (lhs);
4139 tree rhs1 = gimple_assign_rhs1 (stmt);
4140 tree rhs1_type = TREE_TYPE (rhs1);
4141 bool res = false;
4143 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4145 error ("non-trivial conversion at assignment");
4146 debug_generic_expr (lhs_type);
4147 debug_generic_expr (rhs1_type);
4148 return true;
4151 if (gimple_clobber_p (stmt)
4152 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4154 error ("non-decl/MEM_REF LHS in clobber statement");
4155 debug_generic_expr (lhs);
4156 return true;
4159 if (handled_component_p (lhs)
4160 || TREE_CODE (lhs) == MEM_REF
4161 || TREE_CODE (lhs) == TARGET_MEM_REF)
4162 res |= verify_types_in_gimple_reference (lhs, true);
4164 /* Special codes we cannot handle via their class. */
4165 switch (rhs_code)
4167 case ADDR_EXPR:
4169 tree op = TREE_OPERAND (rhs1, 0);
4170 if (!is_gimple_addressable (op))
4172 error ("invalid operand in unary expression");
4173 return true;
4176 /* Technically there is no longer a need for matching types, but
4177 gimple hygiene asks for this check. In LTO we can end up
4178 combining incompatible units and thus end up with addresses
4179 of globals that change their type to a common one. */
4180 if (!in_lto_p
4181 && !types_compatible_p (TREE_TYPE (op),
4182 TREE_TYPE (TREE_TYPE (rhs1)))
4183 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4184 TREE_TYPE (op)))
4186 error ("type mismatch in address expression");
4187 debug_generic_stmt (TREE_TYPE (rhs1));
4188 debug_generic_stmt (TREE_TYPE (op));
4189 return true;
4192 return verify_types_in_gimple_reference (op, true);
4195 /* tcc_reference */
4196 case INDIRECT_REF:
4197 error ("INDIRECT_REF in gimple IL");
4198 return true;
4200 case COMPONENT_REF:
4201 case BIT_FIELD_REF:
4202 case ARRAY_REF:
4203 case ARRAY_RANGE_REF:
4204 case VIEW_CONVERT_EXPR:
4205 case REALPART_EXPR:
4206 case IMAGPART_EXPR:
4207 case TARGET_MEM_REF:
4208 case MEM_REF:
4209 if (!is_gimple_reg (lhs)
4210 && is_gimple_reg_type (TREE_TYPE (lhs)))
4212 error ("invalid rhs for gimple memory store");
4213 debug_generic_stmt (lhs);
4214 debug_generic_stmt (rhs1);
4215 return true;
4217 return res || verify_types_in_gimple_reference (rhs1, false);
4219 /* tcc_constant */
4220 case SSA_NAME:
4221 case INTEGER_CST:
4222 case REAL_CST:
4223 case FIXED_CST:
4224 case COMPLEX_CST:
4225 case VECTOR_CST:
4226 case STRING_CST:
4227 return res;
4229 /* tcc_declaration */
4230 case CONST_DECL:
4231 return res;
4232 case VAR_DECL:
4233 case PARM_DECL:
4234 if (!is_gimple_reg (lhs)
4235 && !is_gimple_reg (rhs1)
4236 && is_gimple_reg_type (TREE_TYPE (lhs)))
4238 error ("invalid rhs for gimple memory store");
4239 debug_generic_stmt (lhs);
4240 debug_generic_stmt (rhs1);
4241 return true;
4243 return res;
4245 case CONSTRUCTOR:
4246 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4248 unsigned int i;
4249 tree elt_i, elt_v, elt_t = NULL_TREE;
4251 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4252 return res;
4253 /* For vector CONSTRUCTORs we require that either it is empty
4254 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4255 (then the element count must be correct to cover the whole
4256 outer vector and index must be NULL on all elements, or it is
4257 a CONSTRUCTOR of scalar elements, where we as an exception allow
4258 smaller number of elements (assuming zero filling) and
4259 consecutive indexes as compared to NULL indexes (such
4260 CONSTRUCTORs can appear in the IL from FEs). */
4261 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4263 if (elt_t == NULL_TREE)
4265 elt_t = TREE_TYPE (elt_v);
4266 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4268 tree elt_t = TREE_TYPE (elt_v);
4269 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4270 TREE_TYPE (elt_t)))
4272 error ("incorrect type of vector CONSTRUCTOR"
4273 " elements");
4274 debug_generic_stmt (rhs1);
4275 return true;
4277 else if (CONSTRUCTOR_NELTS (rhs1)
4278 * TYPE_VECTOR_SUBPARTS (elt_t)
4279 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4281 error ("incorrect number of vector CONSTRUCTOR"
4282 " elements");
4283 debug_generic_stmt (rhs1);
4284 return true;
4287 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4288 elt_t))
4290 error ("incorrect type of vector CONSTRUCTOR elements");
4291 debug_generic_stmt (rhs1);
4292 return true;
4294 else if (CONSTRUCTOR_NELTS (rhs1)
4295 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4297 error ("incorrect number of vector CONSTRUCTOR elements");
4298 debug_generic_stmt (rhs1);
4299 return true;
4302 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4304 error ("incorrect type of vector CONSTRUCTOR elements");
4305 debug_generic_stmt (rhs1);
4306 return true;
4308 if (elt_i != NULL_TREE
4309 && (TREE_CODE (elt_t) == VECTOR_TYPE
4310 || TREE_CODE (elt_i) != INTEGER_CST
4311 || compare_tree_int (elt_i, i) != 0))
4313 error ("vector CONSTRUCTOR with non-NULL element index");
4314 debug_generic_stmt (rhs1);
4315 return true;
4317 if (!is_gimple_val (elt_v))
4319 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4320 debug_generic_stmt (rhs1);
4321 return true;
4325 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4327 error ("non-vector CONSTRUCTOR with elements");
4328 debug_generic_stmt (rhs1);
4329 return true;
4331 return res;
4332 case OBJ_TYPE_REF:
4333 case ASSERT_EXPR:
4334 case WITH_SIZE_EXPR:
4335 /* FIXME. */
4336 return res;
4338 default:;
4341 return res;
4344 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4345 is a problem, otherwise false. */
4347 static bool
4348 verify_gimple_assign (gassign *stmt)
4350 switch (gimple_assign_rhs_class (stmt))
4352 case GIMPLE_SINGLE_RHS:
4353 return verify_gimple_assign_single (stmt);
4355 case GIMPLE_UNARY_RHS:
4356 return verify_gimple_assign_unary (stmt);
4358 case GIMPLE_BINARY_RHS:
4359 return verify_gimple_assign_binary (stmt);
4361 case GIMPLE_TERNARY_RHS:
4362 return verify_gimple_assign_ternary (stmt);
4364 default:
4365 gcc_unreachable ();
4369 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4370 is a problem, otherwise false. */
4372 static bool
4373 verify_gimple_return (greturn *stmt)
4375 tree op = gimple_return_retval (stmt);
4376 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4378 /* We cannot test for present return values as we do not fix up missing
4379 return values from the original source. */
4380 if (op == NULL)
4381 return false;
4383 if (!is_gimple_val (op)
4384 && TREE_CODE (op) != RESULT_DECL)
4386 error ("invalid operand in return statement");
4387 debug_generic_stmt (op);
4388 return true;
4391 if ((TREE_CODE (op) == RESULT_DECL
4392 && DECL_BY_REFERENCE (op))
4393 || (TREE_CODE (op) == SSA_NAME
4394 && SSA_NAME_VAR (op)
4395 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4396 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4397 op = TREE_TYPE (op);
4399 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4401 error ("invalid conversion in return statement");
4402 debug_generic_stmt (restype);
4403 debug_generic_stmt (TREE_TYPE (op));
4404 return true;
4407 return false;
4411 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4412 is a problem, otherwise false. */
4414 static bool
4415 verify_gimple_goto (ggoto *stmt)
4417 tree dest = gimple_goto_dest (stmt);
4419 /* ??? We have two canonical forms of direct goto destinations, a
4420 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4421 if (TREE_CODE (dest) != LABEL_DECL
4422 && (!is_gimple_val (dest)
4423 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4425 error ("goto destination is neither a label nor a pointer");
4426 return true;
4429 return false;
4432 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4433 is a problem, otherwise false. */
4435 static bool
4436 verify_gimple_switch (gswitch *stmt)
4438 unsigned int i, n;
4439 tree elt, prev_upper_bound = NULL_TREE;
4440 tree index_type, elt_type = NULL_TREE;
4442 if (!is_gimple_val (gimple_switch_index (stmt)))
4444 error ("invalid operand to switch statement");
4445 debug_generic_stmt (gimple_switch_index (stmt));
4446 return true;
4449 index_type = TREE_TYPE (gimple_switch_index (stmt));
4450 if (! INTEGRAL_TYPE_P (index_type))
4452 error ("non-integral type switch statement");
4453 debug_generic_expr (index_type);
4454 return true;
4457 elt = gimple_switch_label (stmt, 0);
4458 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4460 error ("invalid default case label in switch statement");
4461 debug_generic_expr (elt);
4462 return true;
4465 n = gimple_switch_num_labels (stmt);
4466 for (i = 1; i < n; i++)
4468 elt = gimple_switch_label (stmt, i);
4470 if (! CASE_LOW (elt))
4472 error ("invalid case label in switch statement");
4473 debug_generic_expr (elt);
4474 return true;
4476 if (CASE_HIGH (elt)
4477 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4479 error ("invalid case range in switch statement");
4480 debug_generic_expr (elt);
4481 return true;
4484 if (elt_type)
4486 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4487 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4489 error ("type mismatch for case label in switch statement");
4490 debug_generic_expr (elt);
4491 return true;
4494 else
4496 elt_type = TREE_TYPE (CASE_LOW (elt));
4497 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4499 error ("type precision mismatch in switch statement");
4500 return true;
4504 if (prev_upper_bound)
4506 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4508 error ("case labels not sorted in switch statement");
4509 return true;
4513 prev_upper_bound = CASE_HIGH (elt);
4514 if (! prev_upper_bound)
4515 prev_upper_bound = CASE_LOW (elt);
4518 return false;
4521 /* Verify a gimple debug statement STMT.
4522 Returns true if anything is wrong. */
4524 static bool
4525 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4527 /* There isn't much that could be wrong in a gimple debug stmt. A
4528 gimple debug bind stmt, for example, maps a tree, that's usually
4529 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4530 component or member of an aggregate type, to another tree, that
4531 can be an arbitrary expression. These stmts expand into debug
4532 insns, and are converted to debug notes by var-tracking.c. */
4533 return false;
4536 /* Verify a gimple label statement STMT.
4537 Returns true if anything is wrong. */
4539 static bool
4540 verify_gimple_label (glabel *stmt)
4542 tree decl = gimple_label_label (stmt);
4543 int uid;
4544 bool err = false;
4546 if (TREE_CODE (decl) != LABEL_DECL)
4547 return true;
4548 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4549 && DECL_CONTEXT (decl) != current_function_decl)
4551 error ("label's context is not the current function decl");
4552 err |= true;
4555 uid = LABEL_DECL_UID (decl);
4556 if (cfun->cfg
4557 && (uid == -1
4558 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4560 error ("incorrect entry in label_to_block_map");
4561 err |= true;
4564 uid = EH_LANDING_PAD_NR (decl);
4565 if (uid)
4567 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4568 if (decl != lp->post_landing_pad)
4570 error ("incorrect setting of landing pad number");
4571 err |= true;
4575 return err;
4578 /* Verify a gimple cond statement STMT.
4579 Returns true if anything is wrong. */
4581 static bool
4582 verify_gimple_cond (gcond *stmt)
4584 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4586 error ("invalid comparison code in gimple cond");
4587 return true;
4589 if (!(!gimple_cond_true_label (stmt)
4590 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4591 || !(!gimple_cond_false_label (stmt)
4592 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4594 error ("invalid labels in gimple cond");
4595 return true;
4598 return verify_gimple_comparison (boolean_type_node,
4599 gimple_cond_lhs (stmt),
4600 gimple_cond_rhs (stmt));
4603 /* Verify the GIMPLE statement STMT. Returns true if there is an
4604 error, otherwise false. */
4606 static bool
4607 verify_gimple_stmt (gimple stmt)
4609 switch (gimple_code (stmt))
4611 case GIMPLE_ASSIGN:
4612 return verify_gimple_assign (as_a <gassign *> (stmt));
4614 case GIMPLE_LABEL:
4615 return verify_gimple_label (as_a <glabel *> (stmt));
4617 case GIMPLE_CALL:
4618 return verify_gimple_call (as_a <gcall *> (stmt));
4620 case GIMPLE_COND:
4621 return verify_gimple_cond (as_a <gcond *> (stmt));
4623 case GIMPLE_GOTO:
4624 return verify_gimple_goto (as_a <ggoto *> (stmt));
4626 case GIMPLE_SWITCH:
4627 return verify_gimple_switch (as_a <gswitch *> (stmt));
4629 case GIMPLE_RETURN:
4630 return verify_gimple_return (as_a <greturn *> (stmt));
4632 case GIMPLE_ASM:
4633 return false;
4635 case GIMPLE_TRANSACTION:
4636 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4638 /* Tuples that do not have tree operands. */
4639 case GIMPLE_NOP:
4640 case GIMPLE_PREDICT:
4641 case GIMPLE_RESX:
4642 case GIMPLE_EH_DISPATCH:
4643 case GIMPLE_EH_MUST_NOT_THROW:
4644 return false;
4646 CASE_GIMPLE_OMP:
4647 /* OpenMP directives are validated by the FE and never operated
4648 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4649 non-gimple expressions when the main index variable has had
4650 its address taken. This does not affect the loop itself
4651 because the header of an GIMPLE_OMP_FOR is merely used to determine
4652 how to setup the parallel iteration. */
4653 return false;
4655 case GIMPLE_DEBUG:
4656 return verify_gimple_debug (stmt);
4658 default:
4659 gcc_unreachable ();
4663 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4664    and false otherwise. */
4666 static bool
4667 verify_gimple_phi (gimple phi)
4669   bool err = false;
4670   unsigned i;
4671   tree phi_result = gimple_phi_result (phi);
4672   bool virtual_p;
  /* A PHI must have a result; bail out immediately since the checks
     below dereference it.  */
4674   if (!phi_result)
4676       error ("invalid PHI result");
4677       return true;
  /* The result must be an SSA name; a virtual PHI result must in
     addition be a version of the function's single virtual operand.  */
4680   virtual_p = virtual_operand_p (phi_result);
4681   if (TREE_CODE (phi_result) != SSA_NAME
4682       || (virtual_p
4683 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4685       error ("invalid PHI result");
4686       err = true;
  /* Check every argument: it must be present, agree with the result on
     virtual-ness, and (for real operands) be a gimple value.  */
4689   for (i = 0; i < gimple_phi_num_args (phi); i++)
4691       tree t = gimple_phi_arg_def (phi, i);
4693       if (!t)
4695 	  error ("missing PHI def");
4696 	  err |= true;
4697 	  continue;
4699       /* Addressable variables do have SSA_NAMEs but they
4700 	 are not considered gimple values.  */
4701       else if ((TREE_CODE (t) == SSA_NAME
4702 		&& virtual_p != virtual_operand_p (t))
4703 	       || (virtual_p
4704 		   && (TREE_CODE (t) != SSA_NAME
4705 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4706 	       || (!virtual_p
4707 		   && !is_gimple_val (t)))
4709 	  error ("invalid PHI argument");
4710 	  debug_generic_expr (t);
4711 	  err |= true;
  /* With type checking enabled, each argument's type must also be
     trivially convertible to the result's type.  */
4713 #ifdef ENABLE_TYPES_CHECKING
4714       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4716 	  error ("incompatible types in PHI argument %u", i);
4717 	  debug_generic_stmt (TREE_TYPE (phi_result));
4718 	  debug_generic_stmt (TREE_TYPE (t));
4719 	  err |= true;
4721 #endif
4724   return err;
4727 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4729 static bool
4730 verify_gimple_in_seq_2 (gimple_seq stmts)
4732   gimple_stmt_iterator ittr;
4733   bool err = false;
4735   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4737       gimple stmt = gsi_stmt (ittr);
  /* Statements that carry nested sequences are verified by recursing
     into each sub-sequence; everything else goes through the generic
     per-statement verifier in the default case.  */
4739       switch (gimple_code (stmt))
4741 	case GIMPLE_BIND:
4742 	  err |= verify_gimple_in_seq_2 (
4743                    gimple_bind_body (as_a <gbind *> (stmt)));
4744 	  break;
4746 	case GIMPLE_TRY:
4747 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4748 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4749 	  break;
4751 	case GIMPLE_EH_FILTER:
4752 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4753 	  break;
4755 	case GIMPLE_EH_ELSE:
4757 	    geh_else *eh_else = as_a <geh_else *> (stmt);
4758 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4759 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4761 	  break;
4763 	case GIMPLE_CATCH:
4764 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4765 					   as_a <gcatch *> (stmt)));
4766 	  break;
4768 	case GIMPLE_TRANSACTION:
4769 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4770 	  break;
4772 	default:
4774 	    bool err2 = verify_gimple_stmt (stmt);
	    /* Dump the offending statement so the failure is actionable.  */
4775 	    if (err2)
4776 	      debug_gimple_stmt (stmt);
4777 	    err |= err2;
4782   return err;
4785 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4786 is a problem, otherwise false. */
4788 static bool
4789 verify_gimple_transaction (gtransaction *stmt)
4791 tree lab = gimple_transaction_label (stmt);
4792 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4793 return true;
4794 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4798 /* Verify the GIMPLE statements inside the statement list STMTS. */
4800 DEBUG_FUNCTION void
4801 verify_gimple_in_seq (gimple_seq stmts)
4803 timevar_push (TV_TREE_STMT_VERIFY);
4804 if (verify_gimple_in_seq_2 (stmts))
4805 internal_error ("verify_gimple failed");
4806 timevar_pop (TV_TREE_STMT_VERIFY);
4809 /* Return true when the T can be shared. */
4811 static bool
4812 tree_node_can_be_shared (tree t)
4814 if (IS_TYPE_OR_DECL_P (t)
4815 || is_gimple_min_invariant (t)
4816 || TREE_CODE (t) == SSA_NAME
4817 || t == error_mark_node
4818 || TREE_CODE (t) == IDENTIFIER_NODE)
4819 return true;
4821 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4822 return true;
4824 if (DECL_P (t))
4825 return true;
4827 return false;
4830 /* Called via walk_tree. Verify tree sharing. */
4832 static tree
4833 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4835 hash_set<void *> *visited = (hash_set<void *> *) data;
4837 if (tree_node_can_be_shared (*tp))
4839 *walk_subtrees = false;
4840 return NULL;
4843 if (visited->add (*tp))
4844 return *tp;
4846 return NULL;
4849 /* Called via walk_gimple_stmt. Verify tree sharing. */
4851 static tree
4852 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4854 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4855 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4858 static bool eh_error_found;
4859 bool
4860 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4861 hash_set<gimple> *visited)
4863 if (!visited->contains (stmt))
4865 error ("dead STMT in EH table");
4866 debug_gimple_stmt (stmt);
4867 eh_error_found = true;
4869 return true;
4872 /* Verify if the location LOCs block is in BLOCKS. */
4874 static bool
4875 verify_location (hash_set<tree> *blocks, location_t loc)
4877 tree block = LOCATION_BLOCK (loc);
4878 if (block != NULL_TREE
4879 && !blocks->contains (block))
4881 error ("location references block not in block tree");
4882 return true;
4884 if (block != NULL_TREE)
4885 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4886 return false;
4889 /* Called via walk_tree. Verify that expressions have no blocks. */
4891 static tree
4892 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4894 if (!EXPR_P (*tp))
4896 *walk_subtrees = false;
4897 return NULL;
4900 location_t loc = EXPR_LOCATION (*tp);
4901 if (LOCATION_BLOCK (loc) != NULL)
4902 return *tp;
4904 return NULL;
4907 /* Called via walk_tree.  Verify locations of expressions.  */
4909 static tree
4910 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4912   hash_set<tree> *blocks = (hash_set<tree> *) data;
  /* DEBUG_EXPRs live outside the block tree, so the expressions they
     point to must not carry block-qualified locations.  */
4914   if (TREE_CODE (*tp) == VAR_DECL
4915       && DECL_HAS_DEBUG_EXPR_P (*tp))
4917       tree t = DECL_DEBUG_EXPR (*tp);
4918       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4919       if (addr)
4920 	return addr;
  /* Likewise for DECL_VALUE_EXPRs of variables, parameters and
     result decls.  */
4922   if ((TREE_CODE (*tp) == VAR_DECL
4923        || TREE_CODE (*tp) == PARM_DECL
4924        || TREE_CODE (*tp) == RESULT_DECL)
4925       && DECL_HAS_VALUE_EXPR_P (*tp))
4927       tree t = DECL_VALUE_EXPR (*tp);
4928       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4929       if (addr)
4930 	return addr;
  /* Non-expressions carry no location; prune the walk.  */
4933   if (!EXPR_P (*tp))
4935       *walk_subtrees = false;
4936       return NULL;
  /* The expression's location must refer only to blocks in BLOCKS.  */
4939   location_t loc = EXPR_LOCATION (*tp);
4940   if (verify_location (blocks, loc))
4941     return *tp;
4943   return NULL;
4946 /* Called via walk_gimple_op. Verify locations of expressions. */
4948 static tree
4949 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4951 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4952 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4955 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4957 static void
4958 collect_subblocks (hash_set<tree> *blocks, tree block)
4960 tree t;
4961 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4963 blocks->add (t);
4964 collect_subblocks (blocks, t);
4968 /* Verify the GIMPLE statements in the CFG of FN.  */
4970 DEBUG_FUNCTION void
4971 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4973   basic_block bb;
4974   bool err = false;
4976   timevar_push (TV_TREE_STMT_VERIFY);
4977   hash_set<void *> visited;
4978   hash_set<gimple> visited_stmts;
4980   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
4981   hash_set<tree> blocks;
4982   if (DECL_INITIAL (fn->decl))
4984       blocks.add (DECL_INITIAL (fn->decl));
4985       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4988   FOR_EACH_BB_FN (bb, fn)
4990       gimple_stmt_iterator gsi;
      /* First check the PHI nodes of this block.  */
4992       for (gphi_iterator gpi = gsi_start_phis (bb);
4993 	   !gsi_end_p (gpi);
4994 	   gsi_next (&gpi))
4996 	  gphi *phi = gpi.phi ();
4997 	  bool err2 = false;
4998 	  unsigned i;
5000 	  visited_stmts.add (phi);
5002 	  if (gimple_bb (phi) != bb)
5004 	      error ("gimple_bb (phi) is set to a wrong basic block");
5005 	      err2 = true;
5008 	  err2 |= verify_gimple_phi (phi);
5010 	  /* Only PHI arguments have locations.  */
5011 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5013 	      error ("PHI node with location");
5014 	      err2 = true;
	  /* Check each PHI argument for sharing and location validity.  */
5017 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5019 	      tree arg = gimple_phi_arg_def (phi, i);
5020 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5021 				     &visited, NULL);
5022 	      if (addr)
5024 		  error ("incorrect sharing of tree nodes");
5025 		  debug_generic_expr (addr);
5026 		  err2 |= true;
5028 	      location_t loc = gimple_phi_arg_location (phi, i);
5029 	      if (virtual_operand_p (gimple_phi_result (phi))
5030 		  && loc != UNKNOWN_LOCATION)
5032 		  error ("virtual PHI with argument locations");
5033 		  err2 = true;
5035 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5036 	      if (addr)
5038 		  debug_generic_expr (addr);
5039 		  err2 = true;
5041 	      err2 |= verify_location (&blocks, loc);
5044 	  if (err2)
5045 	    debug_gimple_stmt (phi);
5046 	  err |= err2;
      /* Then check the ordinary statements.  */
5049       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5051 	  gimple stmt = gsi_stmt (gsi);
5052 	  bool err2 = false;
5053 	  struct walk_stmt_info wi;
5054 	  tree addr;
5055 	  int lp_nr;
5057 	  visited_stmts.add (stmt);
5059 	  if (gimple_bb (stmt) != bb)
5061 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5062 	      err2 = true;
5065 	  err2 |= verify_gimple_stmt (stmt);
5066 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5068 	  memset (&wi, 0, sizeof (wi));
5069 	  wi.info = (void *) &visited;
5070 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5071 	  if (addr)
5073 	      error ("incorrect sharing of tree nodes");
5074 	      debug_generic_expr (addr);
5075 	      err2 |= true;
5078 	  memset (&wi, 0, sizeof (wi));
5079 	  wi.info = (void *) &blocks;
5080 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5081 	  if (addr)
5083 	      debug_generic_expr (addr);
5084 	      err2 |= true;
5087 	  /* ??? Instead of not checking these stmts at all the walker
5088 	     should know its context via wi.  */
5089 	  if (!is_gimple_debug (stmt)
5090 	      && !is_gimple_omp (stmt))
5092 	      memset (&wi, 0, sizeof (wi));
5093 	      addr = walk_gimple_op (stmt, verify_expr, &wi);
5094 	      if (addr)
5096 		  debug_generic_expr (addr);
5097 		  inform (gimple_location (stmt), "in statement");
5098 		  err2 |= true;
5102 	  /* If the statement is marked as part of an EH region, then it is
5103 	     expected that the statement could throw.  Verify that when we
5104 	     have optimizations that simplify statements such that we prove
5105 	     that they cannot throw, that we update other data structures
5106 	     to match.  */
5107 	  lp_nr = lookup_stmt_eh_lp (stmt);
5108 	  if (lp_nr > 0)
5110 	      if (!stmt_could_throw_p (stmt))
5112 		  if (verify_nothrow)
5114 		      error ("statement marked for throw, but doesn%'t");
5115 		      err2 |= true;
	      /* Throwing statements must end their basic block.  */
5118 	      else if (!gsi_one_before_end_p (gsi))
5120 		  error ("statement marked for throw in middle of block");
5121 		  err2 |= true;
5125 	  if (err2)
5126 	    debug_gimple_stmt (stmt);
5127 	  err |= err2;
  /* Finally make sure the EH throw table does not reference statements
     that no longer exist in the IL.  */
5131   eh_error_found = false;
5132   hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5133   if (eh_table)
5134     eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5135       (&visited_stmts);
5137   if (err || eh_error_found)
5138     internal_error ("verify_gimple failed");
5140   verify_histograms ();
5141   timevar_pop (TV_TREE_STMT_VERIFY);
5145 /* Verifies that the flow information is OK.  */
5147 static int
5148 gimple_verify_flow_info (void)
5150   int err = 0;
5151   basic_block bb;
5152   gimple_stmt_iterator gsi;
5153   gimple stmt;
5154   edge e;
5155   edge_iterator ei;
  /* The ENTRY and EXIT blocks never carry statements or PHI nodes.  */
5157   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5158       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5160       error ("ENTRY_BLOCK has IL associated with it");
5161       err = 1;
5164   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5165       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5167       error ("EXIT_BLOCK has IL associated with it");
5168       err = 1;
  /* In GIMPLE the exit block is reached via return statements, never
     by falling through.  */
5171   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5172     if (e->flags & EDGE_FALLTHRU)
5174 	error ("fallthru to exit from bb %d", e->src->index);
5175 	err = 1;
5178   FOR_EACH_BB_FN (bb, cfun)
5180       bool found_ctrl_stmt = false;
5182       stmt = NULL;
5184       /* Skip labels on the start of basic block.  */
5185       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5187 	  tree label;
5188 	  gimple prev_stmt = stmt;
5190 	  stmt = gsi_stmt (gsi);
5192 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5193 	    break;
	  /* Nonlocal labels and EH landing pad labels must be the first
	     label of their block.  */
5195 	  label = gimple_label_label (as_a <glabel *> (stmt));
5196 	  if (prev_stmt && DECL_NONLOCAL (label))
5198 	      error ("nonlocal label ");
5199 	      print_generic_expr (stderr, label, 0);
5200 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5201 		       bb->index);
5202 	      err = 1;
5205 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5207 	      error ("EH landing pad label ");
5208 	      print_generic_expr (stderr, label, 0);
5209 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5210 		       bb->index);
5211 	      err = 1;
5214 	  if (label_to_block (label) != bb)
5216 	      error ("label ");
5217 	      print_generic_expr (stderr, label, 0);
5218 	      fprintf (stderr, " to block does not match in bb %d",
5219 		       bb->index);
5220 	      err = 1;
5223 	  if (decl_function_context (label) != current_function_decl)
5225 	      error ("label ");
5226 	      print_generic_expr (stderr, label, 0);
5227 	      fprintf (stderr, " has incorrect context in bb %d",
5228 		       bb->index);
5229 	      err = 1;
5233       /* Verify that body of basic block BB is free of control flow.  */
5234       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5236 	  gimple stmt = gsi_stmt (gsi);
5238 	  if (found_ctrl_stmt)
5240 	      error ("control flow in the middle of basic block %d",
5241 		     bb->index);
5242 	      err = 1;
5245 	  if (stmt_ends_bb_p (stmt))
5246 	    found_ctrl_stmt = true;
	  /* Labels past the leading label sequence are also invalid.  */
5248 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5250 	      error ("label ");
5251 	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5252 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5253 	      err = 1;
5257       gsi = gsi_last_bb (bb);
5258       if (gsi_end_p (gsi))
5259 	continue;
5261       stmt = gsi_stmt (gsi);
5263       if (gimple_code (stmt) == GIMPLE_LABEL)
5264 	continue;
5266       err |= verify_eh_edges (stmt);
      /* Check the outgoing edge flags against the kind of statement
	 that ends the block.  */
5268       if (is_ctrl_stmt (stmt))
5270 	  FOR_EACH_EDGE (e, ei, bb->succs)
5271 	    if (e->flags & EDGE_FALLTHRU)
5273 		error ("fallthru edge after a control statement in bb %d",
5274 		       bb->index);
5275 		err = 1;
5279       if (gimple_code (stmt) != GIMPLE_COND)
5281 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5282 	     after anything else but if statement.  */
5283 	  FOR_EACH_EDGE (e, ei, bb->succs)
5284 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5286 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5287 		       bb->index);
5288 		err = 1;
5292       switch (gimple_code (stmt))
5294 	case GIMPLE_COND:
5296 	    edge true_edge;
5297 	    edge false_edge;
5299 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
	    /* A GIMPLE_COND has exactly two plain true/false successors.  */
5301 	    if (!true_edge
5302 		|| !false_edge
5303 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5304 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5305 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5306 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5307 		|| EDGE_COUNT (bb->succs) >= 3)
5309 		error ("wrong outgoing edge flags at end of bb %d",
5310 		       bb->index);
5311 		err = 1;
5314 	  break;
5316 	case GIMPLE_GOTO:
5317 	  if (simple_goto_p (stmt))
5319 	      error ("explicit goto at end of bb %d", bb->index);
5320 	      err = 1;
5322 	  else
5324 	      /* FIXME.  We should double check that the labels in the
5325 		 destination blocks have their address taken.  */
5326 	      FOR_EACH_EDGE (e, ei, bb->succs)
5327 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5328 				 | EDGE_FALSE_VALUE))
5329 		    || !(e->flags & EDGE_ABNORMAL))
5331 		    error ("wrong outgoing edge flags at end of bb %d",
5332 			   bb->index);
5333 		    err = 1;
5336 	  break;
5338 	case GIMPLE_CALL:
5339 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5340 	    break;
5341 	  /* ... fallthru ... */
5342 	case GIMPLE_RETURN:
5343 	  if (!single_succ_p (bb)
5344 	      || (single_succ_edge (bb)->flags
5345 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5346 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5348 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5349 	      err = 1;
5351 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5353 	      error ("return edge does not point to exit in bb %d",
5354 		     bb->index);
5355 	      err = 1;
5357 	  break;
5359 	case GIMPLE_SWITCH:
5361 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5362 	    tree prev;
5363 	    edge e;
5364 	    size_t i, n;
5366 	    n = gimple_switch_num_labels (switch_stmt);
	    /* The bb->aux markers below implement a two-phase check:
	       1 = block is a case target, 2 = target with an edge.  */
5368 	    /* Mark all the destination basic blocks.  */
5369 	    for (i = 0; i < n; ++i)
5371 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5372 		basic_block label_bb = label_to_block (lab);
5373 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5374 		label_bb->aux = (void *)1;
5377 	    /* Verify that the case labels are sorted.  */
5378 	    prev = gimple_switch_label (switch_stmt, 0);
5379 	    for (i = 1; i < n; ++i)
5381 		tree c = gimple_switch_label (switch_stmt, i);
5382 		if (!CASE_LOW (c))
5384 		    error ("found default case not at the start of "
5385 			   "case vector");
5386 		    err = 1;
5387 		    continue;
5389 		if (CASE_LOW (prev)
5390 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5392 		    error ("case labels not sorted: ");
5393 		    print_generic_expr (stderr, prev, 0);
5394 		    fprintf (stderr," is greater than ");
5395 		    print_generic_expr (stderr, c, 0);
5396 		    fprintf (stderr," but comes before it.\n");
5397 		    err = 1;
5399 		prev = c;
5401 	    /* VRP will remove the default case if it can prove it will
5402 	       never be executed.  So do not verify there always exists
5403 	       a default case here.  */
5405 	    FOR_EACH_EDGE (e, ei, bb->succs)
5407 		if (!e->dest->aux)
5409 		    error ("extra outgoing edge %d->%d",
5410 			   bb->index, e->dest->index);
5411 		    err = 1;
5414 		e->dest->aux = (void *)2;
5415 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5416 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5418 		    error ("wrong outgoing edge flags at end of bb %d",
5419 			   bb->index);
5420 		    err = 1;
5424 	    /* Check that we have all of them.  */
5425 	    for (i = 0; i < n; ++i)
5427 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5428 		basic_block label_bb = label_to_block (lab);
5430 		if (label_bb->aux != (void *)2)
5432 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5433 		    err = 1;
	    /* Clear the aux markers again.  */
5437 	    FOR_EACH_EDGE (e, ei, bb->succs)
5438 	      e->dest->aux = (void *)0;
5440 	  break;
5442 	case GIMPLE_EH_DISPATCH:
5443 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5444 	  break;
5446 	default:
5447 	  break;
5451   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5452     verify_dominators (CDI_DOMINATORS);
5454   return err;
5458 /* Updates phi nodes after creating a forwarder block joined
5459    by edge FALLTHRU.  */
5461 static void
5462 gimple_make_forwarder_block (edge fallthru)
5464   edge e;
5465   edge_iterator ei;
5466   basic_block dummy, bb;
5467   tree var;
5468   gphi_iterator gsi;
5470   dummy = fallthru->src;
5471   bb = fallthru->dest;
  /* With a single predecessor no PHI adjustment is needed.  */
5473   if (single_pred_p (bb))
5474     return;
5476   /* If we redirected a branch we must create new PHI nodes at the
5477      start of BB.  */
5478   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5480       gphi *phi, *new_phi;
5482       phi = gsi.phi ();
5483       var = gimple_phi_result (phi);
5484       new_phi = create_phi_node (var, bb);
      /* The old PHI (left in the forwarder block) gets a fresh SSA
	 name as its result, which then feeds the new PHI in BB along
	 the FALLTHRU edge.  */
5485       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5486       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5487 		   UNKNOWN_LOCATION);
5490   /* Add the arguments we have stored on edges.  */
5491   FOR_EACH_EDGE (e, ei, bb->preds)
5493       if (e == fallthru)
5494 	continue;
5496       flush_pending_stmts (e);
5501 /* Return a non-special label in the head of basic block BLOCK.
5502    Create one if it doesn't exist.  */
5504 tree
5505 gimple_block_label (basic_block bb)
5507   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5508   bool first = true;
5509   tree label;
5510   glabel *stmt;
  /* Scan the leading label sequence of BB for a usable (non-nonlocal)
     label.  */
5512   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5514       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5515       if (!stmt)
5516 	break;
5517       label = gimple_label_label (stmt);
5518       if (!DECL_NONLOCAL (label))
	  /* Side effect: hoist the found label to the front of the
	     label sequence so later lookups hit it first.  */
5520 	  if (!first)
5521 	    gsi_move_before (&i, &s);
5522 	  return label;
  /* No usable label found -- create an artificial one at the head.  */
5526   label = create_artificial_label (UNKNOWN_LOCATION);
5527   stmt = gimple_build_label (label);
5528   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5529   return label;
5533 /* Attempt to perform edge redirection by replacing a possibly complex
5534    jump instruction by a goto or by removing the jump completely.
5535    This can apply only if all edges now point to the same block.  The
5536    parameters and return values are equivalent to
5537    redirect_edge_and_branch.  */
5539 static edge
5540 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5542   basic_block src = e->src;
5543   gimple_stmt_iterator i;
5544   gimple stmt;
5546   /* We can replace or remove a complex jump only when we have exactly
5547      two edges.  */
5548   if (EDGE_COUNT (src->succs) != 2
5549       /* Verify that all targets will be TARGET.  Specifically, the
5550 	 edge that is not E must also go to TARGET.  */
      /* The index trick selects the "other" successor: the comparison
	 yields 1 when EDGE_SUCC (src, 0) is E, and 0 otherwise.  */
5551       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5552     return NULL;
5554   i = gsi_last_bb (src);
5555   if (gsi_end_p (i))
5556     return NULL;
5558   stmt = gsi_stmt (i);
  /* Only a conditional jump or a switch can be deleted; the block then
     simply falls through to TARGET.  */
5560   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5562       gsi_remove (&i, true);
5563       e = ssa_redirect_edge (e, target);
5564       e->flags = EDGE_FALLTHRU;
5565       return e;
5568   return NULL;
5572 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5573    edge representing the redirected branch.  */
5575 static edge
5576 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5578   basic_block bb = e->src;
5579   gimple_stmt_iterator gsi;
5580   edge ret;
5581   gimple stmt;
  /* Abnormal edges cannot be redirected; redirecting to the current
     destination is a no-op and is reported as failure.  */
5583   if (e->flags & EDGE_ABNORMAL)
5584     return NULL;
5586   if (e->dest == dest)
5587     return NULL;
5589   if (e->flags & EDGE_EH)
5590     return redirect_eh_edge (e, dest);
  /* Try the cheap route first: delete the whole jump when all
     successors would end up at DEST anyway.  */
5592   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5594       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5595       if (ret)
5596 	return ret;
  /* Otherwise fix up the statement ending BB per its kind, then
     redirect the CFG edge.  */
5599   gsi = gsi_last_bb (bb);
5600   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5602   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5604     case GIMPLE_COND:
5605       /* For COND_EXPR, we only need to redirect the edge.  */
5606       break;
5608     case GIMPLE_GOTO:
5609       /* No non-abnormal edges should lead from a non-simple goto, and
5610 	 simple ones should be represented implicitly.  */
5611       gcc_unreachable ();
5613     case GIMPLE_SWITCH:
5615 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5616 	tree label = gimple_block_label (dest);
5617 	tree cases = get_cases_for_edge (e, switch_stmt);
5619 	/* If we have a list of cases associated with E, then use it
5620 	   as it's a lot faster than walking the entire case vector.  */
5621 	if (cases)
5623 	    edge e2 = find_edge (e->src, dest);
5624 	    tree last, first;
5626 	    first = cases;
5627 	    while (cases)
5629 		last = cases;
5630 		CASE_LABEL (cases) = label;
5631 		cases = CASE_CHAIN (cases);
5634 	    /* If there was already an edge in the CFG, then we need
5635 	       to move all the cases associated with E to E2.  */
5636 	    if (e2)
5638 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
		/* Splice E's case chain onto the front of E2's chain.  */
5640 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5641 		CASE_CHAIN (cases2) = first;
5643 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5645 	else
5647 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5649 	    for (i = 0; i < n; i++)
5651 		tree elt = gimple_switch_label (switch_stmt, i);
5652 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
5653 		  CASE_LABEL (elt) = label;
5657       break;
5659     case GIMPLE_ASM:
5661 	gasm *asm_stmt = as_a <gasm *> (stmt);
5662 	int i, n = gimple_asm_nlabels (asm_stmt);
5663 	tree label = NULL;
	/* Retarget every asm-goto label that pointed at E->dest.  */
5665 	for (i = 0; i < n; ++i)
5667 	    tree cons = gimple_asm_label_op (asm_stmt, i);
5668 	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
5670 		if (!label)
5671 		  label = gimple_block_label (dest);
5672 		TREE_VALUE (cons) = label;
5676 	/* If we didn't find any label matching the former edge in the
5677 	   asm labels, we must be redirecting the fallthrough
5678 	   edge.  */
5679 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5681       break;
5683     case GIMPLE_RETURN:
5684       gsi_remove (&gsi, true);
5685       e->flags |= EDGE_FALLTHRU;
5686       break;
5688     case GIMPLE_OMP_RETURN:
5689     case GIMPLE_OMP_CONTINUE:
5690     case GIMPLE_OMP_SECTIONS_SWITCH:
5691     case GIMPLE_OMP_FOR:
5692       /* The edges from OMP constructs can be simply redirected.  */
5693       break;
5695     case GIMPLE_EH_DISPATCH:
5696       if (!(e->flags & EDGE_FALLTHRU))
5697 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5698       break;
5700     case GIMPLE_TRANSACTION:
5701       /* The ABORT edge has a stored label associated with it, otherwise
5702 	 the edges are simply redirectable.  */
5703       if (e->flags == 0)
5704 	gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5705 				      gimple_block_label (dest));
5706       break;
5708     default:
5709       /* Otherwise it must be a fallthru edge, and we don't need to
5710 	 do anything besides redirecting it.  */
5711       gcc_assert (e->flags & EDGE_FALLTHRU);
5712       break;
5715   /* Update/insert PHI nodes as necessary.  */
5717   /* Now update the edges in the CFG.  */
5718   e = ssa_redirect_edge (e, dest);
5720   return e;
5723 /* Returns true if it is possible to remove edge E by redirecting
5724 it to the destination of the other edge from E->src. */
5726 static bool
5727 gimple_can_remove_branch_p (const_edge e)
5729 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5730 return false;
5732 return true;
5735 /* Simple wrapper, as we can always redirect fallthru edges. */
5737 static basic_block
5738 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5740 e = gimple_redirect_edge_and_branch (e, dest);
5741 gcc_assert (e);
5743 return NULL;
5747 /* Splits basic block BB after statement STMT (but at least after the
5748    labels).  If STMT is NULL, BB is split just after the labels.  */
5750 static basic_block
5751 gimple_split_block (basic_block bb, void *stmt)
5753   gimple_stmt_iterator gsi;
5754   gimple_stmt_iterator gsi_tgt;
5755   gimple_seq list;
5756   basic_block new_bb;
5757   edge e;
5758   edge_iterator ei;
5760   new_bb = create_empty_bb (bb);
5762   /* Redirect the outgoing edges.  */
  /* Ownership of the successor edge vector moves wholesale to the new
     block; only the edges' src pointers need fixing up.  */
5763   new_bb->succs = bb->succs;
5764   bb->succs = NULL;
5765   FOR_EACH_EDGE (e, ei, new_bb->succs)
5766     e->src = new_bb;
5768   /* Get a stmt iterator pointing to the first stmt to move.  */
5769   if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5770     gsi = gsi_after_labels (bb);
5771   else
5773       gsi = gsi_for_stmt ((gimple) stmt);
5774       gsi_next (&gsi);
5777   /* Move everything from GSI to the new basic block.  */
5778   if (gsi_end_p (gsi))
5779     return new_bb;
5781   /* Split the statement list - avoid re-creating new containers as this
5782      brings ugly quadratic memory consumption in the inliner.
5783      (We are still quadratic since we need to update stmt BB pointers,
5784      sadly.)  */
5785   gsi_split_seq_before (&gsi, &list);
5786   set_bb_seq (new_bb, list);
5787   for (gsi_tgt = gsi_start (list);
5788        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5789     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5791   return new_bb;
5795 /* Moves basic block BB after block AFTER. */
5797 static bool
5798 gimple_move_block_after (basic_block bb, basic_block after)
5800 if (bb->prev_bb == after)
5801 return true;
5803 unlink_block (bb);
5804 link_block (bb, after);
5806 return true;
5810 /* Return TRUE if block BB has no executable statements, otherwise return
5811 FALSE. */
5813 static bool
5814 gimple_empty_block_p (basic_block bb)
5816 /* BB must have no executable statements. */
5817 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5818 if (phi_nodes (bb))
5819 return false;
5820 if (gsi_end_p (gsi))
5821 return true;
5822 if (is_gimple_debug (gsi_stmt (gsi)))
5823 gsi_next_nondebug (&gsi);
5824 return gsi_end_p (gsi);
5828 /* Split a basic block if it ends with a conditional branch and if the
5829    other part of the block is not empty.  */
5831 static basic_block
5832 gimple_split_block_before_cond_jump (basic_block bb)
5834   gimple last, split_point;
5835   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5836   if (gsi_end_p (gsi))
5837     return NULL;
5838   last = gsi_stmt (gsi);
  /* Only conditional branches and switches trigger a split.  */
5839   if (gimple_code (last) != GIMPLE_COND
5840       && gimple_code (last) != GIMPLE_SWITCH)
5841     return NULL;
5842   gsi_prev_nondebug (&gsi);
  /* NOTE(review): if the branch is the only non-debug statement, GSI is
     now past-the-end and gsi_stmt presumably yields NULL, making
     split_block split just after the labels -- confirm this is the
     intended behavior.  */
5843   split_point = gsi_stmt (gsi);
5844   return split_block (bb, split_point)->dest;
5848 /* Return true if basic_block can be duplicated. */
5850 static bool
5851 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5853 return true;
5856 /* Create a duplicate of the basic block BB.  NOTE: This does not
5857    preserve SSA form.  */
5859 static basic_block
5860 gimple_duplicate_bb (basic_block bb)
5862   basic_block new_bb;
5863   gimple_stmt_iterator gsi_tgt;
  /* Place the copy just before the exit block.  */
5865   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5867   /* Copy the PHI nodes.  We ignore PHI node arguments here because
5868      the incoming edges have not been setup yet.  */
5869   for (gphi_iterator gpi = gsi_start_phis (bb);
5870        !gsi_end_p (gpi);
5871        gsi_next (&gpi))
5873       gphi *phi, *copy;
5874       phi = gpi.phi ();
5875       copy = create_phi_node (NULL_TREE, new_bb);
5876       create_new_def_for (gimple_phi_result (phi), copy,
5877 			  gimple_phi_result_ptr (copy));
5878       gimple_set_uid (copy, gimple_uid (phi));
  /* Copy the statements, skipping labels (a block's labels are unique
     to it) and label debug binds.  */
5881   gsi_tgt = gsi_start_bb (new_bb);
5882   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5883        !gsi_end_p (gsi);
5884        gsi_next (&gsi))
5886       def_operand_p def_p;
5887       ssa_op_iter op_iter;
5888       tree lhs;
5889       gimple stmt, copy;
5891       stmt = gsi_stmt (gsi);
5892       if (gimple_code (stmt) == GIMPLE_LABEL)
5893 	continue;
5895       /* Don't duplicate label debug stmts.  */
5896       if (gimple_debug_bind_p (stmt)
5897 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
5898 	     == LABEL_DECL)
5899 	continue;
5901       /* Create a new copy of STMT and duplicate STMT's virtual
5902 	 operands.  */
5903       copy = gimple_copy (stmt);
5904       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5906       maybe_duplicate_eh_stmt (copy, stmt);
5907       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5909       /* When copying around a stmt writing into a local non-user
5910 	 aggregate, make sure it won't share stack slot with other
5911 	 vars.  */
5912       lhs = gimple_get_lhs (stmt);
5913       if (lhs && TREE_CODE (lhs) != SSA_NAME)
5915 	  tree base = get_base_address (lhs);
5916 	  if (base
5917 	      && (TREE_CODE (base) == VAR_DECL
5918 		  || TREE_CODE (base) == RESULT_DECL)
5919 	      && DECL_IGNORED_P (base)
5920 	      && !TREE_STATIC (base)
5921 	      && !DECL_EXTERNAL (base)
5922 	      && (TREE_CODE (base) != VAR_DECL
5923 		  || !DECL_HAS_VALUE_EXPR_P (base)))
5924 	    DECL_NONSHAREABLE (base) = 1;
5927       /* Create new names for all the definitions created by COPY and
5928 	 add replacement mappings for each new name.  */
5929       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5930 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5933   return new_bb;
5936 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
5938 static void
5939 add_phi_args_after_copy_edge (edge e_copy)
5941   basic_block bb, bb_copy = e_copy->src, dest;
5942   edge e;
5943   edge_iterator ei;
5944   gphi *phi, *phi_copy;
5945   tree def;
5946   gphi_iterator psi, psi_copy;
5948   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5949     return;
  /* Map the copied source (and destination) blocks back to their
     originals so the matching original edge can be located.  */
5951   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5953   if (e_copy->dest->flags & BB_DUPLICATED)
5954     dest = get_bb_original (e_copy->dest);
5955   else
5956     dest = e_copy->dest;
5958   e = find_edge (bb, dest);
5959   if (!e)
5961       /* During loop unrolling the target of the latch edge is copied.
5962 	 In this case we are not looking for edge to dest, but to
5963 	 duplicated block whose original was dest.  */
5964       FOR_EACH_EDGE (e, ei, bb->succs)
5966 	  if ((e->dest->flags & BB_DUPLICATED)
5967 	      && get_bb_original (e->dest) == dest)
5968 	    break;
5971       gcc_assert (e != NULL);
  /* Walk the PHIs of the original and copied destinations in lockstep,
     copying each argument (and its location) from edge E to E_COPY.  */
5974   for (psi = gsi_start_phis (e->dest),
5975        psi_copy = gsi_start_phis (e_copy->dest);
5976        !gsi_end_p (psi);
5977        gsi_next (&psi), gsi_next (&psi_copy))
5979       phi = psi.phi ();
5980       phi_copy = psi_copy.phi ();
5981       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5982       add_phi_arg (phi_copy, def, e_copy,
5983 		   gimple_phi_arg_location_from_edge (phi, e));
5988 /* Basic block BB_COPY was created by code duplication. Add phi node
5989 arguments for edges going out of BB_COPY. The blocks that were
5990 duplicated have BB_DUPLICATED set. */
5992 void
5993 add_phi_args_after_copy_bb (basic_block bb_copy)
5995 edge e_copy;
5996 edge_iterator ei;
5998 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6000 add_phi_args_after_copy_edge (e_copy);
6004 /* Blocks in REGION_COPY array of length N_REGION were created by
6005 duplication of basic blocks. Add phi node arguments for edges
6006 going from these blocks. If E_COPY is not NULL, also add
6007 phi node arguments for its destination.*/
6009 void
6010 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6011 edge e_copy)
6013 unsigned i;
6015 for (i = 0; i < n_region; i++)
6016 region_copy[i]->flags |= BB_DUPLICATED;
6018 for (i = 0; i < n_region; i++)
6019 add_phi_args_after_copy_bb (region_copy[i]);
6020 if (e_copy)
6021 add_phi_args_after_copy_edge (e_copy);
6023 for (i = 0; i < n_region; i++)
6024 region_copy[i]->flags &= ~BB_DUPLICATED;
6027 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6028 important exit edge EXIT. By important we mean that no SSA name defined
6029 inside region is live over the other exit edges of the region. All entry
6030 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6031 to the duplicate of the region. Dominance and loop information is
6032 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6033 UPDATE_DOMINANCE is false then we assume that the caller will update the
6034 dominance information after calling this function. The new basic
6035 blocks are stored to REGION_COPY in the same order as they had in REGION,
6036 provided that REGION_COPY is not NULL.
6037 The function returns false if it is unable to copy the region,
6038 true otherwise. */
6040 bool
6041 gimple_duplicate_sese_region (edge entry, edge exit,
6042 basic_block *region, unsigned n_region,
6043 basic_block *region_copy,
6044 bool update_dominance)
6046 unsigned i;
6047 bool free_region_copy = false, copying_header = false;
6048 struct loop *loop = entry->dest->loop_father;
6049 edge exit_copy;
6050 vec<basic_block> doms;
6051 edge redirected;
6052 int total_freq = 0, entry_freq = 0;
6053 gcov_type total_count = 0, entry_count = 0;
6055 if (!can_copy_bbs_p (region, n_region))
6056 return false;
6058 /* Some sanity checking. Note that we do not check for all possible
6059 missuses of the functions. I.e. if you ask to copy something weird,
6060 it will work, but the state of structures probably will not be
6061 correct. */
6062 for (i = 0; i < n_region; i++)
6064 /* We do not handle subloops, i.e. all the blocks must belong to the
6065 same loop. */
6066 if (region[i]->loop_father != loop)
6067 return false;
6069 if (region[i] != entry->dest
6070 && region[i] == loop->header)
6071 return false;
6074 /* In case the function is used for loop header copying (which is the primary
6075 use), ensure that EXIT and its copy will be new latch and entry edges. */
6076 if (loop->header == entry->dest)
6078 copying_header = true;
6080 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6081 return false;
6083 for (i = 0; i < n_region; i++)
6084 if (region[i] != exit->src
6085 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6086 return false;
6089 initialize_original_copy_tables ();
6091 if (copying_header)
6092 set_loop_copy (loop, loop_outer (loop));
6093 else
6094 set_loop_copy (loop, loop);
6096 if (!region_copy)
6098 region_copy = XNEWVEC (basic_block, n_region);
6099 free_region_copy = true;
6102 /* Record blocks outside the region that are dominated by something
6103 inside. */
6104 if (update_dominance)
6106 doms.create (0);
6107 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6110 if (entry->dest->count)
6112 total_count = entry->dest->count;
6113 entry_count = entry->count;
6114 /* Fix up corner cases, to avoid division by zero or creation of negative
6115 frequencies. */
6116 if (entry_count > total_count)
6117 entry_count = total_count;
6119 else
6121 total_freq = entry->dest->frequency;
6122 entry_freq = EDGE_FREQUENCY (entry);
6123 /* Fix up corner cases, to avoid division by zero or creation of negative
6124 frequencies. */
6125 if (total_freq == 0)
6126 total_freq = 1;
6127 else if (entry_freq > total_freq)
6128 entry_freq = total_freq;
6131 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6132 split_edge_bb_loc (entry), update_dominance);
6133 if (total_count)
6135 scale_bbs_frequencies_gcov_type (region, n_region,
6136 total_count - entry_count,
6137 total_count);
6138 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6139 total_count);
6141 else
6143 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6144 total_freq);
6145 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6148 if (copying_header)
6150 loop->header = exit->dest;
6151 loop->latch = exit->src;
6154 /* Redirect the entry and add the phi node arguments. */
6155 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6156 gcc_assert (redirected != NULL);
6157 flush_pending_stmts (entry);
6159 /* Concerning updating of dominators: We must recount dominators
6160 for entry block and its copy. Anything that is outside of the
6161 region, but was dominated by something inside needs recounting as
6162 well. */
6163 if (update_dominance)
6165 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6166 doms.safe_push (get_bb_original (entry->dest));
6167 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6168 doms.release ();
6171 /* Add the other PHI node arguments. */
6172 add_phi_args_after_copy (region_copy, n_region, NULL);
6174 if (free_region_copy)
6175 free (region_copy);
6177 free_original_copy_tables ();
6178 return true;
6181 /* Checks if BB is part of the region defined by N_REGION BBS. */
6182 static bool
6183 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6185 unsigned int n;
6187 for (n = 0; n < n_region; n++)
6189 if (bb == bbs[n])
6190 return true;
6192 return false;
6195 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6196 are stored to REGION_COPY in the same order in that they appear
6197 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6198 the region, EXIT an exit from it. The condition guarding EXIT
6199 is moved to ENTRY. Returns true if duplication succeeds, false
6200 otherwise.
6202 For example,
6204 some_code;
6205 if (cond)
6207 else
6210 is transformed to
6212 if (cond)
6214 some_code;
6217 else
6219 some_code;
6224 bool
6225 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6226 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6227 basic_block *region_copy ATTRIBUTE_UNUSED)
6229 unsigned i;
6230 bool free_region_copy = false;
6231 struct loop *loop = exit->dest->loop_father;
6232 struct loop *orig_loop = entry->dest->loop_father;
6233 basic_block switch_bb, entry_bb, nentry_bb;
6234 vec<basic_block> doms;
6235 int total_freq = 0, exit_freq = 0;
6236 gcov_type total_count = 0, exit_count = 0;
6237 edge exits[2], nexits[2], e;
6238 gimple_stmt_iterator gsi;
6239 gimple cond_stmt;
6240 edge sorig, snew;
6241 basic_block exit_bb;
6242 gphi_iterator psi;
6243 gphi *phi;
6244 tree def;
6245 struct loop *target, *aloop, *cloop;
6247 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6248 exits[0] = exit;
6249 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6251 if (!can_copy_bbs_p (region, n_region))
6252 return false;
6254 initialize_original_copy_tables ();
6255 set_loop_copy (orig_loop, loop);
6257 target= loop;
6258 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6260 if (bb_part_of_region_p (aloop->header, region, n_region))
6262 cloop = duplicate_loop (aloop, target);
6263 duplicate_subloops (aloop, cloop);
6267 if (!region_copy)
6269 region_copy = XNEWVEC (basic_block, n_region);
6270 free_region_copy = true;
6273 gcc_assert (!need_ssa_update_p (cfun));
6275 /* Record blocks outside the region that are dominated by something
6276 inside. */
6277 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6279 if (exit->src->count)
6281 total_count = exit->src->count;
6282 exit_count = exit->count;
6283 /* Fix up corner cases, to avoid division by zero or creation of negative
6284 frequencies. */
6285 if (exit_count > total_count)
6286 exit_count = total_count;
6288 else
6290 total_freq = exit->src->frequency;
6291 exit_freq = EDGE_FREQUENCY (exit);
6292 /* Fix up corner cases, to avoid division by zero or creation of negative
6293 frequencies. */
6294 if (total_freq == 0)
6295 total_freq = 1;
6296 if (exit_freq > total_freq)
6297 exit_freq = total_freq;
6300 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6301 split_edge_bb_loc (exit), true);
6302 if (total_count)
6304 scale_bbs_frequencies_gcov_type (region, n_region,
6305 total_count - exit_count,
6306 total_count);
6307 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6308 total_count);
6310 else
6312 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6313 total_freq);
6314 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6317 /* Create the switch block, and put the exit condition to it. */
6318 entry_bb = entry->dest;
6319 nentry_bb = get_bb_copy (entry_bb);
6320 if (!last_stmt (entry->src)
6321 || !stmt_ends_bb_p (last_stmt (entry->src)))
6322 switch_bb = entry->src;
6323 else
6324 switch_bb = split_edge (entry);
6325 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6327 gsi = gsi_last_bb (switch_bb);
6328 cond_stmt = last_stmt (exit->src);
6329 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6330 cond_stmt = gimple_copy (cond_stmt);
6332 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6334 sorig = single_succ_edge (switch_bb);
6335 sorig->flags = exits[1]->flags;
6336 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6338 /* Register the new edge from SWITCH_BB in loop exit lists. */
6339 rescan_loop_exit (snew, true, false);
6341 /* Add the PHI node arguments. */
6342 add_phi_args_after_copy (region_copy, n_region, snew);
6344 /* Get rid of now superfluous conditions and associated edges (and phi node
6345 arguments). */
6346 exit_bb = exit->dest;
6348 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6349 PENDING_STMT (e) = NULL;
6351 /* The latch of ORIG_LOOP was copied, and so was the backedge
6352 to the original header. We redirect this backedge to EXIT_BB. */
6353 for (i = 0; i < n_region; i++)
6354 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6356 gcc_assert (single_succ_edge (region_copy[i]));
6357 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6358 PENDING_STMT (e) = NULL;
6359 for (psi = gsi_start_phis (exit_bb);
6360 !gsi_end_p (psi);
6361 gsi_next (&psi))
6363 phi = psi.phi ();
6364 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6365 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6368 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6369 PENDING_STMT (e) = NULL;
6371 /* Anything that is outside of the region, but was dominated by something
6372 inside needs to update dominance info. */
6373 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6374 doms.release ();
6375 /* Update the SSA web. */
6376 update_ssa (TODO_update_ssa);
6378 if (free_region_copy)
6379 free (region_copy);
6381 free_original_copy_tables ();
6382 return true;
6385 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6386 adding blocks when the dominator traversal reaches EXIT. This
6387 function silently assumes that ENTRY strictly dominates EXIT. */
6389 void
6390 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6391 vec<basic_block> *bbs_p)
6393 basic_block son;
6395 for (son = first_dom_son (CDI_DOMINATORS, entry);
6396 son;
6397 son = next_dom_son (CDI_DOMINATORS, son))
6399 bbs_p->safe_push (son);
6400 if (son != exit)
6401 gather_blocks_in_sese_region (son, exit, bbs_p);
6405 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6406 The duplicates are recorded in VARS_MAP. */
6408 static void
6409 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6410 tree to_context)
6412 tree t = *tp, new_t;
6413 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6415 if (DECL_CONTEXT (t) == to_context)
6416 return;
6418 bool existed;
6419 tree &loc = vars_map->get_or_insert (t, &existed);
6421 if (!existed)
6423 if (SSA_VAR_P (t))
6425 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6426 add_local_decl (f, new_t);
6428 else
6430 gcc_assert (TREE_CODE (t) == CONST_DECL);
6431 new_t = copy_node (t);
6433 DECL_CONTEXT (new_t) = to_context;
6435 loc = new_t;
6437 else
6438 new_t = loc;
6440 *tp = new_t;
6444 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6445 VARS_MAP maps old ssa names and var_decls to the new ones. */
6447 static tree
6448 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6449 tree to_context)
6451 tree new_name;
6453 gcc_assert (!virtual_operand_p (name));
6455 tree *loc = vars_map->get (name);
6457 if (!loc)
6459 tree decl = SSA_NAME_VAR (name);
6460 if (decl)
6462 replace_by_duplicate_decl (&decl, vars_map, to_context);
6463 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6464 decl, SSA_NAME_DEF_STMT (name));
6465 if (SSA_NAME_IS_DEFAULT_DEF (name))
6466 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6467 decl, new_name);
6469 else
6470 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6471 name, SSA_NAME_DEF_STMT (name));
6473 vars_map->put (name, new_name);
6475 else
6476 new_name = *loc;
6478 return new_name;
/* Context passed (via walk_stmt_info.info) to move_stmt_r and
   move_stmt_op while statements are being moved to another function.  */

struct move_stmt_d
{
  /* BLOCK the statements originally belong to; NULL_TREE makes the
     walkers treat any non-NULL block as a match.  */
  tree orig_block;
  /* BLOCK that moved statements/expressions are re-attached to.  */
  tree new_block;
  /* FUNCTION_DECL statements are moved out of.  */
  tree from_context;
  /* FUNCTION_DECL statements are moved into; used as the new
     DECL_CONTEXT of remapped declarations.  */
  tree to_context;
  /* Maps old decls and SSA names to their duplicates in TO_CONTEXT.  */
  hash_map<tree, tree> *vars_map;
  /* Maps old labels to replacement labels (tree_map entries), or NULL.  */
  htab_t new_label_map;
  /* Maps old EH regions to their duplicates in the destination.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local declarations should be remapped; cleared while
     walking inside OMP directive bodies (see move_stmt_r).  */
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      /* Re-attach the expression to NEW_BLOCK when its block matches
	 ORIG_BLOCK, or when ORIG_BLOCK is NULL and it has any block.  */
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Any other block must be a descendant of ORIG_BLOCK in the
	     BLOCK_SUPERCONTEXT chain; verify that.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have been pre-remapped (see new_label_mapper);
	     look up the replacement before updating the context.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees to walk.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6559 /* Helper for move_stmt_r. Given an EH region number for the source
6560 function, map that to the duplicate EH regio number in the dest. */
6562 static int
6563 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6565 eh_region old_r, new_r;
6567 old_r = get_eh_region_from_number (old_nr);
6568 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6570 return new_r->index;
6573 /* Similar, but operate on INTEGER_CSTs. */
6575 static tree
6576 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6578 int old_nr, new_nr;
6580 old_nr = tree_to_shwi (old_t_nr);
6581 new_nr = move_stmt_eh_region_nr (old_nr, p);
6583 return build_int_cst (integer_type_node, new_nr);
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Re-attach the statement to NEW_BLOCK when it matches ORIG_BLOCK,
     or when ORIG_BLOCK is NULL and the statement has any block.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* This builtin carries two region numbers; remap the
		 second here, then fall through for the first.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.
     The caller is expected to have stashed the destination loop in
     the source loop's AUX field (see move_sese_region_to_fn).  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index in the destination array.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by an extra 25% to amortize repeated reallocation.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  Note remove_phi_node
	     advances PSI, so the explicit gsi_next below is skipped.  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-home the argument locations into the new block.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      /* Remap blocks, decls, SSA names and EH regions in the statement.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label's UID slot from the old function's
	     label-to-block map to the new one's, growing it if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-home goto_locus data on the outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6844 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6845 the outermost EH region. Use REGION as the incoming base EH region. */
6847 static eh_region
6848 find_outermost_region_in_block (struct function *src_cfun,
6849 basic_block bb, eh_region region)
6851 gimple_stmt_iterator si;
6853 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6855 gimple stmt = gsi_stmt (si);
6856 eh_region stmt_region;
6857 int lp_nr;
6859 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6860 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6861 if (stmt_region)
6863 if (region == NULL)
6864 region = stmt_region;
6865 else if (stmt_region != region)
6867 region = eh_region_outermost (src_cfun, stmt_region, region);
6868 gcc_assert (region != NULL);
6873 return region;
6876 static tree
6877 new_label_mapper (tree decl, void *data)
6879 htab_t hash = (htab_t) data;
6880 struct tree_map *m;
6881 void **slot;
6883 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6885 m = XNEW (struct tree_map);
6886 m->hash = DECL_UID (decl);
6887 m->base.from = decl;
6888 m->to = create_artificial_label (UNKNOWN_LOCATION);
6889 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6890 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6891 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6893 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6894 gcc_assert (*slot == NULL);
6896 *slot = m;
6898 return m->to;
6901 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6902 subblocks. */
6904 static void
6905 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6906 tree to_context)
6908 tree *tp, t;
6910 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6912 t = *tp;
6913 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6914 continue;
6915 replace_by_duplicate_decl (&t, vars_map, to_context);
6916 if (t != *tp)
6918 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6920 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6921 DECL_HAS_VALUE_EXPR_P (t) = 1;
6923 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6924 *tp = t;
6928 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6929 replace_block_vars_by_duplicates (block, vars_map, to_context);
6932 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6933 from FN1 to FN2. */
6935 static void
6936 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6937 struct loop *loop)
6939 /* Discard it from the old loop array. */
6940 (*get_loops (fn1))[loop->num] = NULL;
6942 /* Place it in the new loop array, assigning it a new number. */
6943 loop->num = number_of_loops (fn2);
6944 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6946 /* Recurse to children. */
6947 for (loop = loop->inner; loop; loop = loop->next)
6948 fixup_loop_arrays_after_move (fn1, fn2, loop);
6951 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6952 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6954 DEBUG_FUNCTION void
6955 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6957 basic_block bb;
6958 edge_iterator ei;
6959 edge e;
6960 bitmap bbs = BITMAP_ALLOC (NULL);
6961 int i;
6963 gcc_assert (entry != NULL);
6964 gcc_assert (entry != exit);
6965 gcc_assert (bbs_p != NULL);
6967 gcc_assert (bbs_p->length () > 0);
6969 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6970 bitmap_set_bit (bbs, bb->index);
6972 gcc_assert (bitmap_bit_p (bbs, entry->index));
6973 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6975 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6977 if (bb == entry)
6979 gcc_assert (single_pred_p (entry));
6980 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6982 else
6983 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6985 e = ei_edge (ei);
6986 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6989 if (bb == exit)
6991 gcc_assert (single_succ_p (exit));
6992 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6994 else
6995 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6997 e = ei_edge (ei);
6998 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7002 BITMAP_FREE (bbs);
7006 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7007 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7008 single basic block in the original CFG and the new basic block is
7009 returned. DEST_CFUN must not have a CFG yet.
7011 Note that the region need not be a pure SESE region. Blocks inside
7012 the region may contain calls to abort/exit. The only restriction
7013 is that ENTRY_BB should be the only entry point and it must
7014 dominate EXIT_BB.
7016 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7017 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7018 to the new function.
7020 All local variables referenced in the region are assumed to be in
7021 the corresponding BLOCK_VARS and unexpanded variable lists
7022 associated with DEST_CFUN. */
7024 basic_block
7025 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7026 basic_block exit_bb, tree orig_block)
7028 vec<basic_block> bbs, dom_bbs;
7029 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7030 basic_block after, bb, *entry_pred, *exit_succ, abb;
7031 struct function *saved_cfun = cfun;
7032 int *entry_flag, *exit_flag;
7033 unsigned *entry_prob, *exit_prob;
7034 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7035 edge e;
7036 edge_iterator ei;
7037 htab_t new_label_map;
7038 hash_map<void *, void *> *eh_map;
7039 struct loop *loop = entry_bb->loop_father;
7040 struct loop *loop0 = get_loop (saved_cfun, 0);
7041 struct move_stmt_d d;
7043 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7044 region. */
7045 gcc_assert (entry_bb != exit_bb
7046 && (!exit_bb
7047 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7049 /* Collect all the blocks in the region. Manually add ENTRY_BB
7050 because it won't be added by dfs_enumerate_from. */
7051 bbs.create (0);
7052 bbs.safe_push (entry_bb);
7053 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7054 #ifdef ENABLE_CHECKING
7055 verify_sese (entry_bb, exit_bb, &bbs);
7056 #endif
7058 /* The blocks that used to be dominated by something in BBS will now be
7059 dominated by the new block. */
7060 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7061 bbs.address (),
7062 bbs.length ());
7064 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7065 the predecessor edges to ENTRY_BB and the successor edges to
7066 EXIT_BB so that we can re-attach them to the new basic block that
7067 will replace the region. */
7068 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7069 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7070 entry_flag = XNEWVEC (int, num_entry_edges);
7071 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7072 i = 0;
7073 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7075 entry_prob[i] = e->probability;
7076 entry_flag[i] = e->flags;
7077 entry_pred[i++] = e->src;
7078 remove_edge (e);
7081 if (exit_bb)
7083 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7084 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7085 exit_flag = XNEWVEC (int, num_exit_edges);
7086 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7087 i = 0;
7088 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7090 exit_prob[i] = e->probability;
7091 exit_flag[i] = e->flags;
7092 exit_succ[i++] = e->dest;
7093 remove_edge (e);
7096 else
7098 num_exit_edges = 0;
7099 exit_succ = NULL;
7100 exit_flag = NULL;
7101 exit_prob = NULL;
7104 /* Switch context to the child function to initialize DEST_FN's CFG. */
7105 gcc_assert (dest_cfun->cfg == NULL);
7106 push_cfun (dest_cfun);
7108 init_empty_tree_cfg ();
7110 /* Initialize EH information for the new function. */
7111 eh_map = NULL;
7112 new_label_map = NULL;
7113 if (saved_cfun->eh)
7115 eh_region region = NULL;
7117 FOR_EACH_VEC_ELT (bbs, i, bb)
7118 region = find_outermost_region_in_block (saved_cfun, bb, region);
7120 init_eh_for_function ();
7121 if (region != NULL)
7123 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7124 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7125 new_label_mapper, new_label_map);
7129 /* Initialize an empty loop tree. */
7130 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7131 init_loops_structure (dest_cfun, loops, 1);
7132 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7133 set_loops_for_fn (dest_cfun, loops);
7135 /* Move the outlined loop tree part. */
7136 num_nodes = bbs.length ();
7137 FOR_EACH_VEC_ELT (bbs, i, bb)
7139 if (bb->loop_father->header == bb)
7141 struct loop *this_loop = bb->loop_father;
7142 struct loop *outer = loop_outer (this_loop);
7143 if (outer == loop
7144 /* If the SESE region contains some bbs ending with
7145 a noreturn call, those are considered to belong
7146 to the outermost loop in saved_cfun, rather than
7147 the entry_bb's loop_father. */
7148 || outer == loop0)
7150 if (outer != loop)
7151 num_nodes -= this_loop->num_nodes;
7152 flow_loop_tree_node_remove (bb->loop_father);
7153 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7154 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7157 else if (bb->loop_father == loop0 && loop0 != loop)
7158 num_nodes--;
7160 /* Remove loop exits from the outlined region. */
7161 if (loops_for_fn (saved_cfun)->exits)
7162 FOR_EACH_EDGE (e, ei, bb->succs)
7164 struct loops *l = loops_for_fn (saved_cfun);
7165 loop_exit **slot
7166 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7167 NO_INSERT);
7168 if (slot)
7169 l->exits->clear_slot (slot);
7174 /* Adjust the number of blocks in the tree root of the outlined part. */
7175 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7177 /* Setup a mapping to be used by move_block_to_fn. */
7178 loop->aux = current_loops->tree_root;
7179 loop0->aux = current_loops->tree_root;
7181 pop_cfun ();
7183 /* Move blocks from BBS into DEST_CFUN. */
7184 gcc_assert (bbs.length () >= 2);
7185 after = dest_cfun->cfg->x_entry_block_ptr;
7186 hash_map<tree, tree> vars_map;
7188 memset (&d, 0, sizeof (d));
7189 d.orig_block = orig_block;
7190 d.new_block = DECL_INITIAL (dest_cfun->decl);
7191 d.from_context = cfun->decl;
7192 d.to_context = dest_cfun->decl;
7193 d.vars_map = &vars_map;
7194 d.new_label_map = new_label_map;
7195 d.eh_map = eh_map;
7196 d.remap_decls_p = true;
7198 FOR_EACH_VEC_ELT (bbs, i, bb)
7200 /* No need to update edge counts on the last block. It has
7201 already been updated earlier when we detached the region from
7202 the original CFG. */
7203 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7204 after = bb;
7207 loop->aux = NULL;
7208 loop0->aux = NULL;
7209 /* Loop sizes are no longer correct, fix them up. */
7210 loop->num_nodes -= num_nodes;
7211 for (struct loop *outer = loop_outer (loop);
7212 outer; outer = loop_outer (outer))
7213 outer->num_nodes -= num_nodes;
7214 loop0->num_nodes -= bbs.length () - num_nodes;
7216 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7218 struct loop *aloop;
7219 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7220 if (aloop != NULL)
7222 if (aloop->simduid)
7224 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7225 d.to_context);
7226 dest_cfun->has_simduid_loops = true;
7228 if (aloop->force_vectorize)
7229 dest_cfun->has_force_vectorize_loops = true;
7233 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7234 if (orig_block)
7236 tree block;
7237 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7238 == NULL_TREE);
7239 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7240 = BLOCK_SUBBLOCKS (orig_block);
7241 for (block = BLOCK_SUBBLOCKS (orig_block);
7242 block; block = BLOCK_CHAIN (block))
7243 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7244 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7247 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7248 &vars_map, dest_cfun->decl);
7250 if (new_label_map)
7251 htab_delete (new_label_map);
7252 if (eh_map)
7253 delete eh_map;
7255 /* Rewire the entry and exit blocks. The successor to the entry
7256 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7257 the child function. Similarly, the predecessor of DEST_FN's
7258 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7259 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7260 various CFG manipulation function get to the right CFG.
7262 FIXME, this is silly. The CFG ought to become a parameter to
7263 these helpers. */
7264 push_cfun (dest_cfun);
7265 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7266 if (exit_bb)
7267 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7268 pop_cfun ();
7270 /* Back in the original function, the SESE region has disappeared,
7271 create a new basic block in its place. */
7272 bb = create_empty_bb (entry_pred[0]);
7273 if (current_loops)
7274 add_bb_to_loop (bb, loop);
7275 for (i = 0; i < num_entry_edges; i++)
7277 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7278 e->probability = entry_prob[i];
7281 for (i = 0; i < num_exit_edges; i++)
7283 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7284 e->probability = exit_prob[i];
7287 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7288 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7289 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7290 dom_bbs.release ();
7292 if (exit_bb)
7294 free (exit_prob);
7295 free (exit_flag);
7296 free (exit_succ);
7298 free (entry_prob);
7299 free (entry_flag);
7300 free (entry_pred);
7301 bbs.release ();
7303 return bb;
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  Depending on how far FNDECL has been lowered, this emits
   a CFG-based dump, a flat GIMPLE-sequence dump, or a GENERIC tree dump.  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list: "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump with no GIMPLE body: print the tree node and bail out.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Anonymous SSA names (no underlying user variable) get their own
	 declaration lines; ssa_name 0 is unused, so start at 1.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, " ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND provides its own braces.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the caller's notion of the current function.  */
  current_function_decl = old_current_fndecl;
}
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
7478 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7480 static void
7481 print_pred_bbs (FILE *file, basic_block bb)
7483 edge e;
7484 edge_iterator ei;
7486 FOR_EACH_EDGE (e, ei, bb->preds)
7487 fprintf (file, "bb_%d ", e->src->index);
7491 /* Print on FILE the indexes for the successors of basic_block BB. */
7493 static void
7494 print_succ_bbs (FILE *file, basic_block bb)
7496 edge e;
7497 edge_iterator ei;
7499 FOR_EACH_EDGE (e, ei, bb->succs)
7500 fprintf (file, "bb_%d ", e->dest->index);
7503 /* Print to FILE the basic block BB following the VERBOSITY level. */
7505 void
7506 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7508 char *s_indent = (char *) alloca ((size_t) indent + 1);
7509 memset ((void *) s_indent, ' ', (size_t) indent);
7510 s_indent[indent] = '\0';
7512 /* Print basic_block's header. */
7513 if (verbosity >= 2)
7515 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7516 print_pred_bbs (file, bb);
7517 fprintf (file, "}, succs = {");
7518 print_succ_bbs (file, bb);
7519 fprintf (file, "})\n");
7522 /* Print basic_block's body. */
7523 if (verbosity >= 3)
7525 fprintf (file, "%s {\n", s_indent);
7526 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7527 fprintf (file, "%s }\n", s_indent);
7531 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7533 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7534 VERBOSITY level this outputs the contents of the loop, or just its
7535 structure. */
7537 static void
7538 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7540 char *s_indent;
7541 basic_block bb;
7543 if (loop == NULL)
7544 return;
7546 s_indent = (char *) alloca ((size_t) indent + 1);
7547 memset ((void *) s_indent, ' ', (size_t) indent);
7548 s_indent[indent] = '\0';
7550 /* Print loop's header. */
7551 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7552 if (loop->header)
7553 fprintf (file, "header = %d", loop->header->index);
7554 else
7556 fprintf (file, "deleted)\n");
7557 return;
7559 if (loop->latch)
7560 fprintf (file, ", latch = %d", loop->latch->index);
7561 else
7562 fprintf (file, ", multiple latches");
7563 fprintf (file, ", niter = ");
7564 print_generic_expr (file, loop->nb_iterations, 0);
7566 if (loop->any_upper_bound)
7568 fprintf (file, ", upper_bound = ");
7569 print_decu (loop->nb_iterations_upper_bound, file);
7572 if (loop->any_estimate)
7574 fprintf (file, ", estimate = ");
7575 print_decu (loop->nb_iterations_estimate, file);
7577 fprintf (file, ")\n");
7579 /* Print loop's body. */
7580 if (verbosity >= 1)
7582 fprintf (file, "%s{\n", s_indent);
7583 FOR_EACH_BB_FN (bb, cfun)
7584 if (bb->loop_father == loop)
7585 print_loops_bb (file, bb, indent, verbosity);
7587 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7588 fprintf (file, "%s}\n", s_indent);
7592 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7593 spaces. Following VERBOSITY level this outputs the contents of the
7594 loop, or just its structure. */
7596 static void
7597 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7598 int verbosity)
7600 if (loop == NULL)
7601 return;
7603 print_loop (file, loop, indent, verbosity);
7604 print_loop_and_siblings (file, loop->next, indent, verbosity);
7607 /* Follow a CFG edge from the entry point of the program, and on entry
7608 of a loop, pretty print the loop structure on FILE. */
7610 void
7611 print_loops (FILE *file, int verbosity)
7613 basic_block bb;
7615 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7616 if (bb && bb->loop_father)
7617 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
/* Dump loop REF to stderr with structure only (verbosity 0).  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7628 DEBUG_FUNCTION void
7629 debug (struct loop *ptr)
7631 if (ptr)
7632 debug (*ptr);
7633 else
7634 fprintf (stderr, "<nil>\n");
/* Dump loop REF to stderr verbosely (verbosity 3: header, blocks and
   nested loops).  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7645 DEBUG_FUNCTION void
7646 debug_verbose (struct loop *ptr)
7648 if (ptr)
7649 debug (*ptr);
7650 else
7651 fprintf (stderr, "<nil>\n");
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Prints the current function's whole loop tree to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
/* Print on stderr the code of loop number NUM of the current function,
   at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
7680 /* Return true if BB ends with a call, possibly followed by some
7681 instructions that must stay with the call. Return false,
7682 otherwise. */
7684 static bool
7685 gimple_block_ends_with_call_p (basic_block bb)
7687 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7688 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7692 /* Return true if BB ends with a conditional branch. Return false,
7693 otherwise. */
7695 static bool
7696 gimple_block_ends_with_condjump_p (const_basic_block bb)
7698 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7699 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7703 /* Return true if we need to add fake edge to exit at statement T.
7704 Helper function for gimple_flow_call_edges_add. */
7706 static bool
7707 need_fake_edge_p (gimple t)
7709 tree fndecl = NULL_TREE;
7710 int call_flags = 0;
7712 /* NORETURN and LONGJMP calls already have an edge to exit.
7713 CONST and PURE calls do not need one.
7714 We don't currently check for CONST and PURE here, although
7715 it would be a good idea, because those attributes are
7716 figured out from the RTL in mark_constant_function, and
7717 the counter incrementation code from -fprofile-arcs
7718 leads to different results from -fbranch-probabilities. */
7719 if (is_gimple_call (t))
7721 fndecl = gimple_call_fndecl (t);
7722 call_flags = gimple_call_flags (t);
7725 if (is_gimple_call (t)
7726 && fndecl
7727 && DECL_BUILT_IN (fndecl)
7728 && (call_flags & ECF_NOTHROW)
7729 && !(call_flags & ECF_RETURNS_TWICE)
7730 /* fork() doesn't really return twice, but the effect of
7731 wrapping it in __gcov_fork() which calls __gcov_flush()
7732 and clears the counters before forking has the same
7733 effect as returning twice. Force a fake edge. */
7734 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7735 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7736 return false;
7738 if (is_gimple_call (t))
7740 edge_iterator ei;
7741 edge e;
7742 basic_block bb;
7744 if (!(call_flags & ECF_NORETURN))
7745 return true;
7747 bb = gimple_bb (t);
7748 FOR_EACH_EDGE (e, ei, bb->succs)
7749 if ((e->flags & EDGE_FAKE) == 0)
7750 return true;
7753 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7754 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7755 return true;
7757 return false;
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for an empty function (entry/exit blocks only).  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop forces the call into its own block, so the
		 fallthru and fake edges no longer coincide.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements backwards so splitting below a statement
	 does not disturb the part already scanned.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects the successors of the removed blocks that survive;
	 removed blocks themselves are cleared out below.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8015 /* Purge dead EH edges from basic block BB. */
8017 bool
8018 gimple_purge_dead_eh_edges (basic_block bb)
8020 bool changed = false;
8021 edge e;
8022 edge_iterator ei;
8023 gimple stmt = last_stmt (bb);
8025 if (stmt && stmt_can_throw_internal (stmt))
8026 return false;
8028 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8030 if (e->flags & EDGE_EH)
8032 remove_edge_and_dominated_blocks (e);
8033 changed = true;
8035 else
8036 ei_next (&ei);
8039 return changed;
8042 /* Purge dead EH edges from basic block listed in BLOCKS. */
8044 bool
8045 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8047 bool changed = false;
8048 unsigned i;
8049 bitmap_iterator bi;
8051 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8053 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8055 /* Earlier gimple_purge_dead_eh_edges could have removed
8056 this basic block already. */
8057 gcc_assert (bb || changed);
8058 if (bb != NULL)
8059 changed |= gimple_purge_dead_eh_edges (bb);
8062 return changed;
8065 /* Purge dead abnormal call edges from basic block BB. */
8067 bool
8068 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8070 bool changed = false;
8071 edge e;
8072 edge_iterator ei;
8073 gimple stmt = last_stmt (bb);
8075 if (!cfun->has_nonlocal_label
8076 && !cfun->calls_setjmp)
8077 return false;
8079 if (stmt && stmt_can_make_abnormal_goto (stmt))
8080 return false;
8082 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8084 if (e->flags & EDGE_ABNORMAL)
8086 if (e->flags & EDGE_FALLTHRU)
8087 e->flags &= ~EDGE_ABNORMAL;
8088 else
8089 remove_edge_and_dominated_blocks (e);
8090 changed = true;
8092 else
8093 ei_next (&ei);
8096 return changed;
8099 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8101 bool
8102 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8104 bool changed = false;
8105 unsigned i;
8106 bitmap_iterator bi;
8108 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8110 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8112 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8113 this basic block already. */
8114 gcc_assert (bb || changed);
8115 if (bb != NULL)
8116 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8119 return changed;
8122 /* This function is called whenever a new edge is created or
8123 redirected. */
8125 static void
8126 gimple_execute_on_growing_pred (edge e)
8128 basic_block bb = e->dest;
8130 if (!gimple_seq_empty_p (phi_nodes (bb)))
8131 reserve_phi_args_for_new_edge (bb);
8134 /* This function is called immediately before edge E is removed from
8135 the edge vector E->dest->preds. */
8137 static void
8138 gimple_execute_on_shrinking_pred (edge e)
8140 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8141 remove_phi_args (e);
8144 /*---------------------------------------------------------------------------
8145 Helper functions for Loop versioning
8146 ---------------------------------------------------------------------------*/
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two iterators advance in lockstep, relying on FIRST being a
     copy of SECOND so their PHI chains correspond one-to-one.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* Reuse the value SECOND's PHI receives from NEW_HEAD (via E2),
	 along with its source location, for FIRST's PHI on edge E.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing single successor edge leads
     to FIRST_HEAD (the ELSE destination per the comment above), so it
     becomes the false edge; the caller is expected to add the true
     edge.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
8215 /* Do book-keeping of basic block BB for the profile consistency checker.
8216 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8217 then do post-pass accounting. Store the counting in RECORD. */
8218 static void
8219 gimple_account_profile_record (basic_block bb, int after_pass,
8220 struct profile_record *record)
8222 gimple_stmt_iterator i;
8223 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8225 record->size[after_pass]
8226 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8227 if (profile_status_for_fn (cfun) == PROFILE_READ)
8228 record->time[after_pass]
8229 += estimate_num_insns (gsi_stmt (i),
8230 &eni_time_weights) * bb->count;
8231 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8232 record->time[after_pass]
8233 += estimate_num_insns (gsi_stmt (i),
8234 &eni_time_weights) * bb->frequency;
/* CFG hooks implementation for GIMPLE; see cfghooks.h for the contract
   of each entry.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,	/* flow_call_edges_add */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts,		/* flush_pending_stmts */
  gimple_empty_block_p,		/* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
};
/* Split all critical edges.  Returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge-splitting pass ("crited").
   Requires a CFG and provides the PROP_no_crit_edges property that
   later passes (e.g. PRE) rely on.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* The pass is cloneable so it can be scheduled more than once.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace
/* Factory for the critical-edge-splitting pass; called by the pass
   manager.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
8361 /* Insert COND expression which is GIMPLE_COND after STMT
8362 in basic block BB with appropriate basic block split
8363 and creation of a new conditionally executed basic block.
8364 Return created basic block. */
8365 basic_block
8366 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8368 edge fall = split_block (bb, stmt);
8369 gimple_stmt_iterator iter = gsi_last_bb (bb);
8370 basic_block new_bb;
8372 /* Insert cond statement. */
8373 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8374 if (gsi_end_p (iter))
8375 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8376 else
8377 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8379 /* Create conditionally executed block. */
8380 new_bb = create_empty_bb (bb);
8381 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8382 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8384 /* Fix edge for split bb. */
8385 fall->flags = EDGE_FALSE_VALUE;
8387 /* Update dominance info. */
8388 if (dom_info_available_p (CDI_DOMINATORS))
8390 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8391 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8394 /* Update loop info. */
8395 if (current_loops)
8396 add_bb_to_loop (new_bb, bb->loop_father);
8398 return new_bb;
8401 /* Build a ternary operation and gimplify it. Emit code before GSI.
8402 Return the gimple_val holding the result. */
8404 tree
8405 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8406 tree type, tree a, tree b, tree c)
8408 tree ret;
8409 location_t loc = gimple_location (gsi_stmt (*gsi));
8411 ret = fold_build3_loc (loc, code, type, a, b, c);
8412 STRIP_NOPS (ret);
8414 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8415 GSI_SAME_STMT);
8418 /* Build a binary operation and gimplify it. Emit code before GSI.
8419 Return the gimple_val holding the result. */
8421 tree
8422 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8423 tree type, tree a, tree b)
8425 tree ret;
8427 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8428 STRIP_NOPS (ret);
8430 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8431 GSI_SAME_STMT);
8434 /* Build a unary operation and gimplify it. Emit code before GSI.
8435 Return the gimple_val holding the result. */
8437 tree
8438 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8439 tree a)
8441 tree ret;
8443 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8444 STRIP_NOPS (ret);
8446 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8447 GSI_SAME_STMT);
8452 /* Given a basic block B which ends with a conditional and has
8453 precisely two successors, determine which of the edges is taken if
8454 the conditional is true and which is taken if the conditional is
8455 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8457 void
8458 extract_true_false_edges_from_block (basic_block b,
8459 edge *true_edge,
8460 edge *false_edge)
8462 edge e = EDGE_SUCC (b, 0);
8464 if (e->flags & EDGE_TRUE_VALUE)
8466 *true_edge = e;
8467 *false_edge = EDGE_SUCC (b, 1);
8469 else
8471 *false_edge = e;
8472 *true_edge = EDGE_SUCC (b, 1);
/* Emit return warnings.  */

namespace {

/* Pass descriptor: an unconditional internal pass (leading '*' in the
   name suppresses dump-file listing) that needs only a CFG.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

/* Warn about a noreturn function that can return, or a non-void
   function whose control flow reaches the end without a value.
   Always returns 0 (no TODO flags).  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Let the target veto the warning (e.g. for naked functions).  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     NOTE: on a FUNCTION_DECL, TREE_THIS_VOLATILE encodes the
     "noreturn" attribute (matches the warning text below).  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Pick the first returning statement with a usable location so the
	 warning points somewhere sensible.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace
/* Factory for the return-warning pass; called by the pass manager.  */

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8569 /* Walk a gimplified function and warn for functions whose return value is
8570 ignored and attribute((warn_unused_result)) is set. This is done before
8571 inlining, so we don't have to worry about that. */
8573 static void
8574 do_warn_unused_result (gimple_seq seq)
8576 tree fdecl, ftype;
8577 gimple_stmt_iterator i;
8579 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8581 gimple g = gsi_stmt (i);
8583 switch (gimple_code (g))
8585 case GIMPLE_BIND:
8586 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8587 break;
8588 case GIMPLE_TRY:
8589 do_warn_unused_result (gimple_try_eval (g));
8590 do_warn_unused_result (gimple_try_cleanup (g));
8591 break;
8592 case GIMPLE_CATCH:
8593 do_warn_unused_result (gimple_catch_handler (
8594 as_a <gcatch *> (g)));
8595 break;
8596 case GIMPLE_EH_FILTER:
8597 do_warn_unused_result (gimple_eh_filter_failure (g));
8598 break;
8600 case GIMPLE_CALL:
8601 if (gimple_call_lhs (g))
8602 break;
8603 if (gimple_call_internal_p (g))
8604 break;
8606 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8607 LHS. All calls whose value is ignored should be
8608 represented like this. Look for the attribute. */
8609 fdecl = gimple_call_fndecl (g);
8610 ftype = gimple_call_fntype (g);
8612 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8614 location_t loc = gimple_location (g);
8616 if (fdecl)
8617 warning_at (loc, OPT_Wunused_result,
8618 "ignoring return value of %qD, "
8619 "declared with attribute warn_unused_result",
8620 fdecl);
8621 else
8622 warning_at (loc, OPT_Wunused_result,
8623 "ignoring return value of function "
8624 "declared with attribute warn_unused_result");
8626 break;
8628 default:
8629 /* Not a container, not a call, or a call whose value is used. */
8630 break;
namespace {

/* Pass descriptor: runs right after gimplification (PROP_gimple_any),
   before the CFG is built.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when -Wunused-result is in effect.  */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace
/* Factory for the unused-result-warning pass; called by the pass
   manager.  */

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a mask of TODO_* flags (cleanup_cfg / update_ssa) for the
   caller to act on.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale profile counts so the body matches the (possibly updated)
     count recorded on the cgraph node for this function.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      /* No gsi_next in the for header: removal of a statement advances
	 the iterator via gsi_remove + continue; otherwise we advance at
	 the bottom of the loop body.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* Calls now known const/pure cannot have abnormal edges.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* Calls now known noreturn may need trailing code removed.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  /* gsi_remove advanced the iterator; skip gsi_next.  */
		  continue;
		}
	    }

	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  /* Statements no longer able to throw lose their EH edges.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

/* Pass descriptor for the CFG fixup pass (scheduled explicitly after
   inlining; see the comment above execute_fixup_cfg).  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable: the pass is scheduled at several points in the
     pipeline.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace
/* Factory for the CFG fixup pass; called by the pass manager.  */

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

/* Marker overloads provided elsewhere by gengtype.  */
extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark an rtx_insn chain for GGC; NULL is ignored.  */
static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

/* Mark everything an edge points at: source/destination blocks, the
   pending insertions (gimple or RTL depending on the current IR), and
   the BLOCK of the goto locus.  */
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* e->insns is a union; which member is live depends on the IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

/* Note overloads provided elsewhere by gengtype.  */
extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note an rtx_insn chain for PCH; NULL is ignored.  */
static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

/* Note everything an edge points at for PCH: mirrors gt_ggc_mx above.  */
void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* e->insns is a union; which member is live depends on the IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
/* Pointer-relocation variant for PCH: apply OP (with COOKIE) to every
   pointer field of the edge, matching the fields walked by the marking
   routines above.  */
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* e->insns is a union; which member is live depends on the IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}