PR tree-optimization/66718
[official-gcc.git] / gcc / tree-cfg.c
blobf47795a3cf294546e6acfc7c12ad7d0eed80228f
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "trans-mem.h"
30 #include "stor-layout.h"
31 #include "print-tree.h"
32 #include "tm_p.h"
33 #include "predict.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "cfganal.h"
39 #include "basic-block.h"
40 #include "flags.h"
41 #include "gimple-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-phinodes.h"
55 #include "ssa-iterators.h"
56 #include "stringpool.h"
57 #include "tree-ssanames.h"
58 #include "tree-ssa-loop-manip.h"
59 #include "tree-ssa-loop-niter.h"
60 #include "tree-into-ssa.h"
61 #include "rtl.h"
62 #include "insn-config.h"
63 #include "expmed.h"
64 #include "dojump.h"
65 #include "explow.h"
66 #include "calls.h"
67 #include "emit-rtl.h"
68 #include "varasm.h"
69 #include "stmt.h"
70 #include "expr.h"
71 #include "tree-dfa.h"
72 #include "tree-ssa.h"
73 #include "tree-dump.h"
74 #include "tree-pass.h"
75 #include "diagnostic-core.h"
76 #include "except.h"
77 #include "cfgloop.h"
78 #include "tree-ssa-propagate.h"
79 #include "value-prof.h"
80 #include "tree-inline.h"
81 #include "target.h"
82 #include "tree-ssa-live.h"
83 #include "omp-low.h"
84 #include "tree-cfgcleanup.h"
85 #include "wide-int-print.h"
87 /* This file contains functions for building the Control Flow Graph (CFG)
88 for a function tree. */
90 /* Local declarations. */
92 /* Initial capacity for the basic block array. */
93 static const int initial_cfg_capacity = 20;
95 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
96 which use a particular edge. The CASE_LABEL_EXPRs are chained together
97 via their CASE_CHAIN field, which we clear after we're done with the
98 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
100 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
101 update the case vector in response to edge redirections.
103 Right now this table is set up and torn down at key points in the
104 compilation process. It would be nice if we could make the table
105 more persistent. The key is getting notification of changes to
106 the CFG (particularly edge removal, creation and redirection). */
108 static hash_map<edge, tree> *edge_to_cases;
110 /* If we record edge_to_cases, this bitmap will hold indexes
111 of basic blocks that end in a GIMPLE_SWITCH which we touched
112 due to edge manipulations. */
114 static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their successor block's label.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
124 /* Hash table to store last discriminator assigned for each locus. */
125 struct locus_discrim_map
127 location_t locus;
128 int discriminator;
131 /* Hashtable helpers. */
133 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
135 static inline hashval_t hash (const locus_discrim_map *);
136 static inline bool equal (const locus_discrim_map *,
137 const locus_discrim_map *);
140 /* Trivial hash function for a location_t. ITEM is a pointer to
141 a hash table entry that maps a location_t to a discriminator. */
143 inline hashval_t
144 locus_discrim_hasher::hash (const locus_discrim_map *item)
146 return LOCATION_LINE (item->locus);
149 /* Equality function for the locus-to-discriminator map. A and B
150 point to the two hash table entries to compare. */
152 inline bool
153 locus_discrim_hasher::equal (const locus_discrim_map *a,
154 const locus_discrim_map *b)
156 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
159 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
161 /* Basic blocks and flowgraphs. */
162 static void make_blocks (gimple_seq);
164 /* Edges. */
165 static void make_edges (void);
166 static void assign_discriminators (void);
167 static void make_cond_expr_edges (basic_block);
168 static void make_gimple_switch_edges (gswitch *, basic_block);
169 static bool make_goto_expr_edges (basic_block);
170 static void make_gimple_asm_edges (basic_block);
171 static edge gimple_redirect_edge_and_branch (edge, basic_block);
172 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
174 /* Various helpers. */
175 static inline bool stmt_starts_bb_p (gimple, gimple);
176 static int gimple_verify_flow_info (void);
177 static void gimple_make_forwarder_block (edge);
178 static gimple first_non_label_stmt (basic_block);
179 static bool verify_gimple_transaction (gtransaction *);
180 static bool call_can_make_abnormal_goto (gimple);
182 /* Flowgraph optimization and cleanup. */
183 static void gimple_merge_blocks (basic_block, basic_block);
184 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
185 static void remove_bb (basic_block);
186 static edge find_taken_edge_computed_goto (basic_block, tree);
187 static edge find_taken_edge_cond_expr (basic_block, tree);
188 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
189 static tree find_case_label_for_value (gswitch *, tree);
191 void
192 init_empty_tree_cfg_for_function (struct function *fn)
194 /* Initialize the basic block array. */
195 init_flow (fn);
196 profile_status_for_fn (fn) = PROFILE_ABSENT;
197 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
198 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
199 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
200 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
201 initial_cfg_capacity);
203 /* Build a mapping of labels to their associated blocks. */
204 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
205 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
206 initial_cfg_capacity);
208 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
209 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
211 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
212 = EXIT_BLOCK_PTR_FOR_FN (fn);
213 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
214 = ENTRY_BLOCK_PTR_FOR_FN (fn);
217 void
218 init_empty_tree_cfg (void)
220 init_empty_tree_cfg_for_function (cfun);
223 /*---------------------------------------------------------------------------
224 Create basic blocks
225 ---------------------------------------------------------------------------*/
227 /* Entry point to the CFG builder for trees. SEQ is the sequence of
228 statements to be added to the flowgraph. */
230 static void
231 build_gimple_cfg (gimple_seq seq)
233 /* Register specific gimple functions. */
234 gimple_register_cfg_hooks ();
236 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
238 init_empty_tree_cfg ();
240 make_blocks (seq);
242 /* Make sure there is always at least one block, even if it's empty. */
243 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
244 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
246 /* Adjust the size of the array. */
247 if (basic_block_info_for_fn (cfun)->length ()
248 < (size_t) n_basic_blocks_for_fn (cfun))
249 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
250 n_basic_blocks_for_fn (cfun));
252 /* To speed up statement iterator walks, we first purge dead labels. */
253 cleanup_dead_labels ();
255 /* Group case nodes to reduce the number of edges.
256 We do this after cleaning up dead labels because otherwise we miss
257 a lot of obvious case merging opportunities. */
258 group_case_labels ();
260 /* Create the edges of the flowgraph. */
261 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
262 make_edges ();
263 assign_discriminators ();
264 cleanup_dead_labels ();
265 delete discriminator_per_locus;
266 discriminator_per_locus = NULL;
269 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
270 them and propagate the information to LOOP. We assume that the annotations
271 come immediately before the condition in BB, if any. */
273 static void
274 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
276 gimple_stmt_iterator gsi = gsi_last_bb (bb);
277 gimple stmt = gsi_stmt (gsi);
279 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
280 return;
282 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
284 stmt = gsi_stmt (gsi);
285 if (gimple_code (stmt) != GIMPLE_CALL)
286 break;
287 if (!gimple_call_internal_p (stmt)
288 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
289 break;
291 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
293 case annot_expr_ivdep_kind:
294 loop->safelen = INT_MAX;
295 break;
296 case annot_expr_no_vector_kind:
297 loop->dont_vectorize = true;
298 break;
299 case annot_expr_vector_kind:
300 loop->force_vectorize = true;
301 cfun->has_force_vectorize_loops = true;
302 break;
303 default:
304 gcc_unreachable ();
307 stmt = gimple_build_assign (gimple_call_lhs (stmt),
308 gimple_call_arg (stmt, 0));
309 gsi_replace (&gsi, stmt, true);
313 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
314 them and propagate the information to the loop. We assume that the
315 annotations come immediately before the condition of the loop. */
317 static void
318 replace_loop_annotate (void)
320 struct loop *loop;
321 basic_block bb;
322 gimple_stmt_iterator gsi;
323 gimple stmt;
325 FOR_EACH_LOOP (loop, 0)
327 /* First look into the header. */
328 replace_loop_annotate_in_block (loop->header, loop);
330 /* Then look into the latch, if any. */
331 if (loop->latch)
332 replace_loop_annotate_in_block (loop->latch, loop);
335 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
336 FOR_EACH_BB_FN (bb, cfun)
338 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
340 stmt = gsi_stmt (gsi);
341 if (gimple_code (stmt) != GIMPLE_CALL)
342 continue;
343 if (!gimple_call_internal_p (stmt)
344 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
345 continue;
347 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
349 case annot_expr_ivdep_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 break;
353 default:
354 gcc_unreachable ();
357 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
358 stmt = gimple_build_assign (gimple_call_lhs (stmt),
359 gimple_call_arg (stmt, 0));
360 gsi_replace (&gsi, stmt, true);
366 static unsigned int
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
378 cleanup_tree_cfg ();
379 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
380 replace_loop_annotate ();
381 return 0;
384 namespace {
386 const pass_data pass_data_build_cfg =
388 GIMPLE_PASS, /* type */
389 "cfg", /* name */
390 OPTGROUP_NONE, /* optinfo_flags */
391 TV_TREE_CFG, /* tv_id */
392 PROP_gimple_leh, /* properties_required */
393 ( PROP_cfg | PROP_loops ), /* properties_provided */
394 0, /* properties_destroyed */
395 0, /* todo_flags_start */
396 0, /* todo_flags_finish */
399 class pass_build_cfg : public gimple_opt_pass
401 public:
402 pass_build_cfg (gcc::context *ctxt)
403 : gimple_opt_pass (pass_data_build_cfg, ctxt)
406 /* opt_pass methods: */
407 virtual unsigned int execute (function *) { return execute_build_cfg (); }
409 }; // class pass_build_cfg
411 } // anon namespace
413 gimple_opt_pass *
414 make_pass_build_cfg (gcc::context *ctxt)
416 return new pass_build_cfg (ctxt);
420 /* Return true if T is a computed goto. */
422 bool
423 computed_goto_p (gimple t)
425 return (gimple_code (t) == GIMPLE_GOTO
426 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
429 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
430 the other edge points to a bb with just __builtin_unreachable ().
431 I.e. return true for C->M edge in:
432 <bb C>:
434 if (something)
435 goto <bb N>;
436 else
437 goto <bb M>;
438 <bb N>:
439 __builtin_unreachable ();
440 <bb M>: */
442 bool
443 assert_unreachable_fallthru_edge_p (edge e)
445 basic_block pred_bb = e->src;
446 gimple last = last_stmt (pred_bb);
447 if (last && gimple_code (last) == GIMPLE_COND)
449 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
450 if (other_bb == e->dest)
451 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
452 if (EDGE_COUNT (other_bb->succs) == 0)
454 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
455 gimple stmt;
457 if (gsi_end_p (gsi))
458 return false;
459 stmt = gsi_stmt (gsi);
460 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
462 gsi_next (&gsi);
463 if (gsi_end_p (gsi))
464 return false;
465 stmt = gsi_stmt (gsi);
467 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
470 return false;
474 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
475 could alter control flow except via eh. We initialize the flag at
476 CFG build time and only ever clear it later. */
478 static void
479 gimple_call_initialize_ctrl_altering (gimple stmt)
481 int flags = gimple_call_flags (stmt);
483 /* A call alters control flow if it can make an abnormal goto. */
484 if (call_can_make_abnormal_goto (stmt)
485 /* A call also alters control flow if it does not return. */
486 || flags & ECF_NORETURN
487 /* TM ending statements have backedges out of the transaction.
488 Return true so we split the basic block containing them.
489 Note that the TM_BUILTIN test is merely an optimization. */
490 || ((flags & ECF_TM_BUILTIN)
491 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
492 /* BUILT_IN_RETURN call is same as return statement. */
493 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
494 gimple_call_set_ctrl_altering (stmt, true);
495 else
496 gimple_call_set_ctrl_altering (stmt, false);
500 /* Insert SEQ after BB and build a flowgraph. */
502 static basic_block
503 make_blocks_1 (gimple_seq seq, basic_block bb)
505 gimple_stmt_iterator i = gsi_start (seq);
506 gimple stmt = NULL;
507 bool start_new_block = true;
508 bool first_stmt_of_seq = true;
510 while (!gsi_end_p (i))
512 gimple prev_stmt;
514 prev_stmt = stmt;
515 stmt = gsi_stmt (i);
517 if (stmt && is_gimple_call (stmt))
518 gimple_call_initialize_ctrl_altering (stmt);
520 /* If the statement starts a new basic block or if we have determined
521 in a previous pass that we need to create a new block for STMT, do
522 so now. */
523 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
525 if (!first_stmt_of_seq)
526 gsi_split_seq_before (&i, &seq);
527 bb = create_basic_block (seq, bb);
528 start_new_block = false;
531 /* Now add STMT to BB and create the subgraphs for special statement
532 codes. */
533 gimple_set_bb (stmt, bb);
535 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
536 next iteration. */
537 if (stmt_ends_bb_p (stmt))
539 /* If the stmt can make abnormal goto use a new temporary
540 for the assignment to the LHS. This makes sure the old value
541 of the LHS is available on the abnormal edge. Otherwise
542 we will end up with overlapping life-ranges for abnormal
543 SSA names. */
544 if (gimple_has_lhs (stmt)
545 && stmt_can_make_abnormal_goto (stmt)
546 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
548 tree lhs = gimple_get_lhs (stmt);
549 tree tmp = create_tmp_var (TREE_TYPE (lhs));
550 gimple s = gimple_build_assign (lhs, tmp);
551 gimple_set_location (s, gimple_location (stmt));
552 gimple_set_block (s, gimple_block (stmt));
553 gimple_set_lhs (stmt, tmp);
554 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
555 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
556 DECL_GIMPLE_REG_P (tmp) = 1;
557 gsi_insert_after (&i, s, GSI_SAME_STMT);
559 start_new_block = true;
562 gsi_next (&i);
563 first_stmt_of_seq = false;
565 return bb;
568 /* Build a flowgraph for the sequence of stmts SEQ. */
570 static void
571 make_blocks (gimple_seq seq)
573 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
576 /* Create and return a new empty basic block after bb AFTER. */
578 static basic_block
579 create_bb (void *h, void *e, basic_block after)
581 basic_block bb;
583 gcc_assert (!e);
585 /* Create and initialize a new basic block. Since alloc_block uses
586 GC allocation that clears memory to allocate a basic block, we do
587 not have to clear the newly allocated basic block here. */
588 bb = alloc_block ();
590 bb->index = last_basic_block_for_fn (cfun);
591 bb->flags = BB_NEW;
592 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
594 /* Add the new block to the linked list of blocks. */
595 link_block (bb, after);
597 /* Grow the basic block array if needed. */
598 if ((size_t) last_basic_block_for_fn (cfun)
599 == basic_block_info_for_fn (cfun)->length ())
601 size_t new_size =
602 (last_basic_block_for_fn (cfun)
603 + (last_basic_block_for_fn (cfun) + 3) / 4);
604 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
607 /* Add the newly created block to the array. */
608 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
610 n_basic_blocks_for_fn (cfun)++;
611 last_basic_block_for_fn (cfun)++;
613 return bb;
617 /*---------------------------------------------------------------------------
618 Edge creation
619 ---------------------------------------------------------------------------*/
621 /* Fold COND_EXPR_COND of each COND_EXPR. */
623 void
624 fold_cond_expr_cond (void)
626 basic_block bb;
628 FOR_EACH_BB_FN (bb, cfun)
630 gimple stmt = last_stmt (bb);
632 if (stmt && gimple_code (stmt) == GIMPLE_COND)
634 gcond *cond_stmt = as_a <gcond *> (stmt);
635 location_t loc = gimple_location (stmt);
636 tree cond;
637 bool zerop, onep;
639 fold_defer_overflow_warnings ();
640 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
641 boolean_type_node,
642 gimple_cond_lhs (cond_stmt),
643 gimple_cond_rhs (cond_stmt));
644 if (cond)
646 zerop = integer_zerop (cond);
647 onep = integer_onep (cond);
649 else
650 zerop = onep = false;
652 fold_undefer_overflow_warnings (zerop || onep,
653 stmt,
654 WARN_STRICT_OVERFLOW_CONDITIONAL);
655 if (zerop)
656 gimple_cond_make_false (cond_stmt);
657 else if (onep)
658 gimple_cond_make_true (cond_stmt);
663 /* If basic block BB has an abnormal edge to a basic block
664 containing IFN_ABNORMAL_DISPATCHER internal call, return
665 that the dispatcher's basic block, otherwise return NULL. */
667 basic_block
668 get_abnormal_succ_dispatcher (basic_block bb)
670 edge e;
671 edge_iterator ei;
673 FOR_EACH_EDGE (e, ei, bb->succs)
674 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
676 gimple_stmt_iterator gsi
677 = gsi_start_nondebug_after_labels_bb (e->dest);
678 gimple g = gsi_stmt (gsi);
679 if (g
680 && is_gimple_call (g)
681 && gimple_call_internal_p (g)
682 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
683 return e->dest;
685 return NULL;
688 /* Helper function for make_edges. Create a basic block with
689 with ABNORMAL_DISPATCHER internal call in it if needed, and
690 create abnormal edges from BBS to it and from it to FOR_BB
691 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
693 static void
694 handle_abnormal_edges (basic_block *dispatcher_bbs,
695 basic_block for_bb, int *bb_to_omp_idx,
696 auto_vec<basic_block> *bbs, bool computed_goto)
698 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
699 unsigned int idx = 0;
700 basic_block bb;
701 bool inner = false;
703 if (bb_to_omp_idx)
705 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
706 if (bb_to_omp_idx[for_bb->index] != 0)
707 inner = true;
710 /* If the dispatcher has been created already, then there are basic
711 blocks with abnormal edges to it, so just make a new edge to
712 for_bb. */
713 if (*dispatcher == NULL)
715 /* Check if there are any basic blocks that need to have
716 abnormal edges to this dispatcher. If there are none, return
717 early. */
718 if (bb_to_omp_idx == NULL)
720 if (bbs->is_empty ())
721 return;
723 else
725 FOR_EACH_VEC_ELT (*bbs, idx, bb)
726 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
727 break;
728 if (bb == NULL)
729 return;
732 /* Create the dispatcher bb. */
733 *dispatcher = create_basic_block (NULL, for_bb);
734 if (computed_goto)
736 /* Factor computed gotos into a common computed goto site. Also
737 record the location of that site so that we can un-factor the
738 gotos after we have converted back to normal form. */
739 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
741 /* Create the destination of the factored goto. Each original
742 computed goto will put its desired destination into this
743 variable and jump to the label we create immediately below. */
744 tree var = create_tmp_var (ptr_type_node, "gotovar");
746 /* Build a label for the new block which will contain the
747 factored computed goto. */
748 tree factored_label_decl
749 = create_artificial_label (UNKNOWN_LOCATION);
750 gimple factored_computed_goto_label
751 = gimple_build_label (factored_label_decl);
752 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
754 /* Build our new computed goto. */
755 gimple factored_computed_goto = gimple_build_goto (var);
756 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
758 FOR_EACH_VEC_ELT (*bbs, idx, bb)
760 if (bb_to_omp_idx
761 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
762 continue;
764 gsi = gsi_last_bb (bb);
765 gimple last = gsi_stmt (gsi);
767 gcc_assert (computed_goto_p (last));
769 /* Copy the original computed goto's destination into VAR. */
770 gimple assignment
771 = gimple_build_assign (var, gimple_goto_dest (last));
772 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
774 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
775 e->goto_locus = gimple_location (last);
776 gsi_remove (&gsi, true);
779 else
781 tree arg = inner ? boolean_true_node : boolean_false_node;
782 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
783 1, arg);
784 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
785 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
787 /* Create predecessor edges of the dispatcher. */
788 FOR_EACH_VEC_ELT (*bbs, idx, bb)
790 if (bb_to_omp_idx
791 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
792 continue;
793 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
798 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
801 /* Creates outgoing edges for BB. Returns 1 when it ends with an
802 computed goto, returns 2 when it ends with a statement that
803 might return to this function via an nonlocal goto, otherwise
804 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
806 static int
807 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
809 gimple last = last_stmt (bb);
810 bool fallthru = false;
811 int ret = 0;
813 if (!last)
814 return ret;
816 switch (gimple_code (last))
818 case GIMPLE_GOTO:
819 if (make_goto_expr_edges (bb))
820 ret = 1;
821 fallthru = false;
822 break;
823 case GIMPLE_RETURN:
825 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
826 e->goto_locus = gimple_location (last);
827 fallthru = false;
829 break;
830 case GIMPLE_COND:
831 make_cond_expr_edges (bb);
832 fallthru = false;
833 break;
834 case GIMPLE_SWITCH:
835 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
836 fallthru = false;
837 break;
838 case GIMPLE_RESX:
839 make_eh_edges (last);
840 fallthru = false;
841 break;
842 case GIMPLE_EH_DISPATCH:
843 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
844 break;
846 case GIMPLE_CALL:
847 /* If this function receives a nonlocal goto, then we need to
848 make edges from this call site to all the nonlocal goto
849 handlers. */
850 if (stmt_can_make_abnormal_goto (last))
851 ret = 2;
853 /* If this statement has reachable exception handlers, then
854 create abnormal edges to them. */
855 make_eh_edges (last);
857 /* BUILTIN_RETURN is really a return statement. */
858 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
860 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
861 fallthru = false;
863 /* Some calls are known not to return. */
864 else
865 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
866 break;
868 case GIMPLE_ASSIGN:
869 /* A GIMPLE_ASSIGN may throw internally and thus be considered
870 control-altering. */
871 if (is_ctrl_altering_stmt (last))
872 make_eh_edges (last);
873 fallthru = true;
874 break;
876 case GIMPLE_ASM:
877 make_gimple_asm_edges (bb);
878 fallthru = true;
879 break;
881 CASE_GIMPLE_OMP:
882 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
883 break;
885 case GIMPLE_TRANSACTION:
887 tree abort_label
888 = gimple_transaction_label (as_a <gtransaction *> (last));
889 if (abort_label)
890 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
891 fallthru = true;
893 break;
895 default:
896 gcc_assert (!stmt_ends_bb_p (last));
897 fallthru = true;
898 break;
901 if (fallthru)
902 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
904 return ret;
907 /* Join all the blocks in the flowgraph. */
909 static void
910 make_edges (void)
912 basic_block bb;
913 struct omp_region *cur_region = NULL;
914 auto_vec<basic_block> ab_edge_goto;
915 auto_vec<basic_block> ab_edge_call;
916 int *bb_to_omp_idx = NULL;
917 int cur_omp_region_idx = 0;
919 /* Create an edge from entry to the first block with executable
920 statements in it. */
921 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
922 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
923 EDGE_FALLTHRU);
925 /* Traverse the basic block array placing edges. */
926 FOR_EACH_BB_FN (bb, cfun)
928 int mer;
930 if (bb_to_omp_idx)
931 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
933 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
934 if (mer == 1)
935 ab_edge_goto.safe_push (bb);
936 else if (mer == 2)
937 ab_edge_call.safe_push (bb);
939 if (cur_region && bb_to_omp_idx == NULL)
940 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
943 /* Computed gotos are hell to deal with, especially if there are
944 lots of them with a large number of destinations. So we factor
945 them to a common computed goto location before we build the
946 edge list. After we convert back to normal form, we will un-factor
947 the computed gotos since factoring introduces an unwanted jump.
948 For non-local gotos and abnormal edges from calls to calls that return
949 twice or forced labels, factor the abnormal edges too, by having all
950 abnormal edges from the calls go to a common artificial basic block
951 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
952 basic block to all forced labels and calls returning twice.
953 We do this per-OpenMP structured block, because those regions
954 are guaranteed to be single entry single exit by the standard,
955 so it is not allowed to enter or exit such regions abnormally this way,
956 thus all computed gotos, non-local gotos and setjmp/longjmp calls
957 must not transfer control across SESE region boundaries. */
958 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
960 gimple_stmt_iterator gsi;
961 basic_block dispatcher_bb_array[2] = { NULL, NULL };
962 basic_block *dispatcher_bbs = dispatcher_bb_array;
963 int count = n_basic_blocks_for_fn (cfun);
965 if (bb_to_omp_idx)
966 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
968 FOR_EACH_BB_FN (bb, cfun)
970 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
973 tree target;
975 if (!label_stmt)
976 break;
978 target = gimple_label_label (label_stmt);
980 /* Make an edge to every label block that has been marked as a
981 potential target for a computed goto or a non-local goto. */
982 if (FORCED_LABEL (target))
983 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
984 &ab_edge_goto, true);
985 if (DECL_NONLOCAL (target))
987 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
988 &ab_edge_call, false);
989 break;
993 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
994 gsi_next_nondebug (&gsi);
995 if (!gsi_end_p (gsi))
997 /* Make an edge to every setjmp-like call. */
998 gimple call_stmt = gsi_stmt (gsi);
999 if (is_gimple_call (call_stmt)
1000 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1001 || gimple_call_builtin_p (call_stmt,
1002 BUILT_IN_SETJMP_RECEIVER)))
1003 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1004 &ab_edge_call, false);
1008 if (bb_to_omp_idx)
1009 XDELETE (dispatcher_bbs);
1012 XDELETE (bb_to_omp_idx);
1014 free_omp_regions ();
1016 /* Fold COND_EXPR_COND of each COND_EXPR. */
1017 fold_cond_expr_cond ();
1020 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1021 needed. Returns true if new bbs were created.
1022 Note: This is transitional code, and should not be used for new code. We
1023 should be able to get rid of this by rewriting all target va-arg
1024 gimplification hooks to use an interface gimple_build_cond_value as described
1025 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1027 bool
1028 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1030 gimple stmt = gsi_stmt (*gsi);
1031 basic_block bb = gimple_bb (stmt);
1032 basic_block lastbb, afterbb;
1033 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1034 edge e;
1035 lastbb = make_blocks_1 (seq, bb);
1036 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1037 return false;
1038 e = split_block (bb, stmt);
1039 /* Move e->dest to come after the new basic blocks. */
1040 afterbb = e->dest;
1041 unlink_block (afterbb);
1042 link_block (afterbb, lastbb);
1043 redirect_edge_succ (e, bb->next_bb);
1044 bb = bb->next_bb;
1045 while (bb != afterbb)
1047 struct omp_region *cur_region = NULL;
1048 int cur_omp_region_idx = 0;
1049 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1050 gcc_assert (!mer && !cur_region);
1051 add_bb_to_loop (bb, afterbb->loop_father);
1052 bb = bb->next_bb;
1054 return true;
1057 /* Find the next available discriminator value for LOCUS. The
1058 discriminator distinguishes among several basic blocks that
1059 share a common locus, allowing for more accurate sample-based
1060 profiling. */
1062 static int
1063 next_discriminator_for_locus (location_t locus)
1065 struct locus_discrim_map item;
1066 struct locus_discrim_map **slot;
1068 item.locus = locus;
1069 item.discriminator = 0;
1070 slot = discriminator_per_locus->find_slot_with_hash (
1071 &item, LOCATION_LINE (locus), INSERT);
1072 gcc_assert (slot);
1073 if (*slot == HTAB_EMPTY_ENTRY)
1075 *slot = XNEW (struct locus_discrim_map);
1076 gcc_assert (*slot);
1077 (*slot)->locus = locus;
1078 (*slot)->discriminator = 0;
1080 (*slot)->discriminator++;
1081 return (*slot)->discriminator;
1084 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */
1086 static bool
1087 same_line_p (location_t locus1, location_t locus2)
1089 expanded_location from, to;
/* Identical location_t values trivially denote the same line.  */
1091 if (locus1 == locus2)
1092 return true;
1094 from = expand_location (locus1);
1095 to = expand_location (locus2);
/* Compare line numbers first; only then fall back to comparing file
   names (pointer equality short-circuits the string compare).  */
1097 if (from.line != to.line)
1098 return false;
1099 if (from.file == to.file)
1100 return true;
1101 return (from.file != NULL
1102 && to.file != NULL
1103 && filename_cmp (from.file, to.file) == 0);
1106 /* Assign discriminators to each basic block.  */
1108 static void
1109 assign_discriminators (void)
1111 basic_block bb;
1113 FOR_EACH_BB_FN (bb, cfun)
1115 edge e;
1116 edge_iterator ei;
1117 gimple last = last_stmt (bb);
1118 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
/* Blocks without a located last statement need no discriminator.  */
1120 if (locus == UNKNOWN_LOCATION)
1121 continue;
1123 FOR_EACH_EDGE (e, ei, bb->succs)
1125 gimple first = first_non_label_stmt (e->dest);
1126 gimple last = last_stmt (e->dest);
1127 if ((first && same_line_p (locus, gimple_location (first)))
1128 || (last && same_line_p (locus, gimple_location (last))))
/* A successor shares our line.  If the successor already carries a
   discriminator, distinguish ourselves instead; otherwise give the
   successor a fresh one.  */
1130 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1131 bb->discriminator = next_discriminator_for_locus (locus);
1132 else
1133 e->dest->discriminator = next_discriminator_for_locus (locus);
1139 /* Create the edges for a GIMPLE_COND starting at block BB.  */
1141 static void
1142 make_cond_expr_edges (basic_block bb)
1144 gcond *entry = as_a <gcond *> (last_stmt (bb));
1145 gimple then_stmt, else_stmt;
1146 basic_block then_bb, else_bb;
1147 tree then_label, else_label;
1148 edge e;
1150 gcc_assert (entry);
1151 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1153 /* Entry basic blocks for each component.  */
1154 then_label = gimple_cond_true_label (entry);
1155 else_label = gimple_cond_false_label (entry);
1156 then_bb = label_to_block (then_label);
1157 else_bb = label_to_block (else_label);
1158 then_stmt = first_stmt (then_bb);
1159 else_stmt = first_stmt (else_bb);
1161 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1162 e->goto_locus = gimple_location (then_stmt);
1163 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
/* NOTE(review): the true-edge result is used unguarded above while the
   false edge is NULL-checked here — presumably make_edge may return NULL
   when the edge already exists (then_bb == else_bb); confirm.  */
1164 if (e)
1165 e->goto_locus = gimple_location (else_stmt);
1167 /* We do not need the labels anymore.  Clearing them also avoids
   structure-sharing issues when the condition is copied later.  */
1168 gimple_cond_set_true_label (entry, NULL_TREE);
1169 gimple_cond_set_false_label (entry, NULL_TREE);
1173 /* Called for each element in the hash table (P) as we delete the
1174 edge to cases hash table.
1176 Clear all the TREE_CHAINs to prevent problems with copying of
1177 SWITCH_EXPRs and structure sharing rules, then free the hash table
1178 element.  */
1180 bool
1181 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1183 tree t, next;
/* Walk the CASE_CHAIN list rooted at VALUE, severing each link.  */
1185 for (t = value; t; t = next)
1187 next = CASE_CHAIN (t);
1188 CASE_CHAIN (t) = NULL;
/* Returning true tells hash_map::traverse to keep iterating.  */
1191 return true;
1194 /* Start recording information mapping edges to case labels.  */
1196 void
1197 start_recording_case_labels (void)
/* Must not already be recording; allocate the edge->cases map and the
   bitmap of switch-ending blocks touched while recording.  */
1199 gcc_assert (edge_to_cases == NULL);
1200 edge_to_cases = new hash_map<edge, tree>;
1201 touched_switch_bbs = BITMAP_ALLOC (NULL);
1204 /* Return nonzero if we are recording information for case labels.
   Recording is active between start_recording_case_labels and
   end_recording_case_labels.  */
1206 static bool
1207 recording_case_labels_p (void)
1209 return (edge_to_cases != NULL);
1212 /* Stop recording information mapping edges to case labels and
1213 remove any information we have recorded.  */
1214 void
1215 end_recording_case_labels (void)
1217 bitmap_iterator bi;
1218 unsigned i;
/* Break the CASE_CHAIN links in every recorded entry before freeing
   the map (see edge_to_cases_cleanup).  */
1219 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1220 delete edge_to_cases;
1221 edge_to_cases = NULL;
/* Re-group case labels of every switch whose block was touched while
   recording; a touched block may have been deleted, hence the NULL check.  */
1222 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1224 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1225 if (bb)
1227 gimple stmt = last_stmt (bb);
1228 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1229 group_case_labels_stmt (as_a <gswitch *> (stmt));
1232 BITMAP_FREE (touched_switch_bbs);
1235 /* If we are inside a {start,end}_recording_cases block, then return
1236 a chain of CASE_LABEL_EXPRs from T which reference E.
1238 Otherwise return NULL.  */
1240 static tree
1241 get_cases_for_edge (edge e, gswitch *t)
1243 tree *slot;
1244 size_t i, n;
1246 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1247 chains available.  Return NULL so the caller can detect this case.  */
1248 if (!recording_case_labels_p ())
1249 return NULL;
1251 slot = edge_to_cases->get (e);
1252 if (slot)
1253 return *slot;
1255 /* If we did not find E in the hash table, then this must be the first
1256 time we have been queried for information about E & T.  Add all the
1257 elements from T to the hash table then perform the query again.  */
1259 n = gimple_switch_num_labels (t);
1260 for (i = 0; i < n; i++)
1262 tree elt = gimple_switch_label (t, i);
1263 tree lab = CASE_LABEL (elt);
1264 basic_block label_bb = label_to_block (lab);
1265 edge this_edge = find_edge (e->src, label_bb);
1267 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1268 a new chain.  Labels are pushed at the head of each chain.  */
1269 tree &s = edge_to_cases->get_or_insert (this_edge);
1270 CASE_CHAIN (elt) = s;
1271 s = elt;
/* After populating, the entry for E must exist.  */
1274 return *edge_to_cases->get (e);
1277 /* Create the edges for a GIMPLE_SWITCH starting at block BB.  */
1279 static void
1280 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1282 size_t i, n;
1284 n = gimple_switch_num_labels (entry);
/* One edge per case label; make_edge with flags 0 will not duplicate
   an edge to a block that several cases share.  */
1286 for (i = 0; i < n; ++i)
1288 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1289 basic_block label_bb = label_to_block (lab);
1290 make_edge (bb, label_bb, 0);
1295 /* Return the basic block holding label DEST. */
1297 basic_block
1298 label_to_block_fn (struct function *ifun, tree dest)
1300 int uid = LABEL_DECL_UID (dest);
1302 /* We would die hard when faced by an undefined label. Emit a label to
1303 the very first basic block. This will hopefully make even the dataflow
1304 and undefined variable warnings quite right. */
1305 if (seen_error () && uid < 0)
1307 gimple_stmt_iterator gsi =
1308 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1309 gimple stmt;
1311 stmt = gimple_build_label (dest);
1312 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1313 uid = LABEL_DECL_UID (dest);
1315 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1316 return NULL;
1317 return (*ifun->cfg->x_label_to_block_map)[uid];
1320 /* Create edges for a goto statement at block BB.  Returns true
1321 if abnormal edges should be created.  */
1323 static bool
1324 make_goto_expr_edges (basic_block bb)
1326 gimple_stmt_iterator last = gsi_last_bb (bb);
1327 gimple goto_t = gsi_stmt (last);
1329 /* A simple GOTO creates normal edges.  */
1330 if (simple_goto_p (goto_t))
1332 tree dest = gimple_goto_dest (goto_t);
1333 basic_block label_bb = label_to_block (dest);
1334 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1335 e->goto_locus = gimple_location (goto_t);
/* The goto itself becomes redundant once the edge encodes it.  */
1336 gsi_remove (&last, true);
1337 return false;
1340 /* A computed GOTO creates abnormal edges.  */
1341 return true;
1344 /* Create edges for an asm statement with labels at block BB. */
1346 static void
1347 make_gimple_asm_edges (basic_block bb)
1349 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1350 int i, n = gimple_asm_nlabels (stmt);
1352 for (i = 0; i < n; ++i)
1354 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1355 basic_block label_bb = label_to_block (label);
1356 make_edge (bb, label_bb, 0);
1360 /*---------------------------------------------------------------------------
1361 Flowgraph analysis
1362 ---------------------------------------------------------------------------*/
1364 /* Cleanup useless labels in basic blocks. This is something we wish
1365 to do early because it allows us to group case labels before creating
1366 the edges for the CFG, and it speeds up block statement iterators in
1367 all passes later on.
1368 We rerun this pass after CFG is created, to get rid of the labels that
1369 are no longer referenced. From then on we do not run it any more, since
1370 (almost) no new labels should be created. */
1372 /* A map from basic block index to the leading label of that block.
   Allocated and indexed by bb->index in cleanup_dead_labels, freed
   there as well.  */
1373 static struct label_record
1375 /* The label.  */
1376 tree label;
1378 /* True if the label is referenced from somewhere.  */
1379 bool used;
1380 } *label_for_bb;
1382 /* Given LABEL return the first label in the same basic block.
   Side effect: marks that block's main label as used.  */
1384 static tree
1385 main_block_label (tree label)
1387 basic_block bb = label_to_block (label);
1388 tree main_label = label_for_bb[bb->index].label;
1390 /* label_to_block possibly inserted undefined label into the chain.  */
1391 if (!main_label)
1393 label_for_bb[bb->index].label = label;
1394 main_label = label;
1397 label_for_bb[bb->index].used = true;
1398 return main_label;
1401 /* Clean up redundant labels within the exception tree. */
1403 static void
1404 cleanup_dead_labels_eh (void)
1406 eh_landing_pad lp;
1407 eh_region r;
1408 tree lab;
1409 int i;
1411 if (cfun->eh == NULL)
1412 return;
1414 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1415 if (lp && lp->post_landing_pad)
1417 lab = main_block_label (lp->post_landing_pad);
1418 if (lab != lp->post_landing_pad)
1420 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1421 EH_LANDING_PAD_NR (lab) = lp->index;
1425 FOR_ALL_EH_REGION (r)
1426 switch (r->type)
1428 case ERT_CLEANUP:
1429 case ERT_MUST_NOT_THROW:
1430 break;
1432 case ERT_TRY:
1434 eh_catch c;
1435 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1437 lab = c->label;
1438 if (lab)
1439 c->label = main_block_label (lab);
1442 break;
1444 case ERT_ALLOWED_EXCEPTIONS:
1445 lab = r->u.allowed.label;
1446 if (lab)
1447 r->u.allowed.label = main_block_label (lab);
1448 break;
1453 /* Cleanup redundant labels.  This is a three-step process:
1454 1) Find the leading label for each block.
1455 2) Redirect all references to labels to the leading labels.
1456 3) Cleanup all useless labels.  */
1458 void
1459 cleanup_dead_labels (void)
1461 basic_block bb;
1462 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1464 /* Find a suitable label for each block.  We use the first user-defined
1465 label if there is one, or otherwise just the first label we see.  */
1466 FOR_EACH_BB_FN (bb, cfun)
1468 gimple_stmt_iterator i;
1470 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1472 tree label;
1473 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
/* Labels only occur at the start of a block; stop at the first
   non-label statement.  */
1475 if (!label_stmt)
1476 break;
1478 label = gimple_label_label (label_stmt);
1480 /* If we have not yet seen a label for the current block,
1481 remember this one and see if there are more labels.  */
1482 if (!label_for_bb[bb->index].label)
1484 label_for_bb[bb->index].label = label;
1485 continue;
1488 /* If we did see a label for the current block already, but it
1489 is an artificially created label, replace it if the current
1490 label is a user defined label.  */
1491 if (!DECL_ARTIFICIAL (label)
1492 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1494 label_for_bb[bb->index].label = label;
1495 break;
1500 /* Now redirect all jumps/branches to the selected label.
1501 First do so for each block ending in a control statement.  */
1502 FOR_EACH_BB_FN (bb, cfun)
1504 gimple stmt = last_stmt (bb);
1505 tree label, new_label;
1507 if (!stmt)
1508 continue;
1510 switch (gimple_code (stmt))
1512 case GIMPLE_COND:
1514 gcond *cond_stmt = as_a <gcond *> (stmt);
1515 label = gimple_cond_true_label (cond_stmt);
1516 if (label)
1518 new_label = main_block_label (label);
1519 if (new_label != label)
1520 gimple_cond_set_true_label (cond_stmt, new_label);
1523 label = gimple_cond_false_label (cond_stmt);
1524 if (label)
1526 new_label = main_block_label (label);
1527 if (new_label != label)
1528 gimple_cond_set_false_label (cond_stmt, new_label);
1531 break;
1533 case GIMPLE_SWITCH:
1535 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1536 size_t i, n = gimple_switch_num_labels (switch_stmt);
1538 /* Replace all destination labels.  */
1539 for (i = 0; i < n; ++i)
1541 tree case_label = gimple_switch_label (switch_stmt, i);
1542 label = CASE_LABEL (case_label);
1543 new_label = main_block_label (label);
1544 if (new_label != label)
1545 CASE_LABEL (case_label) = new_label;
1547 break;
1550 case GIMPLE_ASM:
1552 gasm *asm_stmt = as_a <gasm *> (stmt);
1553 int i, n = gimple_asm_nlabels (asm_stmt);
1555 for (i = 0; i < n; ++i)
1557 tree cons = gimple_asm_label_op (asm_stmt, i);
1558 tree label = main_block_label (TREE_VALUE (cons));
1559 TREE_VALUE (cons) = label;
1561 break;
1564 /* We have to handle gotos until they're removed, and we don't
1565 remove them until after we've created the CFG edges.  */
1566 case GIMPLE_GOTO:
1567 if (!computed_goto_p (stmt))
1569 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1570 label = gimple_goto_dest (goto_stmt);
1571 new_label = main_block_label (label);
1572 if (new_label != label)
1573 gimple_goto_set_dest (goto_stmt, new_label);
1575 break;
1577 case GIMPLE_TRANSACTION:
1579 gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1580 tree label = gimple_transaction_label (trans_stmt);
1581 if (label)
1583 tree new_label = main_block_label (label);
1584 if (new_label != label)
1585 gimple_transaction_set_label (trans_stmt, new_label);
1588 break;
1590 default:
1591 break;
1595 /* Do the same for the exception region tree labels.  */
1596 cleanup_dead_labels_eh ();
1598 /* Finally, purge dead labels.  All user-defined labels and labels that
1599 can be the target of non-local gotos and labels which have their
1600 address taken are preserved.  */
1601 FOR_EACH_BB_FN (bb, cfun)
1603 gimple_stmt_iterator i;
1604 tree label_for_this_bb = label_for_bb[bb->index].label;
1606 if (!label_for_this_bb)
1607 continue;
1609 /* If the main label of the block is unused, we may still remove it.  */
1610 if (!label_for_bb[bb->index].used)
1611 label_for_this_bb = NULL;
1613 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1615 tree label;
1616 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1618 if (!label_stmt)
1619 break;
1621 label = gimple_label_label (label_stmt);
/* Keep the block's (used) main label, user labels, non-local
   labels and forced (address-taken) labels; delete the rest.  */
1623 if (label == label_for_this_bb
1624 || !DECL_ARTIFICIAL (label)
1625 || DECL_NONLOCAL (label)
1626 || FORCED_LABEL (label))
1627 gsi_next (&i);
1628 else
1629 gsi_remove (&i, true);
1633 free (label_for_bb);
1636 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1637 the ones jumping to the same label.
1638 Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1640 void
1641 group_case_labels_stmt (gswitch *stmt)
1643 int old_size = gimple_switch_num_labels (stmt);
1644 int i, j, new_size = old_size;
1645 basic_block default_bb = NULL;
1647 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1649 /* Look for possible opportunities to merge cases.  Label 0 is the
   default case, so start at 1.  */
1650 i = 1;
1651 while (i < old_size)
1653 tree base_case, base_high;
1654 basic_block base_bb;
1656 base_case = gimple_switch_label (stmt, i);
1658 gcc_assert (base_case);
1659 base_bb = label_to_block (CASE_LABEL (base_case));
1661 /* Discard cases that have the same destination as the
1662 default case.  Dropped slots are marked NULL_TREE and the
   vector is compacted below.  */
1663 if (base_bb == default_bb)
1665 gimple_switch_set_label (stmt, i, NULL_TREE);
1666 i++;
1667 new_size--;
1668 continue;
1671 base_high = CASE_HIGH (base_case)
1672 ? CASE_HIGH (base_case)
1673 : CASE_LOW (base_case);
1674 i++;
1676 /* Try to merge case labels.  Break out when we reach the end
1677 of the label vector or when we cannot merge the next case
1678 label with the current one.  */
1679 while (i < old_size)
1681 tree merge_case = gimple_switch_label (stmt, i);
1682 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1683 wide_int bhp1 = wi::add (base_high, 1);
1685 /* Merge the cases if they jump to the same place,
1686 and their ranges are consecutive.  */
1687 if (merge_bb == base_bb
1688 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1690 base_high = CASE_HIGH (merge_case) ?
1691 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1692 CASE_HIGH (base_case) = base_high;
1693 gimple_switch_set_label (stmt, i, NULL_TREE);
1694 new_size--;
1695 i++;
1697 else
1698 break;
1702 /* Compress the case labels in the label vector, and adjust the
1703 length of the vector.  */
1704 for (i = 0, j = 0; i < new_size; i++)
1706 while (! gimple_switch_label (stmt, j))
1707 j++;
1708 gimple_switch_set_label (stmt, i,
1709 gimple_switch_label (stmt, j++));
1712 gcc_assert (new_size <= old_size);
1713 gimple_switch_set_num_labels (stmt, new_size);
1716 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1717 and scan the sorted vector of cases.  Combine the ones jumping to the
1718 same label.  Whole-function driver for group_case_labels_stmt.  */
1720 void
1721 group_case_labels (void)
1723 basic_block bb;
1725 FOR_EACH_BB_FN (bb, cfun)
1727 gimple stmt = last_stmt (bb);
1728 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1729 group_case_labels_stmt (as_a <gswitch *> (stmt));
1733 /* Checks whether we can merge block B into block A.  A must be B's
   sole predecessor, B its sole non-complex successor target, and no
   statement, label, PHI or loop-structure constraint may forbid it.  */
1735 static bool
1736 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1738 gimple stmt;
1740 if (!single_succ_p (a))
1741 return false;
1743 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1744 return false;
1746 if (single_succ (a) != b)
1747 return false;
1749 if (!single_pred_p (b))
1750 return false;
1752 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1753 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1754 return false;
1756 /* If A ends by a statement causing exceptions or something similar, we
1757 cannot merge the blocks.  */
1758 stmt = last_stmt (a);
1759 if (stmt && stmt_ends_bb_p (stmt))
1760 return false;
1762 /* Do not allow a block with only a non-local label to be merged.  */
1763 if (stmt)
1764 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1765 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1766 return false;
1768 /* Examine the labels at the beginning of B.  */
1769 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1770 gsi_next (&gsi))
1772 tree lab;
1773 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1774 if (!label_stmt)
1775 break;
1776 lab = gimple_label_label (label_stmt);
1778 /* Do not remove user forced labels or for -O0 any user labels.  */
1779 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1780 return false;
1783 /* Protect simple loop latches.  We only want to avoid merging
1784 the latch with the loop header or with a block in another
1785 loop in this case.  */
1786 if (current_loops
1787 && b->loop_father->latch == b
1788 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1789 && (b->loop_father->header == a
1790 || b->loop_father != a->loop_father))
1791 return false;
1793 /* It must be possible to eliminate all phi nodes in B.  If ssa form
1794 is not up-to-date and a name-mapping is registered, we cannot eliminate
1795 any phis.  Symbols marked for renaming are never a problem though.  */
1796 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1797 gsi_next (&gsi))
1799 gphi *phi = gsi.phi ();
1800 /* Technically only new names matter.  */
1801 if (name_registered_for_update_p (PHI_RESULT (phi)))
1802 return false;
1805 /* When not optimizing, don't merge if we'd lose goto_locus.  */
1806 if (!optimize
1807 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1809 location_t goto_locus = single_succ_edge (a)->goto_locus;
1810 gimple_stmt_iterator prev, next;
1811 prev = gsi_last_nondebug_bb (a);
1812 next = gsi_after_labels (b);
1813 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1814 gsi_next_nondebug (&next);
/* The locus is only preserved if a neighbouring statement on either
   side of the edge already carries it.  */
1815 if ((gsi_end_p (prev)
1816 || gimple_location (gsi_stmt (prev)) != goto_locus)
1817 && (gsi_end_p (next)
1818 || gimple_location (gsi_stmt (next)) != goto_locus))
1819 return false;
1822 return true;
1825 /* Replaces all uses of NAME by VAL.  Folds and updates every
   affected statement; also fixes up loop info and EH edges.  */
1827 void
1828 replace_uses_by (tree name, tree val)
1830 imm_use_iterator imm_iter;
1831 use_operand_p use;
1832 gimple stmt;
1833 edge e;
1835 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1837 /* Mark the block if we change the last stmt in it.  */
1838 if (cfgcleanup_altered_bbs
1839 && stmt_ends_bb_p (stmt))
1840 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1842 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1844 replace_exp (use, val);
1846 if (gimple_code (stmt) == GIMPLE_PHI)
1848 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1849 PHI_ARG_INDEX_FROM_USE (use));
1850 if (e->flags & EDGE_ABNORMAL
1851 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1853 /* This can only occur for virtual operands, since
1854 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1855 would prevent replacement.  */
1856 gcc_checking_assert (virtual_operand_p (name));
1857 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
/* Non-PHI statements need folding/updating after the substitution;
   PHIs need none of this.  */
1862 if (gimple_code (stmt) != GIMPLE_PHI)
1864 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1865 gimple orig_stmt = stmt;
1866 size_t i;
1868 /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1869 on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
1870 only change sth from non-invariant to invariant, and only
1871 when propagating constants.  */
1872 if (is_gimple_min_invariant (val))
1873 for (i = 0; i < gimple_num_ops (stmt); i++)
1875 tree op = gimple_op (stmt, i);
1876 /* Operands may be empty here.  For example, the labels
1877 of a GIMPLE_COND are nulled out following the creation
1878 of the corresponding CFG edges.  */
1879 if (op && TREE_CODE (op) == ADDR_EXPR)
1880 recompute_tree_invariant_for_addr_expr (op);
1883 if (fold_stmt (&gsi))
1884 stmt = gsi_stmt (gsi);
1886 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1887 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1889 update_stmt (stmt);
/* All uses must have been rewritten by now.  */
1893 gcc_checking_assert (has_zero_uses (name));
1895 /* Also update the trees stored in loop structures.  */
1896 if (current_loops)
1898 struct loop *loop;
1900 FOR_EACH_LOOP (loop, 0)
1902 substitute_in_loop_info (loop, name, val);
1907 /* Merge block B into block A.  Assumes gimple_can_merge_blocks_p
   already approved the merge: A is B's single predecessor.  */
1909 static void
1910 gimple_merge_blocks (basic_block a, basic_block b)
1912 gimple_stmt_iterator last, gsi;
1913 gphi_iterator psi;
1915 if (dump_file)
1916 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1918 /* Remove all single-valued PHI nodes from block B of the form
1919 V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1920 gsi = gsi_last_bb (a);
1921 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1923 gimple phi = gsi_stmt (psi);
1924 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1925 gimple copy;
1926 bool may_replace_uses = (virtual_operand_p (def)
1927 || may_propagate_copy (def, use));
1929 /* In case we maintain loop closed ssa form, do not propagate arguments
1930 of loop exit phi nodes.  */
1931 if (current_loops
1932 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1933 && !virtual_operand_p (def)
1934 && TREE_CODE (use) == SSA_NAME
1935 && a->loop_father != b->loop_father)
1936 may_replace_uses = false;
1938 if (!may_replace_uses)
1940 gcc_assert (!virtual_operand_p (def));
1942 /* Note that just emitting the copies is fine -- there is no problem
1943 with ordering of phi nodes.  This is because A is the single
1944 predecessor of B, therefore results of the phi nodes cannot
1945 appear as arguments of the phi nodes.  */
1946 copy = gimple_build_assign (def, use);
1947 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1948 remove_phi_node (&psi, false);
1950 else
1952 /* If we deal with a PHI for virtual operands, we can simply
1953 propagate these without fussing with folding or updating
1954 the stmt.  */
1955 if (virtual_operand_p (def))
1957 imm_use_iterator iter;
1958 use_operand_p use_p;
1959 gimple stmt;
1961 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1962 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1963 SET_USE (use_p, use);
1965 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1966 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1968 else
1969 replace_uses_by (def, use);
1971 remove_phi_node (&psi, true);
1975 /* Ensure that B follows A.  */
1976 move_block_after (b, a);
1978 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1979 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1981 /* Remove labels from B and set gimple_bb to A for other statements.  */
1982 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1984 gimple stmt = gsi_stmt (gsi);
1985 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1987 tree label = gimple_label_label (label_stmt);
1988 int lp_nr;
1990 gsi_remove (&gsi, false);
1992 /* Now that we can thread computed gotos, we might have
1993 a situation where we have a forced label in block B
1994 However, the label at the start of block B might still be
1995 used in other ways (think about the runtime checking for
1996 Fortran assigned gotos).  So we can not just delete the
1997 label.  Instead we move the label to the start of block A.  */
1998 if (FORCED_LABEL (label))
2000 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2001 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2003 /* Other user labels keep around in a form of a debug stmt.  */
2004 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2006 gimple dbg = gimple_build_debug_bind (label,
2007 integer_zero_node,
2008 stmt);
2009 gimple_debug_bind_reset_value (dbg);
2010 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
/* A deleted label that numbered an EH landing pad leaves the pad
   without a post-landing-pad label.  */
2013 lp_nr = EH_LANDING_PAD_NR (label);
2014 if (lp_nr)
2016 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2017 lp->post_landing_pad = NULL;
2020 else
2022 gimple_set_bb (stmt, a);
2023 gsi_next (&gsi);
2027 /* When merging two BBs, if their counts are different, the larger count
2028 is selected as the new bb count.  This is to handle inconsistent
2029 profiles.  */
2030 if (a->loop_father == b->loop_father)
2032 a->count = MAX (a->count, b->count);
2033 a->frequency = MAX (a->frequency, b->frequency);
2036 /* Merge the sequences.  */
2037 last = gsi_last_bb (a);
2038 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2039 set_bb_seq (b, NULL);
2041 if (cfgcleanup_altered_bbs)
2042 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2046 /* Return the one of two successors of BB that is not reachable by a
2047 complex edge, if there is one.  Else, return BB.  We use
2048 this in optimizations that use post-dominators for their heuristics,
2049 to catch the cases in C++ where function calls are involved.  */
2051 basic_block
2052 single_noncomplex_succ (basic_block bb)
2054 edge e0, e1;
/* Only the exactly-two-successors shape is handled.  */
2055 if (EDGE_COUNT (bb->succs) != 2)
2056 return bb;
2058 e0 = EDGE_SUCC (bb, 0);
2059 e1 = EDGE_SUCC (bb, 1);
2060 if (e0->flags & EDGE_COMPLEX)
2061 return e1->dest;
2062 if (e1->flags & EDGE_COMPLEX)
2063 return e0->dest;
/* Neither edge is complex: no unique answer, fall back to BB.  */
2065 return bb;
2068 /* T is CALL_EXPR.  Set current_function_calls_* flags.
   Records whether CFUN may call alloca or setjmp-like functions.  */
2070 void
2071 notice_special_calls (gcall *call)
2073 int flags = gimple_call_flags (call);
2075 if (flags & ECF_MAY_BE_ALLOCA)
2076 cfun->calls_alloca = true;
2077 if (flags & ECF_RETURNS_TWICE)
2078 cfun->calls_setjmp = true;
2082 /* Clear flags set by notice_special_calls.  Used by dead code removal
2083 to update the flags; they get re-set as surviving calls are noticed.  */
2085 void
2086 clear_special_calls (void)
2088 cfun->calls_alloca = false;
2089 cfun->calls_setjmp = false;
2092 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2094 static void
2095 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2097 /* Since this block is no longer reachable, we can just delete all
2098 of its PHI nodes.  */
2099 remove_phi_nodes (bb);
2101 /* Remove edges to BB's successors.  Removing an edge shifts the
   vector, so always delete successor 0 until none remain.  */
2102 while (EDGE_COUNT (bb->succs) > 0)
2103 remove_edge (EDGE_SUCC (bb, 0));
2107 /* Remove statements of basic block BB.  Preserves forced/non-local
   labels by moving them into the previous block.  */
2109 static void
2110 remove_bb (basic_block bb)
2112 gimple_stmt_iterator i;
2114 if (dump_file)
2116 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2117 if (dump_flags & TDF_DETAILS)
2119 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2120 fprintf (dump_file, "\n");
2124 if (current_loops)
2126 struct loop *loop = bb->loop_father;
2128 /* If a loop gets removed, clean up the information associated
2129 with it.  */
2130 if (loop->latch == bb
2131 || loop->header == bb)
2132 free_numbers_of_iterations_estimates_loop (loop);
2135 /* Remove all the instructions in the block.  */
2136 if (bb_seq (bb) != NULL)
2138 /* Walk backwards so as to get a chance to substitute all
2139 released DEFs into debug stmts.  See
2140 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2141 details.  */
2142 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2144 gimple stmt = gsi_stmt (i);
2145 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2146 if (label_stmt
2147 && (FORCED_LABEL (gimple_label_label (label_stmt))
2148 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2150 basic_block new_bb;
2151 gimple_stmt_iterator new_gsi;
2153 /* A non-reachable non-local label may still be referenced.
2154 But it no longer needs to carry the extra semantics of
2155 non-locality.  */
2156 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2158 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2159 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2162 new_bb = bb->prev_bb;
2163 new_gsi = gsi_start_bb (new_bb);
2164 gsi_remove (&i, false);
2165 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2167 else
2169 /* Release SSA definitions if we are in SSA.  Note that we
2170 may be called when not in SSA.  For example,
2171 final_cleanup calls this function via
2172 cleanup_tree_cfg.  */
2173 if (gimple_in_ssa_p (cfun))
2174 release_defs (stmt);
2176 gsi_remove (&i, true);
/* gsi_remove may invalidate the iterator at the sequence end;
   restart from the (new) last statement in that case.  */
2179 if (gsi_end_p (i))
2180 i = gsi_last_bb (bb);
2181 else
2182 gsi_prev (&i);
2186 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2187 bb->il.gimple.seq = NULL;
2188 bb->il.gimple.phi_nodes = NULL;
2192 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2193 predicate VAL, return the edge that will be taken out of the block.
2194 If VAL does not match a unique edge, NULL is returned.  */
2196 edge
2197 find_taken_edge (basic_block bb, tree val)
2199 gimple stmt;
2201 stmt = last_stmt (bb);
2203 gcc_assert (stmt);
2204 gcc_assert (is_ctrl_stmt (stmt));
2206 if (val == NULL)
2207 return NULL;
/* Only constant predicates determine the taken edge statically.  */
2209 if (!is_gimple_min_invariant (val))
2210 return NULL;
2212 if (gimple_code (stmt) == GIMPLE_COND)
2213 return find_taken_edge_cond_expr (bb, val);
2215 if (gimple_code (stmt) == GIMPLE_SWITCH)
2216 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2218 if (computed_goto_p (stmt))
2220 /* Only optimize if the argument is a label, if the argument is
2221 not a label then we can not construct a proper CFG.
2223 It may be the case that we only need to allow the LABEL_REF to
2224 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2225 appear inside a LABEL_EXPR just to be safe.  */
2226 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2227 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2228 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2229 return NULL;
/* A control statement other than cond/switch/computed-goto cannot
   reach here given the is_ctrl_stmt assert above.  */
2232 gcc_unreachable ();
2235 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2236 statement, determine which of the outgoing edges will be taken out of the
2237 block.  Return NULL if either edge may be taken.  */
2239 static edge
2240 find_taken_edge_computed_goto (basic_block bb, tree val)
2242 basic_block dest;
2243 edge e = NULL;
/* VAL is a LABEL_DECL here (checked by the caller); map it to its
   block and look for the matching edge.  */
2245 dest = label_to_block (val);
2246 if (dest)
2248 e = find_edge (bb, dest);
2249 gcc_assert (e != NULL);
2252 return e;
2255 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2256 statement, determine which of the two edges will be taken out of the
2257 block.  Return NULL if either edge may be taken.  */
2259 static edge
2260 find_taken_edge_cond_expr (basic_block bb, tree val)
2262 edge true_edge, false_edge;
2264 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
/* VAL must be an integer constant; zero selects the false edge,
   anything else the true edge.  */
2266 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2267 return (integer_zerop (val) ? false_edge : true_edge);
2270 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2271 statement, determine which edge will be taken out of the block.  Return
2272 NULL if any edge may be taken.  */
2274 static edge
2275 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2276 tree val)
2278 basic_block dest_bb;
2279 edge e;
2280 tree taken_case;
/* Binary-search the sorted case labels; falls back to the default
   case when no range matches VAL.  */
2282 taken_case = find_case_label_for_value (switch_stmt, val);
2283 dest_bb = label_to_block (CASE_LABEL (taken_case));
2285 e = find_edge (bb, dest_bb);
2286 gcc_assert (e);
2287 return e;
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over the sorted non-default labels.  Each iteration
     either returns a label that covers VAL or narrows [low, high).  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label: match only on CASE_LOW.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No label covers VAL; the default label is taken.  */
  return default_case;
}
2333 /* Dump a basic block on stderr. */
2335 void
2336 gimple_debug_bb (basic_block bb)
2338 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2342 /* Dump basic block with index N on stderr. */
2344 basic_block
2345 gimple_debug_bb_n (int n)
2347 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2348 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  /* Convenience wrapper for use from the debugger.  */
  gimple_dump_cfg (stderr, flags);
}
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      /* With details requested, emit a header plus a brief textual
	 summary of the CFG before the function body itself.  */
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block_for_fn (cfun));

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function with block boundaries marked.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, kept across invocations.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  /* Column layouts for the table: header, int row, long row, total row.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by the basic block structures themselves.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count edges once, via each block's successor list.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2449 /*---------------------------------------------------------------------------
2450 Miscellaneous helpers
2451 ---------------------------------------------------------------------------*/
2453 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2454 flow. Transfers of control flow associated with EH are excluded. */
2456 static bool
2457 call_can_make_abnormal_goto (gimple t)
2459 /* If the function has no non-local labels, then a call cannot make an
2460 abnormal transfer of control. */
2461 if (!cfun->has_nonlocal_label
2462 && !cfun->calls_setjmp)
2463 return false;
2465 /* Likewise if the call has no side effects. */
2466 if (!gimple_has_side_effects (t))
2467 return false;
2469 /* Likewise if the called function is leaf. */
2470 if (gimple_call_flags (t) & ECF_LEAF)
2471 return false;
2473 return true;
2477 /* Return true if T can make an abnormal transfer of control flow.
2478 Transfers of control flow associated with EH are excluded. */
2480 bool
2481 stmt_can_make_abnormal_goto (gimple t)
2483 if (computed_goto_p (t))
2484 return true;
2485 if (is_gimple_call (t))
2486 return call_can_make_abnormal_goto (t);
2487 return false;
2491 /* Return true if T represents a stmt that always transfers control. */
2493 bool
2494 is_ctrl_stmt (gimple t)
2496 switch (gimple_code (t))
2498 case GIMPLE_COND:
2499 case GIMPLE_SWITCH:
2500 case GIMPLE_GOTO:
2501 case GIMPLE_RETURN:
2502 case GIMPLE_RESX:
2503 return true;
2504 default:
2505 return false;
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with label operands can jump to any of those labels.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2555 /* Return true if T is a simple local goto. */
2557 bool
2558 simple_goto_p (gimple t)
2560 return (gimple_code (t) == GIMPLE_GOTO
2561 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label after a nonlocal label still needs its own block.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  /* Otherwise merge this label into the previous label's block
	     and account for it in the statistics.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2610 /* Return true if T should end a basic block. */
2612 bool
2613 stmt_ends_bb_p (gimple t)
2615 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
/* Remove block annotations and other data structures.  */

void
delete_tree_cfg_annotations (void)
{
  /* Release the label-to-block mapping for the current function.  */
  vec_free (label_to_block_map_for_fn (cfun));
}
/* Return the first statement in basic block BB.  Debug statements are
   skipped; return NULL if the block has no non-debug statement.  */

gimple
first_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_start_bb (bb);
  gimple stmt = NULL;

  /* Walk forward past debug stmts; the assignment in the condition
     leaves STMT at the first non-debug statement when the loop exits
     normally, and the body resets it to NULL when we skip one.  */
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_next (&i);
      stmt = NULL;
    }
  return stmt;
}
2643 /* Return the first non-label statement in basic block BB. */
2645 static gimple
2646 first_non_label_stmt (basic_block bb)
2648 gimple_stmt_iterator i = gsi_start_bb (bb);
2649 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2650 gsi_next (&i);
2651 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
/* Return the last statement in basic block BB.  Debug statements are
   skipped; return NULL if the block has no non-debug statement.  */

gimple
last_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_bb (bb);
  gimple stmt = NULL;

  /* Walk backward past debug stmts; mirrors first_stmt.  */
  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
    {
      gsi_prev (&i);
      stmt = NULL;
    }
  return stmt;
}
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

gimple
last_and_only_stmt (basic_block bb)
{
  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
  gimple last, prev;

  if (gsi_end_p (i))
    return NULL;

  last = gsi_stmt (i);
  gsi_prev_nondebug (&i);
  if (gsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just gsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = gsi_stmt (i);
  if (gimple_code (prev) == GIMPLE_LABEL)
    return last;
  else
    return NULL;
}
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  /* The PHI arguments stashed by redirect_edge_and_branch, if any.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def) pairs in lockstep with the PHI nodes
     of NEW_EDGE's destination and re-add each argument on NEW_EDGE.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* Saved entries must still correspond 1:1 to the PHI nodes.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* Drop the stash now that everything has been reinstalled.  */
  redirect_edge_var_map_clear (old_edge);
}
/* Returns the basic block after which the new basic block created
   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
   near its "logical" location.  This is of most help to humans looking
   at debugging dumps.  */

basic_block
split_edge_bb_loc (edge edge_in)
{
  basic_block dest = edge_in->dest;
  basic_block dest_prev = dest->prev_bb;

  if (dest_prev)
    {
      /* If DEST's layout predecessor falls through to DEST on a
	 non-complex edge, don't separate them: place the new block
	 after EDGE_IN's source instead.  */
      edge e = find_edge (dest_prev, dest);
      if (e && !(e->flags & EDGE_COMPLEX))
	return edge_in->src;
    }
  return dest_prev;
}
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* Create the new block inheriting EDGE_IN's profile, and fall
     through unconditionally from it to the original destination.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Retarget EDGE_IN at the new block; this queues the PHI arguments
     that used to flow along it, which we then reinstall on the new
     fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
/* Verify properties of the address expression T with base object BASE.
   Return the offending tree on failure (after calling error), or
   NULL_TREE if everything checks out.  */

static tree
verify_address (tree t, tree base)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Recompute the invariant/side-effect flags of T and verify they were
     already up to date -- a mismatch means some pass changed the
     ADDR_EXPR without updating them.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when ADDR_EXPR changed");
      return t;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when ADDR_EXPR changed");
      return t;
    }

  /* Only local declarations are subject to the remaining check.  */
  if (!(TREE_CODE (base) == VAR_DECL
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return NULL_TREE;

  /* Taking the address of a decl is incompatible with treating it as
     a register.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
      return base;
    }

  return NULL_TREE;
}
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending subtree (after calling error)
   to stop the walk, or NULL to continue.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types carry no gimple operands to verify.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF was replaced by MEM_REF during gimplification.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand must be a constant of pointer type.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	/* Only decls have the TREE_ADDRESSABLE requirement below.  */
	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must have been lowered before reaching gimple.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  /* Position (t2) and size (t1) must fit in a host integer.  */
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  /* The result type's precision (integral) or mode precision
	     (non-integral, non-BLK) must equal the field size.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  /* For non-aggregate bases the access must stay in bounds.  */
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type.  */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no elementwise checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* Case labels must have been unchained by the gimplifier.  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;
}

#undef CHECK_OP
/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
   Returns true if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  if (is_gimple_id (expr))
    return false;

  /* Anything that is neither an ID nor a memory reference is invalid.  */
  if (TREE_CODE (expr) != TARGET_MEM_REF
      && TREE_CODE (expr) != MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  /* Memory references now generally can involve a value conversion.  */

  return false;
}
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Walk down the chain of handled components, checking the operands
     of each level, until we reach the base object.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* Verify the base of the reference chain.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3291 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3292 list of pointer-to types that is trivially convertible to DEST. */
3294 static bool
3295 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3297 tree src;
3299 if (!TYPE_POINTER_TO (src_obj))
3300 return true;
3302 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3303 if (useless_type_conversion_p (dest, src))
3304 return true;
3306 return false;
3309 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3310 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3312 static bool
3313 valid_fixed_convert_types_p (tree type1, tree type2)
3315 return (FIXED_POINT_TYPE_P (type1)
3316 && (INTEGRAL_TYPE_P (type2)
3317 || SCALAR_FLOAT_TYPE_P (type2)
3318 || FIXED_POINT_TYPE_P (type2)));
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no fn tree; ordinary calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The call target must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* "Looping const or pure" only makes sense on a const or pure decl.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A noreturn call never produces a (fixed-size) result.  */
  if (lhs
      && gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
/* Verifies the gimple comparison with the result type TYPE and
   the operands OP0 and OP1.  Returns true on error, false if the
   comparison is well-formed.  */

static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A boolean result is only valid for scalar operands.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3535 /* Verify a gimple assignment statement STMT with an unary rhs.
3536 Returns true if anything is wrong. */
3538 static bool
3539 verify_gimple_assign_unary (gassign *stmt)
3541 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3542 tree lhs = gimple_assign_lhs (stmt);
3543 tree lhs_type = TREE_TYPE (lhs);
3544 tree rhs1 = gimple_assign_rhs1 (stmt);
3545 tree rhs1_type = TREE_TYPE (rhs1);
3547 if (!is_gimple_reg (lhs))
3549 error ("non-register as LHS of unary operation");
3550 return true;
3553 if (!is_gimple_val (rhs1))
3555 error ("invalid operand in unary operation");
3556 return true;
3559 /* First handle conversions. */
3560 switch (rhs_code)
3562 CASE_CONVERT:
3564 /* Allow conversions from pointer type to integral type only if
3565 there is no sign or zero extension involved.
3566 For targets were the precision of ptrofftype doesn't match that
3567 of pointers we need to allow arbitrary conversions to ptrofftype. */
3568 if ((POINTER_TYPE_P (lhs_type)
3569 && INTEGRAL_TYPE_P (rhs1_type))
3570 || (POINTER_TYPE_P (rhs1_type)
3571 && INTEGRAL_TYPE_P (lhs_type)
3572 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3573 || ptrofftype_p (sizetype))))
3574 return false;
3576 /* Allow conversion from integral to offset type and vice versa. */
3577 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3578 && INTEGRAL_TYPE_P (rhs1_type))
3579 || (INTEGRAL_TYPE_P (lhs_type)
3580 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3581 return false;
3583 /* Otherwise assert we are converting between types of the
3584 same kind. */
3585 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3587 error ("invalid types in nop conversion");
3588 debug_generic_expr (lhs_type);
3589 debug_generic_expr (rhs1_type);
3590 return true;
3593 return false;
3596 case ADDR_SPACE_CONVERT_EXPR:
3598 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3599 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3600 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3602 error ("invalid types in address space conversion");
3603 debug_generic_expr (lhs_type);
3604 debug_generic_expr (rhs1_type);
3605 return true;
3608 return false;
3611 case FIXED_CONVERT_EXPR:
3613 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3614 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3616 error ("invalid types in fixed-point conversion");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 return true;
3622 return false;
3625 case FLOAT_EXPR:
3627 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3628 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3629 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3631 error ("invalid types in conversion to floating point");
3632 debug_generic_expr (lhs_type);
3633 debug_generic_expr (rhs1_type);
3634 return true;
3637 return false;
3640 case FIX_TRUNC_EXPR:
3642 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3643 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3644 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3646 error ("invalid types in conversion to integer");
3647 debug_generic_expr (lhs_type);
3648 debug_generic_expr (rhs1_type);
3649 return true;
3652 return false;
3654 case REDUC_MAX_EXPR:
3655 case REDUC_MIN_EXPR:
3656 case REDUC_PLUS_EXPR:
3657 if (!VECTOR_TYPE_P (rhs1_type)
3658 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3660 error ("reduction should convert from vector to element type");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3665 return false;
3667 case VEC_UNPACK_HI_EXPR:
3668 case VEC_UNPACK_LO_EXPR:
3669 case VEC_UNPACK_FLOAT_HI_EXPR:
3670 case VEC_UNPACK_FLOAT_LO_EXPR:
3671 /* FIXME. */
3672 return false;
3674 case NEGATE_EXPR:
3675 case ABS_EXPR:
3676 case BIT_NOT_EXPR:
3677 case PAREN_EXPR:
3678 case CONJ_EXPR:
3679 break;
3681 default:
3682 gcc_unreachable ();
3685 /* For the remaining codes assert there is no conversion involved. */
3686 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3688 error ("non-trivial conversion in unary operation");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3694 return false;
3697 /* Verify a gimple assignment statement STMT with a binary rhs.
3698 Returns true if anything is wrong. */
3700 static bool
3701 verify_gimple_assign_binary (gassign *stmt)
3703 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3704 tree lhs = gimple_assign_lhs (stmt);
3705 tree lhs_type = TREE_TYPE (lhs);
3706 tree rhs1 = gimple_assign_rhs1 (stmt);
3707 tree rhs1_type = TREE_TYPE (rhs1);
3708 tree rhs2 = gimple_assign_rhs2 (stmt);
3709 tree rhs2_type = TREE_TYPE (rhs2);
3711 if (!is_gimple_reg (lhs))
3713 error ("non-register as LHS of binary operation");
3714 return true;
3717 if (!is_gimple_val (rhs1)
3718 || !is_gimple_val (rhs2))
3720 error ("invalid operands in binary operation");
3721 return true;
3724 /* First handle operations that involve different types. */
3725 switch (rhs_code)
3727 case COMPLEX_EXPR:
3729 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3730 || !(INTEGRAL_TYPE_P (rhs1_type)
3731 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3732 || !(INTEGRAL_TYPE_P (rhs2_type)
3733 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3735 error ("type mismatch in complex expression");
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 debug_generic_expr (rhs2_type);
3739 return true;
3742 return false;
3745 case LSHIFT_EXPR:
3746 case RSHIFT_EXPR:
3747 case LROTATE_EXPR:
3748 case RROTATE_EXPR:
3750 /* Shifts and rotates are ok on integral types, fixed point
3751 types and integer vector types. */
3752 if ((!INTEGRAL_TYPE_P (rhs1_type)
3753 && !FIXED_POINT_TYPE_P (rhs1_type)
3754 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3755 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3756 || (!INTEGRAL_TYPE_P (rhs2_type)
3757 /* Vector shifts of vectors are also ok. */
3758 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3759 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3760 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3762 || !useless_type_conversion_p (lhs_type, rhs1_type))
3764 error ("type mismatch in shift expression");
3765 debug_generic_expr (lhs_type);
3766 debug_generic_expr (rhs1_type);
3767 debug_generic_expr (rhs2_type);
3768 return true;
3771 return false;
3774 case WIDEN_LSHIFT_EXPR:
3776 if (!INTEGRAL_TYPE_P (lhs_type)
3777 || !INTEGRAL_TYPE_P (rhs1_type)
3778 || TREE_CODE (rhs2) != INTEGER_CST
3779 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3781 error ("type mismatch in widening vector shift expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 debug_generic_expr (rhs2_type);
3785 return true;
3788 return false;
3791 case VEC_WIDEN_LSHIFT_HI_EXPR:
3792 case VEC_WIDEN_LSHIFT_LO_EXPR:
3794 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3795 || TREE_CODE (lhs_type) != VECTOR_TYPE
3796 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3797 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3798 || TREE_CODE (rhs2) != INTEGER_CST
3799 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3800 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3802 error ("type mismatch in widening vector shift expression");
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 debug_generic_expr (rhs2_type);
3806 return true;
3809 return false;
3812 case PLUS_EXPR:
3813 case MINUS_EXPR:
3815 tree lhs_etype = lhs_type;
3816 tree rhs1_etype = rhs1_type;
3817 tree rhs2_etype = rhs2_type;
3818 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3820 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3821 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3823 error ("invalid non-vector operands to vector valued plus");
3824 return true;
3826 lhs_etype = TREE_TYPE (lhs_type);
3827 rhs1_etype = TREE_TYPE (rhs1_type);
3828 rhs2_etype = TREE_TYPE (rhs2_type);
3830 if (POINTER_TYPE_P (lhs_etype)
3831 || POINTER_TYPE_P (rhs1_etype)
3832 || POINTER_TYPE_P (rhs2_etype))
3834 error ("invalid (pointer) operands to plus/minus");
3835 return true;
3838 /* Continue with generic binary expression handling. */
3839 break;
3842 case POINTER_PLUS_EXPR:
3844 if (!POINTER_TYPE_P (rhs1_type)
3845 || !useless_type_conversion_p (lhs_type, rhs1_type)
3846 || !ptrofftype_p (rhs2_type))
3848 error ("type mismatch in pointer plus expression");
3849 debug_generic_stmt (lhs_type);
3850 debug_generic_stmt (rhs1_type);
3851 debug_generic_stmt (rhs2_type);
3852 return true;
3855 return false;
3858 case TRUTH_ANDIF_EXPR:
3859 case TRUTH_ORIF_EXPR:
3860 case TRUTH_AND_EXPR:
3861 case TRUTH_OR_EXPR:
3862 case TRUTH_XOR_EXPR:
3864 gcc_unreachable ();
3866 case LT_EXPR:
3867 case LE_EXPR:
3868 case GT_EXPR:
3869 case GE_EXPR:
3870 case EQ_EXPR:
3871 case NE_EXPR:
3872 case UNORDERED_EXPR:
3873 case ORDERED_EXPR:
3874 case UNLT_EXPR:
3875 case UNLE_EXPR:
3876 case UNGT_EXPR:
3877 case UNGE_EXPR:
3878 case UNEQ_EXPR:
3879 case LTGT_EXPR:
3880 /* Comparisons are also binary, but the result type is not
3881 connected to the operand types. */
3882 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3884 case WIDEN_MULT_EXPR:
3885 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3886 return true;
3887 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3888 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3890 case WIDEN_SUM_EXPR:
3891 case VEC_WIDEN_MULT_HI_EXPR:
3892 case VEC_WIDEN_MULT_LO_EXPR:
3893 case VEC_WIDEN_MULT_EVEN_EXPR:
3894 case VEC_WIDEN_MULT_ODD_EXPR:
3895 case VEC_PACK_TRUNC_EXPR:
3896 case VEC_PACK_SAT_EXPR:
3897 case VEC_PACK_FIX_TRUNC_EXPR:
3898 /* FIXME. */
3899 return false;
3901 case MULT_EXPR:
3902 case MULT_HIGHPART_EXPR:
3903 case TRUNC_DIV_EXPR:
3904 case CEIL_DIV_EXPR:
3905 case FLOOR_DIV_EXPR:
3906 case ROUND_DIV_EXPR:
3907 case TRUNC_MOD_EXPR:
3908 case CEIL_MOD_EXPR:
3909 case FLOOR_MOD_EXPR:
3910 case ROUND_MOD_EXPR:
3911 case RDIV_EXPR:
3912 case EXACT_DIV_EXPR:
3913 case MIN_EXPR:
3914 case MAX_EXPR:
3915 case BIT_IOR_EXPR:
3916 case BIT_XOR_EXPR:
3917 case BIT_AND_EXPR:
3918 /* Continue with generic binary expression handling. */
3919 break;
3921 default:
3922 gcc_unreachable ();
3925 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3926 || !useless_type_conversion_p (lhs_type, rhs2_type))
3928 error ("type mismatch in binary expression");
3929 debug_generic_stmt (lhs_type);
3930 debug_generic_stmt (rhs1_type);
3931 debug_generic_stmt (rhs2_type);
3932 return true;
3935 return false;
3938 /* Verify a gimple assignment statement STMT with a ternary rhs.
3939 Returns true if anything is wrong. */
3941 static bool
3942 verify_gimple_assign_ternary (gassign *stmt)
3944 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3945 tree lhs = gimple_assign_lhs (stmt);
3946 tree lhs_type = TREE_TYPE (lhs);
3947 tree rhs1 = gimple_assign_rhs1 (stmt);
3948 tree rhs1_type = TREE_TYPE (rhs1);
3949 tree rhs2 = gimple_assign_rhs2 (stmt);
3950 tree rhs2_type = TREE_TYPE (rhs2);
3951 tree rhs3 = gimple_assign_rhs3 (stmt);
3952 tree rhs3_type = TREE_TYPE (rhs3);
3954 if (!is_gimple_reg (lhs))
3956 error ("non-register as LHS of ternary operation");
3957 return true;
3960 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3961 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3962 || !is_gimple_val (rhs2)
3963 || !is_gimple_val (rhs3))
3965 error ("invalid operands in ternary operation");
3966 return true;
3969 /* First handle operations that involve different types. */
3970 switch (rhs_code)
3972 case WIDEN_MULT_PLUS_EXPR:
3973 case WIDEN_MULT_MINUS_EXPR:
3974 if ((!INTEGRAL_TYPE_P (rhs1_type)
3975 && !FIXED_POINT_TYPE_P (rhs1_type))
3976 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3977 || !useless_type_conversion_p (lhs_type, rhs3_type)
3978 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3979 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3981 error ("type mismatch in widening multiply-accumulate expression");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 debug_generic_expr (rhs2_type);
3985 debug_generic_expr (rhs3_type);
3986 return true;
3988 break;
3990 case FMA_EXPR:
3991 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3992 || !useless_type_conversion_p (lhs_type, rhs2_type)
3993 || !useless_type_conversion_p (lhs_type, rhs3_type))
3995 error ("type mismatch in fused multiply-add expression");
3996 debug_generic_expr (lhs_type);
3997 debug_generic_expr (rhs1_type);
3998 debug_generic_expr (rhs2_type);
3999 debug_generic_expr (rhs3_type);
4000 return true;
4002 break;
4004 case VEC_COND_EXPR:
4005 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
4006 || TYPE_SIGN (rhs1_type) != SIGNED
4007 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
4008 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4009 != TYPE_VECTOR_SUBPARTS (lhs_type))
4011 error ("the first argument of a VEC_COND_EXPR must be of a signed "
4012 "integral vector type of the same size and number of "
4013 "elements as the result");
4014 debug_generic_expr (lhs_type);
4015 debug_generic_expr (rhs1_type);
4016 return true;
4018 /* Fallthrough. */
4019 case COND_EXPR:
4020 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4021 || !useless_type_conversion_p (lhs_type, rhs3_type))
4023 error ("type mismatch in conditional expression");
4024 debug_generic_expr (lhs_type);
4025 debug_generic_expr (rhs2_type);
4026 debug_generic_expr (rhs3_type);
4027 return true;
4029 break;
4031 case VEC_PERM_EXPR:
4032 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4033 || !useless_type_conversion_p (lhs_type, rhs2_type))
4035 error ("type mismatch in vector permute expression");
4036 debug_generic_expr (lhs_type);
4037 debug_generic_expr (rhs1_type);
4038 debug_generic_expr (rhs2_type);
4039 debug_generic_expr (rhs3_type);
4040 return true;
4043 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4044 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4045 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4047 error ("vector types expected in vector permute expression");
4048 debug_generic_expr (lhs_type);
4049 debug_generic_expr (rhs1_type);
4050 debug_generic_expr (rhs2_type);
4051 debug_generic_expr (rhs3_type);
4052 return true;
4055 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4056 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4057 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4058 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4059 != TYPE_VECTOR_SUBPARTS (lhs_type))
4061 error ("vectors with different element number found "
4062 "in vector permute expression");
4063 debug_generic_expr (lhs_type);
4064 debug_generic_expr (rhs1_type);
4065 debug_generic_expr (rhs2_type);
4066 debug_generic_expr (rhs3_type);
4067 return true;
4070 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4071 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4072 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4074 error ("invalid mask type in vector permute expression");
4075 debug_generic_expr (lhs_type);
4076 debug_generic_expr (rhs1_type);
4077 debug_generic_expr (rhs2_type);
4078 debug_generic_expr (rhs3_type);
4079 return true;
4082 return false;
4084 case SAD_EXPR:
4085 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4086 || !useless_type_conversion_p (lhs_type, rhs3_type)
4087 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4088 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4089 > GET_MODE_BITSIZE (GET_MODE_INNER
4090 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4092 error ("type mismatch in sad expression");
4093 debug_generic_expr (lhs_type);
4094 debug_generic_expr (rhs1_type);
4095 debug_generic_expr (rhs2_type);
4096 debug_generic_expr (rhs3_type);
4097 return true;
4100 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4101 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4102 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4104 error ("vector types expected in sad expression");
4105 debug_generic_expr (lhs_type);
4106 debug_generic_expr (rhs1_type);
4107 debug_generic_expr (rhs2_type);
4108 debug_generic_expr (rhs3_type);
4109 return true;
4112 return false;
4114 case DOT_PROD_EXPR:
4115 case REALIGN_LOAD_EXPR:
4116 /* FIXME. */
4117 return false;
4119 default:
4120 gcc_unreachable ();
4122 return false;
4125 /* Verify a gimple assignment statement STMT with a single rhs.
4126 Returns true if anything is wrong. */
4128 static bool
4129 verify_gimple_assign_single (gassign *stmt)
4131 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4132 tree lhs = gimple_assign_lhs (stmt);
4133 tree lhs_type = TREE_TYPE (lhs);
4134 tree rhs1 = gimple_assign_rhs1 (stmt);
4135 tree rhs1_type = TREE_TYPE (rhs1);
4136 bool res = false;
4138 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4140 error ("non-trivial conversion at assignment");
4141 debug_generic_expr (lhs_type);
4142 debug_generic_expr (rhs1_type);
4143 return true;
4146 if (gimple_clobber_p (stmt)
4147 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4149 error ("non-decl/MEM_REF LHS in clobber statement");
4150 debug_generic_expr (lhs);
4151 return true;
4154 if (handled_component_p (lhs)
4155 || TREE_CODE (lhs) == MEM_REF
4156 || TREE_CODE (lhs) == TARGET_MEM_REF)
4157 res |= verify_types_in_gimple_reference (lhs, true);
4159 /* Special codes we cannot handle via their class. */
4160 switch (rhs_code)
4162 case ADDR_EXPR:
4164 tree op = TREE_OPERAND (rhs1, 0);
4165 if (!is_gimple_addressable (op))
4167 error ("invalid operand in unary expression");
4168 return true;
4171 /* Technically there is no longer a need for matching types, but
4172 gimple hygiene asks for this check. In LTO we can end up
4173 combining incompatible units and thus end up with addresses
4174 of globals that change their type to a common one. */
4175 if (!in_lto_p
4176 && !types_compatible_p (TREE_TYPE (op),
4177 TREE_TYPE (TREE_TYPE (rhs1)))
4178 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4179 TREE_TYPE (op)))
4181 error ("type mismatch in address expression");
4182 debug_generic_stmt (TREE_TYPE (rhs1));
4183 debug_generic_stmt (TREE_TYPE (op));
4184 return true;
4187 return verify_types_in_gimple_reference (op, true);
4190 /* tcc_reference */
4191 case INDIRECT_REF:
4192 error ("INDIRECT_REF in gimple IL");
4193 return true;
4195 case COMPONENT_REF:
4196 case BIT_FIELD_REF:
4197 case ARRAY_REF:
4198 case ARRAY_RANGE_REF:
4199 case VIEW_CONVERT_EXPR:
4200 case REALPART_EXPR:
4201 case IMAGPART_EXPR:
4202 case TARGET_MEM_REF:
4203 case MEM_REF:
4204 if (!is_gimple_reg (lhs)
4205 && is_gimple_reg_type (TREE_TYPE (lhs)))
4207 error ("invalid rhs for gimple memory store");
4208 debug_generic_stmt (lhs);
4209 debug_generic_stmt (rhs1);
4210 return true;
4212 return res || verify_types_in_gimple_reference (rhs1, false);
4214 /* tcc_constant */
4215 case SSA_NAME:
4216 case INTEGER_CST:
4217 case REAL_CST:
4218 case FIXED_CST:
4219 case COMPLEX_CST:
4220 case VECTOR_CST:
4221 case STRING_CST:
4222 return res;
4224 /* tcc_declaration */
4225 case CONST_DECL:
4226 return res;
4227 case VAR_DECL:
4228 case PARM_DECL:
4229 if (!is_gimple_reg (lhs)
4230 && !is_gimple_reg (rhs1)
4231 && is_gimple_reg_type (TREE_TYPE (lhs)))
4233 error ("invalid rhs for gimple memory store");
4234 debug_generic_stmt (lhs);
4235 debug_generic_stmt (rhs1);
4236 return true;
4238 return res;
4240 case CONSTRUCTOR:
4241 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4243 unsigned int i;
4244 tree elt_i, elt_v, elt_t = NULL_TREE;
4246 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4247 return res;
4248 /* For vector CONSTRUCTORs we require that either it is empty
4249 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4250 (then the element count must be correct to cover the whole
4251 outer vector and index must be NULL on all elements, or it is
4252 a CONSTRUCTOR of scalar elements, where we as an exception allow
4253 smaller number of elements (assuming zero filling) and
4254 consecutive indexes as compared to NULL indexes (such
4255 CONSTRUCTORs can appear in the IL from FEs). */
4256 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4258 if (elt_t == NULL_TREE)
4260 elt_t = TREE_TYPE (elt_v);
4261 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4263 tree elt_t = TREE_TYPE (elt_v);
4264 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4265 TREE_TYPE (elt_t)))
4267 error ("incorrect type of vector CONSTRUCTOR"
4268 " elements");
4269 debug_generic_stmt (rhs1);
4270 return true;
4272 else if (CONSTRUCTOR_NELTS (rhs1)
4273 * TYPE_VECTOR_SUBPARTS (elt_t)
4274 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4276 error ("incorrect number of vector CONSTRUCTOR"
4277 " elements");
4278 debug_generic_stmt (rhs1);
4279 return true;
4282 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4283 elt_t))
4285 error ("incorrect type of vector CONSTRUCTOR elements");
4286 debug_generic_stmt (rhs1);
4287 return true;
4289 else if (CONSTRUCTOR_NELTS (rhs1)
4290 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4292 error ("incorrect number of vector CONSTRUCTOR elements");
4293 debug_generic_stmt (rhs1);
4294 return true;
4297 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4299 error ("incorrect type of vector CONSTRUCTOR elements");
4300 debug_generic_stmt (rhs1);
4301 return true;
4303 if (elt_i != NULL_TREE
4304 && (TREE_CODE (elt_t) == VECTOR_TYPE
4305 || TREE_CODE (elt_i) != INTEGER_CST
4306 || compare_tree_int (elt_i, i) != 0))
4308 error ("vector CONSTRUCTOR with non-NULL element index");
4309 debug_generic_stmt (rhs1);
4310 return true;
4312 if (!is_gimple_val (elt_v))
4314 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4315 debug_generic_stmt (rhs1);
4316 return true;
4320 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4322 error ("non-vector CONSTRUCTOR with elements");
4323 debug_generic_stmt (rhs1);
4324 return true;
4326 return res;
4327 case OBJ_TYPE_REF:
4328 case ASSERT_EXPR:
4329 case WITH_SIZE_EXPR:
4330 /* FIXME. */
4331 return res;
4333 default:;
4336 return res;
4339 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4340 is a problem, otherwise false. */
4342 static bool
4343 verify_gimple_assign (gassign *stmt)
4345 switch (gimple_assign_rhs_class (stmt))
4347 case GIMPLE_SINGLE_RHS:
4348 return verify_gimple_assign_single (stmt);
4350 case GIMPLE_UNARY_RHS:
4351 return verify_gimple_assign_unary (stmt);
4353 case GIMPLE_BINARY_RHS:
4354 return verify_gimple_assign_binary (stmt);
4356 case GIMPLE_TERNARY_RHS:
4357 return verify_gimple_assign_ternary (stmt);
4359 default:
4360 gcc_unreachable ();
4364 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4365 is a problem, otherwise false. */
4367 static bool
4368 verify_gimple_return (greturn *stmt)
4370 tree op = gimple_return_retval (stmt);
4371 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4373 /* We cannot test for present return values as we do not fix up missing
4374 return values from the original source. */
4375 if (op == NULL)
4376 return false;
4378 if (!is_gimple_val (op)
4379 && TREE_CODE (op) != RESULT_DECL)
4381 error ("invalid operand in return statement");
4382 debug_generic_stmt (op);
4383 return true;
4386 if ((TREE_CODE (op) == RESULT_DECL
4387 && DECL_BY_REFERENCE (op))
4388 || (TREE_CODE (op) == SSA_NAME
4389 && SSA_NAME_VAR (op)
4390 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4391 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4392 op = TREE_TYPE (op);
4394 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4396 error ("invalid conversion in return statement");
4397 debug_generic_stmt (restype);
4398 debug_generic_stmt (TREE_TYPE (op));
4399 return true;
4402 return false;
4406 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4407 is a problem, otherwise false. */
4409 static bool
4410 verify_gimple_goto (ggoto *stmt)
4412 tree dest = gimple_goto_dest (stmt);
4414 /* ??? We have two canonical forms of direct goto destinations, a
4415 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4416 if (TREE_CODE (dest) != LABEL_DECL
4417 && (!is_gimple_val (dest)
4418 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4420 error ("goto destination is neither a label nor a pointer");
4421 return true;
4424 return false;
4427 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4428 is a problem, otherwise false. */
4430 static bool
4431 verify_gimple_switch (gswitch *stmt)
4433 unsigned int i, n;
4434 tree elt, prev_upper_bound = NULL_TREE;
4435 tree index_type, elt_type = NULL_TREE;
4437 if (!is_gimple_val (gimple_switch_index (stmt)))
4439 error ("invalid operand to switch statement");
4440 debug_generic_stmt (gimple_switch_index (stmt));
4441 return true;
4444 index_type = TREE_TYPE (gimple_switch_index (stmt));
4445 if (! INTEGRAL_TYPE_P (index_type))
4447 error ("non-integral type switch statement");
4448 debug_generic_expr (index_type);
4449 return true;
4452 elt = gimple_switch_label (stmt, 0);
4453 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4455 error ("invalid default case label in switch statement");
4456 debug_generic_expr (elt);
4457 return true;
4460 n = gimple_switch_num_labels (stmt);
4461 for (i = 1; i < n; i++)
4463 elt = gimple_switch_label (stmt, i);
4465 if (! CASE_LOW (elt))
4467 error ("invalid case label in switch statement");
4468 debug_generic_expr (elt);
4469 return true;
4471 if (CASE_HIGH (elt)
4472 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4474 error ("invalid case range in switch statement");
4475 debug_generic_expr (elt);
4476 return true;
4479 if (elt_type)
4481 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4482 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4484 error ("type mismatch for case label in switch statement");
4485 debug_generic_expr (elt);
4486 return true;
4489 else
4491 elt_type = TREE_TYPE (CASE_LOW (elt));
4492 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4494 error ("type precision mismatch in switch statement");
4495 return true;
4499 if (prev_upper_bound)
4501 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4503 error ("case labels not sorted in switch statement");
4504 return true;
4508 prev_upper_bound = CASE_HIGH (elt);
4509 if (! prev_upper_bound)
4510 prev_upper_bound = CASE_LOW (elt);
4513 return false;
4516 /* Verify a gimple debug statement STMT.
4517 Returns true if anything is wrong. */
4519 static bool
4520 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4522 /* There isn't much that could be wrong in a gimple debug stmt. A
4523 gimple debug bind stmt, for example, maps a tree, that's usually
4524 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4525 component or member of an aggregate type, to another tree, that
4526 can be an arbitrary expression. These stmts expand into debug
4527 insns, and are converted to debug notes by var-tracking.c. */
4528 return false;
4531 /* Verify a gimple label statement STMT.
4532 Returns true if anything is wrong. */
4534 static bool
4535 verify_gimple_label (glabel *stmt)
4537 tree decl = gimple_label_label (stmt);
4538 int uid;
4539 bool err = false;
4541 if (TREE_CODE (decl) != LABEL_DECL)
4542 return true;
4543 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4544 && DECL_CONTEXT (decl) != current_function_decl)
4546 error ("label's context is not the current function decl");
4547 err |= true;
4550 uid = LABEL_DECL_UID (decl);
4551 if (cfun->cfg
4552 && (uid == -1
4553 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4555 error ("incorrect entry in label_to_block_map");
4556 err |= true;
4559 uid = EH_LANDING_PAD_NR (decl);
4560 if (uid)
4562 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4563 if (decl != lp->post_landing_pad)
4565 error ("incorrect setting of landing pad number");
4566 err |= true;
4570 return err;
4573 /* Verify a gimple cond statement STMT.
4574 Returns true if anything is wrong. */
4576 static bool
4577 verify_gimple_cond (gcond *stmt)
4579 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4581 error ("invalid comparison code in gimple cond");
4582 return true;
4584 if (!(!gimple_cond_true_label (stmt)
4585 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4586 || !(!gimple_cond_false_label (stmt)
4587 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4589 error ("invalid labels in gimple cond");
4590 return true;
4593 return verify_gimple_comparison (boolean_type_node,
4594 gimple_cond_lhs (stmt),
4595 gimple_cond_rhs (stmt));
4598 /* Verify the GIMPLE statement STMT. Returns true if there is an
4599 error, otherwise false. */
4601 static bool
4602 verify_gimple_stmt (gimple stmt)
4604 switch (gimple_code (stmt))
4606 case GIMPLE_ASSIGN:
4607 return verify_gimple_assign (as_a <gassign *> (stmt));
4609 case GIMPLE_LABEL:
4610 return verify_gimple_label (as_a <glabel *> (stmt));
4612 case GIMPLE_CALL:
4613 return verify_gimple_call (as_a <gcall *> (stmt));
4615 case GIMPLE_COND:
4616 return verify_gimple_cond (as_a <gcond *> (stmt));
4618 case GIMPLE_GOTO:
4619 return verify_gimple_goto (as_a <ggoto *> (stmt));
4621 case GIMPLE_SWITCH:
4622 return verify_gimple_switch (as_a <gswitch *> (stmt));
4624 case GIMPLE_RETURN:
4625 return verify_gimple_return (as_a <greturn *> (stmt));
4627 case GIMPLE_ASM:
4628 return false;
4630 case GIMPLE_TRANSACTION:
4631 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4633 /* Tuples that do not have tree operands. */
4634 case GIMPLE_NOP:
4635 case GIMPLE_PREDICT:
4636 case GIMPLE_RESX:
4637 case GIMPLE_EH_DISPATCH:
4638 case GIMPLE_EH_MUST_NOT_THROW:
4639 return false;
4641 CASE_GIMPLE_OMP:
4642 /* OpenMP directives are validated by the FE and never operated
4643 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4644 non-gimple expressions when the main index variable has had
4645 its address taken. This does not affect the loop itself
4646 because the header of an GIMPLE_OMP_FOR is merely used to determine
4647 how to setup the parallel iteration. */
4648 return false;
4650 case GIMPLE_DEBUG:
4651 return verify_gimple_debug (stmt);
4653 default:
4654 gcc_unreachable ();
4658 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4659 and false otherwise. */
4661 static bool
4662 verify_gimple_phi (gimple phi)
4664 bool err = false;
4665 unsigned i;
4666 tree phi_result = gimple_phi_result (phi);
4667 bool virtual_p;
/* A PHI without a result is malformed beyond further checking.  */
4669 if (!phi_result)
4671 error ("invalid PHI result");
4672 return true;
/* The result must be an SSA name; a virtual PHI must additionally be
   based on the function's single virtual operand.  */
4675 virtual_p = virtual_operand_p (phi_result);
4676 if (TREE_CODE (phi_result) != SSA_NAME
4677 || (virtual_p
4678 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4680 error ("invalid PHI result");
4681 err = true;
/* Each argument must be present, agree with the result in
   virtual-ness, and (when not virtual) be a valid GIMPLE value.  */
4684 for (i = 0; i < gimple_phi_num_args (phi); i++)
4686 tree t = gimple_phi_arg_def (phi, i);
4688 if (!t)
4690 error ("missing PHI def");
4691 err |= true;
4692 continue;
4694 /* Addressable variables do have SSA_NAMEs but they
4695 are not considered gimple values. */
4696 else if ((TREE_CODE (t) == SSA_NAME
4697 && virtual_p != virtual_operand_p (t))
4698 || (virtual_p
4699 && (TREE_CODE (t) != SSA_NAME
4700 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4701 || (!virtual_p
4702 && !is_gimple_val (t)))
4704 error ("invalid PHI argument");
4705 debug_generic_expr (t);
4706 err |= true;
/* With extra type checking enabled, each argument type must be
   trivially convertible to the result type.  */
4708 #ifdef ENABLE_TYPES_CHECKING
4709 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4711 error ("incompatible types in PHI argument %u", i);
4712 debug_generic_stmt (TREE_TYPE (phi_result));
4713 debug_generic_stmt (TREE_TYPE (t));
4714 err |= true;
4716 #endif
4719 return err;
4722 /* Verify the GIMPLE statements inside the sequence STMTS. */
4724 static bool
4725 verify_gimple_in_seq_2 (gimple_seq stmts)
4727 gimple_stmt_iterator ittr;
4728 bool err = false;
/* Walk the sequence, recursing into every statement kind that carries
   nested sequences; everything else is a leaf checked by
   verify_gimple_stmt.  */
4730 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4732 gimple stmt = gsi_stmt (ittr);
4734 switch (gimple_code (stmt))
4736 case GIMPLE_BIND:
4737 err |= verify_gimple_in_seq_2 (
4738 gimple_bind_body (as_a <gbind *> (stmt)));
4739 break;
4741 case GIMPLE_TRY:
4742 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4743 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4744 break;
4746 case GIMPLE_EH_FILTER:
4747 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4748 break;
4750 case GIMPLE_EH_ELSE:
4752 geh_else *eh_else = as_a <geh_else *> (stmt);
4753 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4754 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4756 break;
4758 case GIMPLE_CATCH:
4759 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4760 as_a <gcatch *> (stmt)));
4761 break;
/* verify_gimple_transaction recurses back into this function for the
   transaction body (mutual recursion).  */
4763 case GIMPLE_TRANSACTION:
4764 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4765 break;
/* Leaf statement: verify it and dump the offender on failure.  */
4767 default:
4769 bool err2 = verify_gimple_stmt (stmt);
4770 if (err2)
4771 debug_gimple_stmt (stmt);
4772 err |= err2;
4777 return err;
4780 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4781 is a problem, otherwise false. */
4783 static bool
4784 verify_gimple_transaction (gtransaction *stmt)
4786 tree lab = gimple_transaction_label (stmt);
4787 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4788 return true;
4789 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4793 /* Verify the GIMPLE statements inside the statement list STMTS. */
4795 DEBUG_FUNCTION void
4796 verify_gimple_in_seq (gimple_seq stmts)
/* Account verification time to the statement-verify timevar.  */
4798 timevar_push (TV_TREE_STMT_VERIFY);
/* internal_error does not return, so the timevar is never popped on
   failure; compilation aborts anyway.  */
4799 if (verify_gimple_in_seq_2 (stmts))
4800 internal_error ("verify_gimple failed");
4801 timevar_pop (TV_TREE_STMT_VERIFY);
4804 /* Return true when the T can be shared. */
4806 static bool
4807 tree_node_can_be_shared (tree t)
4809 if (IS_TYPE_OR_DECL_P (t)
4810 || is_gimple_min_invariant (t)
4811 || TREE_CODE (t) == SSA_NAME
4812 || t == error_mark_node
4813 || TREE_CODE (t) == IDENTIFIER_NODE)
4814 return true;
4816 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4817 return true;
4819 if (DECL_P (t))
4820 return true;
4822 return false;
4825 /* Called via walk_tree. Verify tree sharing. */
4827 static tree
4828 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4830 hash_set<void *> *visited = (hash_set<void *> *) data;
4832 if (tree_node_can_be_shared (*tp))
4834 *walk_subtrees = false;
4835 return NULL;
4838 if (visited->add (*tp))
4839 return *tp;
4841 return NULL;
4844 /* Called via walk_gimple_stmt. Verify tree sharing. */
4846 static tree
4847 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
/* Adapter for walk_gimple_op: the visited set is carried in the
   walk_stmt_info's info field.  */
4849 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4850 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4853 static bool eh_error_found;
4854 bool
4855 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4856 hash_set<gimple> *visited)
4858 if (!visited->contains (stmt))
4860 error ("dead STMT in EH table");
4861 debug_gimple_stmt (stmt);
4862 eh_error_found = true;
4864 return true;
4867 /* Verify if the location LOCs block is in BLOCKS. */
4869 static bool
4870 verify_location (hash_set<tree> *blocks, location_t loc)
4872 tree block = LOCATION_BLOCK (loc);
4873 if (block != NULL_TREE
4874 && !blocks->contains (block))
4876 error ("location references block not in block tree");
4877 return true;
4879 if (block != NULL_TREE)
4880 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4881 return false;
4884 /* Called via walk_tree. Verify that expressions have no blocks. */
4886 static tree
4887 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4889 if (!EXPR_P (*tp))
4891 *walk_subtrees = false;
4892 return NULL;
4895 location_t loc = EXPR_LOCATION (*tp);
4896 if (LOCATION_BLOCK (loc) != NULL)
4897 return *tp;
4899 return NULL;
4902 /* Called via walk_tree. Verify locations of expressions. */
4904 static tree
4905 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4907 hash_set<tree> *blocks = (hash_set<tree> *) data;
4909 if (TREE_CODE (*tp) == VAR_DECL
4910 && DECL_HAS_DEBUG_EXPR_P (*tp))
4912 tree t = DECL_DEBUG_EXPR (*tp);
4913 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4914 if (addr)
4915 return addr;
4917 if ((TREE_CODE (*tp) == VAR_DECL
4918 || TREE_CODE (*tp) == PARM_DECL
4919 || TREE_CODE (*tp) == RESULT_DECL)
4920 && DECL_HAS_VALUE_EXPR_P (*tp))
4922 tree t = DECL_VALUE_EXPR (*tp);
4923 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4924 if (addr)
4925 return addr;
4928 if (!EXPR_P (*tp))
4930 *walk_subtrees = false;
4931 return NULL;
4934 location_t loc = EXPR_LOCATION (*tp);
4935 if (verify_location (blocks, loc))
4936 return *tp;
4938 return NULL;
4941 /* Called via walk_gimple_op. Verify locations of expressions. */
4943 static tree
4944 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
/* Adapter for walk_gimple_op: the block hash-set is carried in the
   walk_stmt_info's info field.  */
4946 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4947 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4950 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4952 static void
4953 collect_subblocks (hash_set<tree> *blocks, tree block)
4955 tree t;
4956 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4958 blocks->add (t);
4959 collect_subblocks (blocks, t);
4963 /* Verify the GIMPLE statements in the CFG of FN. */
4965 DEBUG_FUNCTION void
4966 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4968 basic_block bb;
4969 bool err = false;
4971 timevar_push (TV_TREE_STMT_VERIFY);
4972 hash_set<void *> visited;
4973 hash_set<gimple> visited_stmts;
4975 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4976 hash_set<tree> blocks;
4977 if (DECL_INITIAL (fn->decl))
4979 blocks.add (DECL_INITIAL (fn->decl));
4980 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
/* First pass over each block's PHI nodes: bb backlink, PHI shape,
   locations and tree sharing of the arguments.  */
4983 FOR_EACH_BB_FN (bb, fn)
4985 gimple_stmt_iterator gsi;
4987 for (gphi_iterator gpi = gsi_start_phis (bb);
4988 !gsi_end_p (gpi);
4989 gsi_next (&gpi))
4991 gphi *phi = gpi.phi ();
4992 bool err2 = false;
4993 unsigned i;
/* Remember every statement seen so stale EH-table entries can be
   detected below.  */
4995 visited_stmts.add (phi);
4997 if (gimple_bb (phi) != bb)
4999 error ("gimple_bb (phi) is set to a wrong basic block");
5000 err2 = true;
5003 err2 |= verify_gimple_phi (phi);
5005 /* Only PHI arguments have locations. */
5006 if (gimple_location (phi) != UNKNOWN_LOCATION)
5008 error ("PHI node with location");
5009 err2 = true;
5012 for (i = 0; i < gimple_phi_num_args (phi); i++)
5014 tree arg = gimple_phi_arg_def (phi, i);
5015 tree addr = walk_tree (&arg, verify_node_sharing_1,
5016 &visited, NULL);
5017 if (addr)
5019 error ("incorrect sharing of tree nodes");
5020 debug_generic_expr (addr);
5021 err2 |= true;
5023 location_t loc = gimple_phi_arg_location (phi, i);
5024 if (virtual_operand_p (gimple_phi_result (phi))
5025 && loc != UNKNOWN_LOCATION)
5027 error ("virtual PHI with argument locations");
5028 err2 = true;
5030 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5031 if (addr)
5033 debug_generic_expr (addr);
5034 err2 = true;
5036 err2 |= verify_location (&blocks, loc);
5039 if (err2)
5040 debug_gimple_stmt (phi);
5041 err |= err2;
/* Second pass: each regular statement of the block.  */
5044 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5046 gimple stmt = gsi_stmt (gsi);
5047 bool err2 = false;
5048 struct walk_stmt_info wi;
5049 tree addr;
5050 int lp_nr;
5052 visited_stmts.add (stmt);
5054 if (gimple_bb (stmt) != bb)
5056 error ("gimple_bb (stmt) is set to a wrong basic block");
5057 err2 = true;
5060 err2 |= verify_gimple_stmt (stmt);
5061 err2 |= verify_location (&blocks, gimple_location (stmt));
/* Tree sharing across statement operands.  */
5063 memset (&wi, 0, sizeof (wi));
5064 wi.info = (void *) &visited;
5065 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5066 if (addr)
5068 error ("incorrect sharing of tree nodes");
5069 debug_generic_expr (addr);
5070 err2 |= true;
/* Location blocks used by the operands.  */
5073 memset (&wi, 0, sizeof (wi));
5074 wi.info = (void *) &blocks;
5075 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5076 if (addr)
5078 debug_generic_expr (addr);
5079 err2 |= true;
5082 /* ??? Instead of not checking these stmts at all the walker
5083 should know its context via wi. */
5084 if (!is_gimple_debug (stmt)
5085 && !is_gimple_omp (stmt))
5087 memset (&wi, 0, sizeof (wi));
5088 addr = walk_gimple_op (stmt, verify_expr, &wi);
5089 if (addr)
5091 debug_generic_expr (addr);
5092 inform (gimple_location (stmt), "in statement");
5093 err2 |= true;
5097 /* If the statement is marked as part of an EH region, then it is
5098 expected that the statement could throw. Verify that when we
5099 have optimizations that simplify statements such that we prove
5100 that they cannot throw, that we update other data structures
5101 to match. */
5102 lp_nr = lookup_stmt_eh_lp (stmt);
5103 if (lp_nr > 0)
5105 if (!stmt_could_throw_p (stmt))
5107 if (verify_nothrow)
5109 error ("statement marked for throw, but doesn%'t");
5110 err2 |= true;
5113 else if (!gsi_one_before_end_p (gsi))
5115 error ("statement marked for throw in middle of block");
5116 err2 |= true;
5120 if (err2)
5121 debug_gimple_stmt (stmt);
5122 err |= err2;
/* Finally, every entry in the EH throw-stmt table must refer to a
   statement we actually visited above.  */
5126 eh_error_found = false;
5127 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5128 if (eh_table)
5129 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5130 (&visited_stmts);
5132 if (err || eh_error_found)
5133 internal_error ("verify_gimple failed");
5135 verify_histograms ();
5136 timevar_pop (TV_TREE_STMT_VERIFY);
5140 /* Verifies that the flow information is OK. */
5142 static int
5143 gimple_verify_flow_info (void)
5145 int err = 0;
5146 basic_block bb;
5147 gimple_stmt_iterator gsi;
5148 gimple stmt;
5149 edge e;
5150 edge_iterator ei;
/* The artificial entry and exit blocks must never carry statements
   or PHI nodes.  */
5152 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5153 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5155 error ("ENTRY_BLOCK has IL associated with it");
5156 err = 1;
5159 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5160 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5162 error ("EXIT_BLOCK has IL associated with it");
5163 err = 1;
5166 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5167 if (e->flags & EDGE_FALLTHRU)
5169 error ("fallthru to exit from bb %d", e->src->index);
5170 err = 1;
5173 FOR_EACH_BB_FN (bb, cfun)
5175 bool found_ctrl_stmt = false;
5177 stmt = NULL;
5179 /* Skip labels on the start of basic block. */
5180 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5182 tree label;
5183 gimple prev_stmt = stmt;
5185 stmt = gsi_stmt (gsi);
5187 if (gimple_code (stmt) != GIMPLE_LABEL)
5188 break;
/* Nonlocal and EH landing-pad labels must come first in a label
   sequence; every label must map back to this block and belong to
   the current function.  */
5190 label = gimple_label_label (as_a <glabel *> (stmt));
5191 if (prev_stmt && DECL_NONLOCAL (label))
5193 error ("nonlocal label ");
5194 print_generic_expr (stderr, label, 0);
5195 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5196 bb->index);
5197 err = 1;
5200 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5202 error ("EH landing pad label ");
5203 print_generic_expr (stderr, label, 0);
5204 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5205 bb->index);
5206 err = 1;
5209 if (label_to_block (label) != bb)
5211 error ("label ");
5212 print_generic_expr (stderr, label, 0);
5213 fprintf (stderr, " to block does not match in bb %d",
5214 bb->index);
5215 err = 1;
5218 if (decl_function_context (label) != current_function_decl)
5220 error ("label ");
5221 print_generic_expr (stderr, label, 0);
5222 fprintf (stderr, " has incorrect context in bb %d",
5223 bb->index);
5224 err = 1;
5228 /* Verify that body of basic block BB is free of control flow. */
5229 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5231 gimple stmt = gsi_stmt (gsi);
5233 if (found_ctrl_stmt)
5235 error ("control flow in the middle of basic block %d",
5236 bb->index);
5237 err = 1;
5240 if (stmt_ends_bb_p (stmt))
5241 found_ctrl_stmt = true;
5243 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5245 error ("label ");
5246 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5247 fprintf (stderr, " in the middle of basic block %d", bb->index);
5248 err = 1;
/* Now check the outgoing edges against the block's last statement.  */
5252 gsi = gsi_last_bb (bb);
5253 if (gsi_end_p (gsi))
5254 continue;
5256 stmt = gsi_stmt (gsi);
5258 if (gimple_code (stmt) == GIMPLE_LABEL)
5259 continue;
5261 err |= verify_eh_edges (stmt);
5263 if (is_ctrl_stmt (stmt))
5265 FOR_EACH_EDGE (e, ei, bb->succs)
5266 if (e->flags & EDGE_FALLTHRU)
5268 error ("fallthru edge after a control statement in bb %d",
5269 bb->index);
5270 err = 1;
5274 if (gimple_code (stmt) != GIMPLE_COND)
5276 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5277 after anything else but if statement. */
5278 FOR_EACH_EDGE (e, ei, bb->succs)
5279 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5281 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5282 bb->index);
5283 err = 1;
/* Per-statement-kind edge constraints.  */
5287 switch (gimple_code (stmt))
5289 case GIMPLE_COND:
5291 edge true_edge;
5292 edge false_edge;
5294 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5296 if (!true_edge
5297 || !false_edge
5298 || !(true_edge->flags & EDGE_TRUE_VALUE)
5299 || !(false_edge->flags & EDGE_FALSE_VALUE)
5300 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5301 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5302 || EDGE_COUNT (bb->succs) >= 3)
5304 error ("wrong outgoing edge flags at end of bb %d",
5305 bb->index);
5306 err = 1;
5309 break;
5311 case GIMPLE_GOTO:
5312 if (simple_goto_p (stmt))
5314 error ("explicit goto at end of bb %d", bb->index);
5315 err = 1;
5317 else
5319 /* FIXME. We should double check that the labels in the
5320 destination blocks have their address taken. */
5321 FOR_EACH_EDGE (e, ei, bb->succs)
5322 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5323 | EDGE_FALSE_VALUE))
5324 || !(e->flags & EDGE_ABNORMAL))
5326 error ("wrong outgoing edge flags at end of bb %d",
5327 bb->index);
5328 err = 1;
5331 break;
/* __builtin_return acts like a GIMPLE_RETURN for edge purposes.  */
5333 case GIMPLE_CALL:
5334 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5335 break;
5336 /* ... fallthru ... */
5337 case GIMPLE_RETURN:
5338 if (!single_succ_p (bb)
5339 || (single_succ_edge (bb)->flags
5340 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5341 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5343 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5344 err = 1;
5346 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5348 error ("return edge does not point to exit in bb %d",
5349 bb->index);
5350 err = 1;
5352 break;
/* For switches, mark each case destination in bb->aux (1 = expected,
   2 = matched by an edge), then verify the edge set matches exactly.  */
5354 case GIMPLE_SWITCH:
5356 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5357 tree prev;
5358 edge e;
5359 size_t i, n;
5361 n = gimple_switch_num_labels (switch_stmt);
5363 /* Mark all the destination basic blocks. */
5364 for (i = 0; i < n; ++i)
5366 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5367 basic_block label_bb = label_to_block (lab);
5368 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5369 label_bb->aux = (void *)1;
5372 /* Verify that the case labels are sorted. */
5373 prev = gimple_switch_label (switch_stmt, 0);
5374 for (i = 1; i < n; ++i)
5376 tree c = gimple_switch_label (switch_stmt, i);
5377 if (!CASE_LOW (c))
5379 error ("found default case not at the start of "
5380 "case vector");
5381 err = 1;
5382 continue;
5384 if (CASE_LOW (prev)
5385 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5387 error ("case labels not sorted: ");
5388 print_generic_expr (stderr, prev, 0);
5389 fprintf (stderr," is greater than ");
5390 print_generic_expr (stderr, c, 0);
5391 fprintf (stderr," but comes before it.\n");
5392 err = 1;
5394 prev = c;
5396 /* VRP will remove the default case if it can prove it will
5397 never be executed. So do not verify there always exists
5398 a default case here. */
5400 FOR_EACH_EDGE (e, ei, bb->succs)
5402 if (!e->dest->aux)
5404 error ("extra outgoing edge %d->%d",
5405 bb->index, e->dest->index);
5406 err = 1;
5409 e->dest->aux = (void *)2;
5410 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5411 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5413 error ("wrong outgoing edge flags at end of bb %d",
5414 bb->index);
5415 err = 1;
5419 /* Check that we have all of them. */
5420 for (i = 0; i < n; ++i)
5422 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5423 basic_block label_bb = label_to_block (lab);
5425 if (label_bb->aux != (void *)2)
5427 error ("missing edge %i->%i", bb->index, label_bb->index);
5428 err = 1;
/* Reset the aux markers for subsequent blocks.  */
5432 FOR_EACH_EDGE (e, ei, bb->succs)
5433 e->dest->aux = (void *)0;
5435 break;
5437 case GIMPLE_EH_DISPATCH:
5438 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5439 break;
5441 default:
5442 break;
5446 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5447 verify_dominators (CDI_DOMINATORS);
5449 return err;
5453 /* Updates phi nodes after creating a forwarder block joined
5454 by edge FALLTHRU. */
5456 static void
5457 gimple_make_forwarder_block (edge fallthru)
5459 edge e;
5460 edge_iterator ei;
5461 basic_block dummy, bb;
5462 tree var;
5463 gphi_iterator gsi;
5465 dummy = fallthru->src;
5466 bb = fallthru->dest;
/* With a single predecessor no PHI adjustment is needed.  */
5468 if (single_pred_p (bb))
5469 return;
5471 /* If we redirected a branch we must create new PHI nodes at the
5472 start of BB. */
5473 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5475 gphi *phi, *new_phi;
5477 phi = gsi.phi ();
5478 var = gimple_phi_result (phi);
/* The new PHI in BB takes over the original result; the PHI left in
   the forwarder gets a fresh SSA name which feeds the new PHI over
   the fallthru edge.  */
5479 new_phi = create_phi_node (var, bb);
5480 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5481 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5482 UNKNOWN_LOCATION);
5485 /* Add the arguments we have stored on edges. */
5486 FOR_EACH_EDGE (e, ei, bb->preds)
5488 if (e == fallthru)
5489 continue;
5491 flush_pending_stmts (e);
5496 /* Return a non-special label in the head of basic block BLOCK.
5497 Create one if it doesn't exist. */
5499 tree
5500 gimple_block_label (basic_block bb)
5502 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5503 bool first = true;
5504 tree label;
5505 glabel *stmt;
/* Scan the leading label statements for a usable (non-nonlocal)
   label.  */
5507 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5509 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5510 if (!stmt)
5511 break;
5512 label = gimple_label_label (stmt);
5513 if (!DECL_NONLOCAL (label))
/* Hoist the chosen label to the front of the block so later callers
   find it first.  */
5515 if (!first)
5516 gsi_move_before (&i, &s);
5517 return label;
/* No suitable label found: synthesize one at the block head.  */
5521 label = create_artificial_label (UNKNOWN_LOCATION);
5522 stmt = gimple_build_label (label);
5523 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5524 return label;
5528 /* Attempt to perform edge redirection by replacing a possibly complex
5529 jump instruction by a goto or by removing the jump completely.
5530 This can apply only if all edges now point to the same block. The
5531 parameters and return values are equivalent to
5532 redirect_edge_and_branch. */
5534 static edge
5535 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5537 basic_block src = e->src;
5538 gimple_stmt_iterator i;
5539 gimple stmt;
5541 /* We can replace or remove a complex jump only when we have exactly
5542 two edges. */
5543 if (EDGE_COUNT (src->succs) != 2
5544 /* Verify that all targets will be TARGET. Specifically, the
5545 edge that is not E must also go to TARGET. */
/* EDGE_SUCC (src, 0) == e evaluates to 0 or 1, thereby indexing the
   *other* successor edge of SRC.  */
5546 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5547 return NULL;
5549 i = gsi_last_bb (src);
5550 if (gsi_end_p (i))
5551 return NULL;
5553 stmt = gsi_stmt (i);
/* Both outcomes lead to TARGET, so the conditional/switch is
   redundant: delete it and fall through.  */
5555 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5557 gsi_remove (&i, true);
5558 e = ssa_redirect_edge (e, target);
5559 e->flags = EDGE_FALLTHRU;
5560 return e;
5563 return NULL;
5567 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5568 edge representing the redirected branch. */
5570 static edge
5571 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5573 basic_block bb = e->src;
5574 gimple_stmt_iterator gsi;
5575 edge ret;
5576 gimple stmt;
/* Abnormal edges cannot be redirected; a no-op redirection fails too.  */
5578 if (e->flags & EDGE_ABNORMAL)
5579 return NULL;
5581 if (e->dest == dest)
5582 return NULL;
5584 if (e->flags & EDGE_EH)
5585 return redirect_eh_edge (e, dest);
/* Try removing the control statement entirely when all successors
   would end up at DEST anyway.  */
5587 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5589 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5590 if (ret)
5591 return ret;
5594 gsi = gsi_last_bb (bb);
5595 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
/* Dispatch on the statement that controls the edge; an empty block is
   treated as a fallthru (GIMPLE_ERROR_MARK hits the default case).  */
5597 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5599 case GIMPLE_COND:
5600 /* For COND_EXPR, we only need to redirect the edge. */
5601 break;
5603 case GIMPLE_GOTO:
5604 /* No non-abnormal edges should lead from a non-simple goto, and
5605 simple ones should be represented implicitly. */
5606 gcc_unreachable ();
5608 case GIMPLE_SWITCH:
5610 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5611 tree label = gimple_block_label (dest);
5612 tree cases = get_cases_for_edge (e, switch_stmt);
5614 /* If we have a list of cases associated with E, then use it
5615 as it's a lot faster than walking the entire case vector. */
5616 if (cases)
5618 edge e2 = find_edge (e->src, dest);
5619 tree last, first;
5621 first = cases;
5622 while (cases)
5624 last = cases;
5625 CASE_LABEL (cases) = label;
5626 cases = CASE_CHAIN (cases);
5629 /* If there was already an edge in the CFG, then we need
5630 to move all the cases associated with E to E2. */
5631 if (e2)
5633 tree cases2 = get_cases_for_edge (e2, switch_stmt);
/* Splice E's case chain into E2's chain.  */
5635 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5636 CASE_CHAIN (cases2) = first;
5638 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5640 else
/* No per-edge case cache: rewrite matching labels by scanning the
   whole case vector.  */
5642 size_t i, n = gimple_switch_num_labels (switch_stmt);
5644 for (i = 0; i < n; i++)
5646 tree elt = gimple_switch_label (switch_stmt, i);
5647 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5648 CASE_LABEL (elt) = label;
5652 break;
5654 case GIMPLE_ASM:
5656 gasm *asm_stmt = as_a <gasm *> (stmt);
5657 int i, n = gimple_asm_nlabels (asm_stmt);
5658 tree label = NULL;
5660 for (i = 0; i < n; ++i)
5662 tree cons = gimple_asm_label_op (asm_stmt, i);
5663 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5665 if (!label)
5666 label = gimple_block_label (dest);
5667 TREE_VALUE (cons) = label;
5671 /* If we didn't find any label matching the former edge in the
5672 asm labels, we must be redirecting the fallthrough
5673 edge. */
5674 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5676 break;
/* A return whose edge is redirected away from exit becomes a plain
   fallthru into DEST.  */
5678 case GIMPLE_RETURN:
5679 gsi_remove (&gsi, true);
5680 e->flags |= EDGE_FALLTHRU;
5681 break;
5683 case GIMPLE_OMP_RETURN:
5684 case GIMPLE_OMP_CONTINUE:
5685 case GIMPLE_OMP_SECTIONS_SWITCH:
5686 case GIMPLE_OMP_FOR:
5687 /* The edges from OMP constructs can be simply redirected. */
5688 break;
5690 case GIMPLE_EH_DISPATCH:
5691 if (!(e->flags & EDGE_FALLTHRU))
5692 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5693 break;
5695 case GIMPLE_TRANSACTION:
5696 /* The ABORT edge has a stored label associated with it, otherwise
5697 the edges are simply redirectable. */
5698 if (e->flags == 0)
5699 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5700 gimple_block_label (dest));
5701 break;
5703 default:
5704 /* Otherwise it must be a fallthru edge, and we don't need to
5705 do anything besides redirecting it. */
5706 gcc_assert (e->flags & EDGE_FALLTHRU);
5707 break;
5710 /* Update/insert PHI nodes as necessary. */
5712 /* Now update the edges in the CFG. */
5713 e = ssa_redirect_edge (e, dest);
5715 return e;
5718 /* Returns true if it is possible to remove edge E by redirecting
5719 it to the destination of the other edge from E->src. */
5721 static bool
5722 gimple_can_remove_branch_p (const_edge e)
5724 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5725 return false;
5727 return true;
5730 /* Simple wrapper, as we can always redirect fallthru edges. */
5732 static basic_block
5733 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5735 e = gimple_redirect_edge_and_branch (e, dest);
/* GIMPLE fallthru redirection never fails, and no forwarder block is
   ever created here — hence the assert and the NULL return.  */
5736 gcc_assert (e);
5738 return NULL;
5742 /* Splits basic block BB after statement STMT (but at least after the
5743 labels). If STMT is NULL, BB is split just after the labels. */
5745 static basic_block
5746 gimple_split_block (basic_block bb, void *stmt)
5748 gimple_stmt_iterator gsi;
5749 gimple_stmt_iterator gsi_tgt;
5750 gimple_seq list;
5751 basic_block new_bb;
5752 edge e;
5753 edge_iterator ei;
5755 new_bb = create_empty_bb (bb);
5757 /* Redirect the outgoing edges. */
/* The successor list is handed over wholesale; BB keeps none.  */
5758 new_bb->succs = bb->succs;
5759 bb->succs = NULL;
5760 FOR_EACH_EDGE (e, ei, new_bb->succs)
5761 e->src = new_bb;
5763 /* Get a stmt iterator pointing to the first stmt to move. */
5764 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5765 gsi = gsi_after_labels (bb);
5766 else
5768 gsi = gsi_for_stmt ((gimple) stmt);
5769 gsi_next (&gsi);
5772 /* Move everything from GSI to the new basic block. */
5773 if (gsi_end_p (gsi))
5774 return new_bb;
5776 /* Split the statement list - avoid re-creating new containers as this
5777 brings ugly quadratic memory consumption in the inliner.
5778 (We are still quadratic since we need to update stmt BB pointers,
5779 sadly.) */
5780 gsi_split_seq_before (&gsi, &list);
5781 set_bb_seq (new_bb, list);
/* Moved statements must have their bb backlink updated.  */
5782 for (gsi_tgt = gsi_start (list);
5783 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5784 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5786 return new_bb;
5790 /* Moves basic block BB after block AFTER. */
5792 static bool
5793 gimple_move_block_after (basic_block bb, basic_block after)
5795 if (bb->prev_bb == after)
5796 return true;
5798 unlink_block (bb);
5799 link_block (bb, after);
5801 return true;
5805 /* Return TRUE if block BB has no executable statements, otherwise return
5806 FALSE. */
5808 static bool
5809 gimple_empty_block_p (basic_block bb)
5811 /* BB must have no executable statements. */
5812 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5813 if (phi_nodes (bb))
5814 return false;
5815 if (gsi_end_p (gsi))
5816 return true;
5817 if (is_gimple_debug (gsi_stmt (gsi)))
5818 gsi_next_nondebug (&gsi);
5819 return gsi_end_p (gsi);
5823 /* Split a basic block if it ends with a conditional branch and if the
5824 other part of the block is not empty. */
5826 static basic_block
5827 gimple_split_block_before_cond_jump (basic_block bb)
5829 gimple last, split_point;
5830 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5831 if (gsi_end_p (gsi))
5832 return NULL;
5833 last = gsi_stmt (gsi);
5834 if (gimple_code (last) != GIMPLE_COND
5835 && gimple_code (last) != GIMPLE_SWITCH)
5836 return NULL;
5837 gsi_prev_nondebug (&gsi);
5838 split_point = gsi_stmt (gsi);
5839 return split_block (bb, split_point)->dest;
5843 /* Return true if basic_block can be duplicated. */
5845 static bool
5846 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
/* Any GIMPLE basic block can be duplicated; this cfg-hook is
   unconditionally true.  */
5848 return true;
5851 /* Create a duplicate of the basic block BB. NOTE: This does not
5852 preserve SSA form. */
5854 static basic_block
5855 gimple_duplicate_bb (basic_block bb)
5857 basic_block new_bb;
5858 gimple_stmt_iterator gsi_tgt;
/* Place the copy just before the exit block.  */
5860 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5862 /* Copy the PHI nodes. We ignore PHI node arguments here because
5863 the incoming edges have not been setup yet. */
5864 for (gphi_iterator gpi = gsi_start_phis (bb);
5865 !gsi_end_p (gpi);
5866 gsi_next (&gpi))
5868 gphi *phi, *copy;
5869 phi = gpi.phi ();
5870 copy = create_phi_node (NULL_TREE, new_bb);
5871 create_new_def_for (gimple_phi_result (phi), copy,
5872 gimple_phi_result_ptr (copy));
5873 gimple_set_uid (copy, gimple_uid (phi));
/* Copy the statements, skipping labels.  */
5876 gsi_tgt = gsi_start_bb (new_bb);
5877 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5878 !gsi_end_p (gsi);
5879 gsi_next (&gsi))
5881 def_operand_p def_p;
5882 ssa_op_iter op_iter;
5883 tree lhs;
5884 gimple stmt, copy;
5886 stmt = gsi_stmt (gsi);
5887 if (gimple_code (stmt) == GIMPLE_LABEL)
5888 continue;
5890 /* Don't duplicate label debug stmts. */
5891 if (gimple_debug_bind_p (stmt)
5892 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5893 == LABEL_DECL)
5894 continue;
5896 /* Create a new copy of STMT and duplicate STMT's virtual
5897 operands. */
5898 copy = gimple_copy (stmt);
5899 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
/* Keep EH region membership and value-profile histograms in sync
   with the original statement.  */
5901 maybe_duplicate_eh_stmt (copy, stmt);
5902 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5904 /* When copying around a stmt writing into a local non-user
5905 aggregate, make sure it won't share stack slot with other
5906 vars. */
5907 lhs = gimple_get_lhs (stmt);
5908 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5910 tree base = get_base_address (lhs);
5911 if (base
5912 && (TREE_CODE (base) == VAR_DECL
5913 || TREE_CODE (base) == RESULT_DECL)
5914 && DECL_IGNORED_P (base)
5915 && !TREE_STATIC (base)
5916 && !DECL_EXTERNAL (base)
5917 && (TREE_CODE (base) != VAR_DECL
5918 || !DECL_HAS_VALUE_EXPR_P (base)))
5919 DECL_NONSHAREABLE (base) = 1;
5922 /* Create new names for all the definitions created by COPY and
5923 add replacement mappings for each new name. */
5924 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5925 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5928 return new_bb;
5931 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5933 static void
5934 add_phi_args_after_copy_edge (edge e_copy)
5936 basic_block bb, bb_copy = e_copy->src, dest;
5937 edge e;
5938 edge_iterator ei;
5939 gphi *phi, *phi_copy;
5940 tree def;
5941 gphi_iterator psi, psi_copy;
/* Nothing to do if the destination has no PHI nodes.  */
5943 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5944 return;
/* Map the copied source/destination back to their originals (blocks
   still carrying BB_DUPLICATED) to find the corresponding original
   edge E whose PHI arguments we mirror.  */
5946 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5948 if (e_copy->dest->flags & BB_DUPLICATED)
5949 dest = get_bb_original (e_copy->dest);
5950 else
5951 dest = e_copy->dest;
5953 e = find_edge (bb, dest);
5954 if (!e)
5956 /* During loop unrolling the target of the latch edge is copied.
5957 In this case we are not looking for edge to dest, but to
5958 duplicated block whose original was dest. */
5959 FOR_EACH_EDGE (e, ei, bb->succs)
5961 if ((e->dest->flags & BB_DUPLICATED)
5962 && get_bb_original (e->dest) == dest)
5963 break;
5966 gcc_assert (e != NULL);
/* Walk original and copied PHI chains in lock-step, transplanting the
   argument (and its location) used on the original edge.  */
5969 for (psi = gsi_start_phis (e->dest),
5970 psi_copy = gsi_start_phis (e_copy->dest);
5971 !gsi_end_p (psi);
5972 gsi_next (&psi), gsi_next (&psi_copy))
5974 phi = psi.phi ();
5975 phi_copy = psi_copy.phi ();
5976 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5977 add_phi_arg (phi_copy, def, e_copy,
5978 gimple_phi_arg_location_from_edge (phi, e));
5983 /* Basic block BB_COPY was created by code duplication. Add phi node
5984 arguments for edges going out of BB_COPY. The blocks that were
5985 duplicated have BB_DUPLICATED set. */
5987 void
5988 add_phi_args_after_copy_bb (basic_block bb_copy)
5990 edge e_copy;
5991 edge_iterator ei;
5993 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5995 add_phi_args_after_copy_edge (e_copy);
5999 /* Blocks in REGION_COPY array of length N_REGION were created by
6000 duplication of basic blocks. Add phi node arguments for edges
6001 going from these blocks. If E_COPY is not NULL, also add
6002 phi node arguments for its destination.*/
6004 void
6005 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6006 edge e_copy)
6008 unsigned i;
6010 for (i = 0; i < n_region; i++)
6011 region_copy[i]->flags |= BB_DUPLICATED;
6013 for (i = 0; i < n_region; i++)
6014 add_phi_args_after_copy_bb (region_copy[i]);
6015 if (e_copy)
6016 add_phi_args_after_copy_edge (e_copy);
6018 for (i = 0; i < n_region; i++)
6019 region_copy[i]->flags &= ~BB_DUPLICATED;
6022 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6023 important exit edge EXIT. By important we mean that no SSA name defined
6024 inside region is live over the other exit edges of the region. All entry
6025 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6026 to the duplicate of the region. Dominance and loop information is
6027 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6028 UPDATE_DOMINANCE is false then we assume that the caller will update the
6029 dominance information after calling this function. The new basic
6030 blocks are stored to REGION_COPY in the same order as they had in REGION,
6031 provided that REGION_COPY is not NULL.
6032 The function returns false if it is unable to copy the region,
6033 true otherwise. */
6035 bool
6036 gimple_duplicate_sese_region (edge entry, edge exit,
6037 basic_block *region, unsigned n_region,
6038 basic_block *region_copy,
6039 bool update_dominance)
6041 unsigned i;
6042 bool free_region_copy = false, copying_header = false;
6043 struct loop *loop = entry->dest->loop_father;
6044 edge exit_copy;
6045 vec<basic_block> doms;
6046 edge redirected;
6047 int total_freq = 0, entry_freq = 0;
6048 gcov_type total_count = 0, entry_count = 0;
/* Bail out early if the blocks cannot be duplicated at all.  */
6050 if (!can_copy_bbs_p (region, n_region))
6051 return false;
6053 /* Some sanity checking. Note that we do not check for all possible
6054 misuses of the functions. I.e. if you ask to copy something weird,
6055 it will work, but the state of structures probably will not be
6056 correct. */
6057 for (i = 0; i < n_region; i++)
6059 /* We do not handle subloops, i.e. all the blocks must belong to the
6060 same loop. */
6061 if (region[i]->loop_father != loop)
6062 return false;
/* The loop header may only appear in the region as its entry block.  */
6064 if (region[i] != entry->dest
6065 && region[i] == loop->header)
6066 return false;
6069 /* In case the function is used for loop header copying (which is the primary
6070 use), ensure that EXIT and its copy will be new latch and entry edges. */
6071 if (loop->header == entry->dest)
6073 copying_header = true;
6075 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6076 return false;
6078 for (i = 0; i < n_region; i++)
6079 if (region[i] != exit->src
6080 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6081 return false;
6084 initialize_original_copy_tables ();
/* When copying a loop header, the copy belongs to the outer loop.  */
6086 if (copying_header)
6087 set_loop_copy (loop, loop_outer (loop));
6088 else
6089 set_loop_copy (loop, loop);
6091 if (!region_copy)
6093 region_copy = XNEWVEC (basic_block, n_region);
6094 free_region_copy = true;
6097 /* Record blocks outside the region that are dominated by something
6098 inside. */
6099 if (update_dominance)
6101 doms.create (0);
6102 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available; fall back to frequencies.  */
6105 if (entry->dest->count)
6107 total_count = entry->dest->count;
6108 entry_count = entry->count;
6109 /* Fix up corner cases, to avoid division by zero or creation of negative
6110 frequencies. */
6111 if (entry_count > total_count)
6112 entry_count = total_count;
6114 else
6116 total_freq = entry->dest->frequency;
6117 entry_freq = EDGE_FREQUENCY (entry);
6118 /* Fix up corner cases, to avoid division by zero or creation of negative
6119 frequencies. */
6120 if (total_freq == 0)
6121 total_freq = 1;
6122 else if (entry_freq > total_freq)
6123 entry_freq = total_freq;
6126 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6127 split_edge_bb_loc (entry), update_dominance);
/* Split the incoming profile between original and copy in proportion
   to the entry edge.  */
6128 if (total_count)
6130 scale_bbs_frequencies_gcov_type (region, n_region,
6131 total_count - entry_count,
6132 total_count);
6133 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6134 total_count);
6136 else
6138 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6139 total_freq);
6140 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
/* After header copying, the old exit source becomes the new latch.  */
6143 if (copying_header)
6145 loop->header = exit->dest;
6146 loop->latch = exit->src;
6149 /* Redirect the entry and add the phi node arguments. */
6150 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6151 gcc_assert (redirected != NULL);
6152 flush_pending_stmts (entry);
6154 /* Concerning updating of dominators: We must recount dominators
6155 for entry block and its copy. Anything that is outside of the
6156 region, but was dominated by something inside needs recounting as
6157 well. */
6158 if (update_dominance)
6160 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6161 doms.safe_push (get_bb_original (entry->dest));
6162 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6163 doms.release ();
6166 /* Add the other PHI node arguments. */
6167 add_phi_args_after_copy (region_copy, n_region, NULL);
6169 if (free_region_copy)
6170 free (region_copy);
6172 free_original_copy_tables ();
6173 return true;
6176 /* Checks if BB is part of the region defined by N_REGION BBS. */
6177 static bool
6178 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6180 unsigned int n;
6182 for (n = 0; n < n_region; n++)
6184 if (bb == bbs[n])
6185 return true;
6187 return false;
6190 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6191 are stored to REGION_COPY in the same order in that they appear
6192 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6193 the region, EXIT an exit from it. The condition guarding EXIT
6194 is moved to ENTRY. Returns true if duplication succeeds, false
6195 otherwise.
6197 For example,
6199 some_code;
6200 if (cond)
6202 else
6205 is transformed to
6207 if (cond)
6209 some_code;
6212 else
6214 some_code;
/* NOTE: the ATTRIBUTE_UNUSED markers below are stale -- every
   parameter is in fact used in the body.  */
6219 bool
6220 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6221 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6222 basic_block *region_copy ATTRIBUTE_UNUSED)
6224 unsigned i;
6225 bool free_region_copy = false;
6226 struct loop *loop = exit->dest->loop_father;
6227 struct loop *orig_loop = entry->dest->loop_father;
6228 basic_block switch_bb, entry_bb, nentry_bb;
6229 vec<basic_block> doms;
6230 int total_freq = 0, exit_freq = 0;
6231 gcov_type total_count = 0, exit_count = 0;
6232 edge exits[2], nexits[2], e;
6233 gimple_stmt_iterator gsi;
6234 gimple cond_stmt;
6235 edge sorig, snew;
6236 basic_block exit_bb;
6237 gphi_iterator psi;
6238 gphi *phi;
6239 tree def;
6240 struct loop *target, *aloop, *cloop;
/* EXIT's source must be a conditional; exits[1] is its other
   successor edge (the index expression selects the edge != exit).  */
6242 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6243 exits[0] = exit;
6244 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6246 if (!can_copy_bbs_p (region, n_region))
6247 return false;
6249 initialize_original_copy_tables ();
6250 set_loop_copy (orig_loop, loop);
/* Duplicate subloops of ORIG_LOOP whose headers lie in the region.  */
6252 target= loop;
6253 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6255 if (bb_part_of_region_p (aloop->header, region, n_region))
6257 cloop = duplicate_loop (aloop, target);
6258 duplicate_subloops (aloop, cloop);
6262 if (!region_copy)
6264 region_copy = XNEWVEC (basic_block, n_region);
6265 free_region_copy = true;
6268 gcc_assert (!need_ssa_update_p (cfun));
6270 /* Record blocks outside the region that are dominated by something
6271 inside. */
6272 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
/* Prefer profile counts when available; fall back to frequencies.  */
6274 if (exit->src->count)
6276 total_count = exit->src->count;
6277 exit_count = exit->count;
6278 /* Fix up corner cases, to avoid division by zero or creation of negative
6279 frequencies. */
6280 if (exit_count > total_count)
6281 exit_count = total_count;
6283 else
6285 total_freq = exit->src->frequency;
6286 exit_freq = EDGE_FREQUENCY (exit);
6287 /* Fix up corner cases, to avoid division by zero or creation of negative
6288 frequencies. */
6289 if (total_freq == 0)
6290 total_freq = 1;
6291 if (exit_freq > total_freq)
6292 exit_freq = total_freq;
6295 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6296 split_edge_bb_loc (exit), true);
6297 if (total_count)
6299 scale_bbs_frequencies_gcov_type (region, n_region,
6300 total_count - exit_count,
6301 total_count);
6302 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6303 total_count);
6305 else
6307 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6308 total_freq);
6309 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6312 /* Create the switch block, and put the exit condition to it. */
6313 entry_bb = entry->dest;
6314 nentry_bb = get_bb_copy (entry_bb);
/* Reuse ENTRY's source as the switch block only if it does not already
   end in a control statement; otherwise split the entry edge.  */
6315 if (!last_stmt (entry->src)
6316 || !stmt_ends_bb_p (last_stmt (entry->src)))
6317 switch_bb = entry->src;
6318 else
6319 switch_bb = split_edge (entry);
6320 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6322 gsi = gsi_last_bb (switch_bb);
6323 cond_stmt = last_stmt (exit->src);
6324 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6325 cond_stmt = gimple_copy (cond_stmt);
6327 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6329 sorig = single_succ_edge (switch_bb);
6330 sorig->flags = exits[1]->flags;
6331 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6333 /* Register the new edge from SWITCH_BB in loop exit lists. */
6334 rescan_loop_exit (snew, true, false);
6336 /* Add the PHI node arguments. */
6337 add_phi_args_after_copy (region_copy, n_region, snew);
6339 /* Get rid of now superfluous conditions and associated edges (and phi node
6340 arguments). */
6341 exit_bb = exit->dest;
6343 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6344 PENDING_STMT (e) = NULL;
6346 /* The latch of ORIG_LOOP was copied, and so was the backedge
6347 to the original header. We redirect this backedge to EXIT_BB. */
6348 for (i = 0; i < n_region; i++)
6349 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6351 gcc_assert (single_succ_edge (region_copy[i]));
6352 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6353 PENDING_STMT (e) = NULL;
/* The redirected backedge needs PHI arguments in EXIT_BB; reuse the
   values flowing in through the copied exit edge.  */
6354 for (psi = gsi_start_phis (exit_bb);
6355 !gsi_end_p (psi);
6356 gsi_next (&psi))
6358 phi = psi.phi ();
6359 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6360 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6363 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6364 PENDING_STMT (e) = NULL;
6366 /* Anything that is outside of the region, but was dominated by something
6367 inside needs to update dominance info. */
6368 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6369 doms.release ();
6370 /* Update the SSA web. */
6371 update_ssa (TODO_update_ssa);
6373 if (free_region_copy)
6374 free (region_copy);
6376 free_original_copy_tables ();
6377 return true;
6380 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6381 adding blocks when the dominator traversal reaches EXIT. This
6382 function silently assumes that ENTRY strictly dominates EXIT. */
6384 void
6385 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6386 vec<basic_block> *bbs_p)
6388 basic_block son;
6390 for (son = first_dom_son (CDI_DOMINATORS, entry);
6391 son;
6392 son = next_dom_son (CDI_DOMINATORS, son))
6394 bbs_p->safe_push (son);
6395 if (son != exit)
6396 gather_blocks_in_sese_region (son, exit, bbs_p);
6400 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6401 The duplicates are recorded in VARS_MAP. */
6403 static void
6404 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6405 tree to_context)
6407 tree t = *tp, new_t;
6408 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6410 if (DECL_CONTEXT (t) == to_context)
6411 return;
6413 bool existed;
6414 tree &loc = vars_map->get_or_insert (t, &existed);
6416 if (!existed)
6418 if (SSA_VAR_P (t))
6420 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6421 add_local_decl (f, new_t);
6423 else
6425 gcc_assert (TREE_CODE (t) == CONST_DECL);
6426 new_t = copy_node (t);
6428 DECL_CONTEXT (new_t) = to_context;
6430 loc = new_t;
6432 else
6433 new_t = loc;
6435 *tp = new_t;
6439 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6440 VARS_MAP maps old ssa names and var_decls to the new ones. */
6442 static tree
6443 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6444 tree to_context)
6446 tree new_name;
6448 gcc_assert (!virtual_operand_p (name));
6450 tree *loc = vars_map->get (name);
6452 if (!loc)
6454 tree decl = SSA_NAME_VAR (name);
6455 if (decl)
6457 replace_by_duplicate_decl (&decl, vars_map, to_context);
6458 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6459 decl, SSA_NAME_DEF_STMT (name));
6460 if (SSA_NAME_IS_DEFAULT_DEF (name))
6461 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6462 decl, new_name);
6464 else
6465 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6466 name, SSA_NAME_DEF_STMT (name));
6468 vars_map->put (name, new_name);
6470 else
6471 new_name = *loc;
6473 return new_name;
/* Bookkeeping passed via walk_stmt_info to move_stmt_op / move_stmt_r
   while statements are relocated into another function.  */
6476 struct move_stmt_d
/* BLOCK statements are rewritten from; NULL_TREE matches any
   non-NULL block (see move_stmt_op).  */
6478 tree orig_block;
/* BLOCK statements are rewritten to.  */
6479 tree new_block;
/* Source and destination FUNCTION_DECL contexts.  */
6480 tree from_context;
6481 tree to_context;
/* Maps old SSA names and local decls to their replacements.  */
6482 hash_map<tree, tree> *vars_map;
/* Maps old labels to freshly created labels; may be NULL.  */
6483 htab_t new_label_map;
/* Maps source EH regions to their duplicates; may be NULL.  */
6484 hash_map<void *, void *> *eh_map;
/* Whether local decls should be replaced by duplicates (cleared
   inside OMP directives, see move_stmt_r).  */
6485 bool remap_decls_p;
6488 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6489 contained in *TP if it has been ORIG_BLOCK previously and change the
6490 DECL_CONTEXT of every local variable referenced in *TP. */
6492 static tree
6493 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6495 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6496 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6497 tree t = *tp;
6499 if (EXPR_P (t))
6501 tree block = TREE_BLOCK (t);
/* ORIG_BLOCK == NULL_TREE means "rewrite every non-NULL block".  */
6502 if (block == p->orig_block
6503 || (p->orig_block == NULL_TREE
6504 && block != NULL_TREE))
6505 TREE_SET_BLOCK (t, p->new_block);
6506 #ifdef ENABLE_CHECKING
/* With checking, verify that any untouched block is nested inside
   ORIG_BLOCK by walking up the BLOCK_SUPERCONTEXT chain.  */
6507 else if (block != NULL_TREE)
6509 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6510 block = BLOCK_SUPERCONTEXT (block);
6511 gcc_assert (block == p->orig_block);
6513 #endif
6515 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6517 if (TREE_CODE (t) == SSA_NAME)
6518 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6519 else if (TREE_CODE (t) == LABEL_DECL)
/* Labels duplicated for EH regions have entries in NEW_LABEL_MAP;
   swap them in before adjusting the context.  */
6521 if (p->new_label_map)
6523 struct tree_map in, *out;
6524 in.base.from = t;
6525 out = (struct tree_map *)
6526 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6527 if (out)
6528 *tp = t = out->to;
6531 DECL_CONTEXT (t) = p->to_context;
6533 else if (p->remap_decls_p)
6535 /* Replace T with its duplicate. T should no longer appear in the
6536 parent function, so this looks wasteful; however, it may appear
6537 in referenced_vars, and more importantly, as virtual operands of
6538 statements, and in alias lists of other variables. It would be
6539 quite difficult to expunge it from all those places. ??? It might
6540 suffice to do this for addressable variables. */
6541 if ((TREE_CODE (t) == VAR_DECL
6542 && !is_global_var (t))
6543 || TREE_CODE (t) == CONST_DECL)
6544 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
/* Decls and SSA names have no interesting subtrees for this walk.  */
6546 *walk_subtrees = 0;
6548 else if (TYPE_P (t))
6549 *walk_subtrees = 0;
6551 return NULL_TREE;
6554 /* Helper for move_stmt_r. Given an EH region number for the source
6555 function, map that to the duplicate EH regio number in the dest. */
6557 static int
6558 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6560 eh_region old_r, new_r;
6562 old_r = get_eh_region_from_number (old_nr);
6563 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6565 return new_r->index;
6568 /* Similar, but operate on INTEGER_CSTs. */
6570 static tree
6571 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6573 int old_nr, new_nr;
6575 old_nr = tree_to_shwi (old_t_nr);
6576 new_nr = move_stmt_eh_region_nr (old_nr, p);
6578 return build_int_cst (integer_type_node, new_nr);
6581 /* Like move_stmt_op, but for gimple statements.
6583 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6584 contained in the current statement in *GSI_P and change the
6585 DECL_CONTEXT of every local variable referenced in the current
6586 statement. */
6588 static tree
6589 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6590 struct walk_stmt_info *wi)
6592 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6593 gimple stmt = gsi_stmt (*gsi_p);
6594 tree block = gimple_block (stmt);
/* Same block-rewriting rule as in move_stmt_op.  */
6596 if (block == p->orig_block
6597 || (p->orig_block == NULL_TREE
6598 && block != NULL_TREE))
6599 gimple_set_block (stmt, p->new_block);
/* Statements that embed EH region numbers need those numbers mapped
   through EH_MAP to the duplicated regions.  */
6601 switch (gimple_code (stmt))
6603 case GIMPLE_CALL:
6604 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6606 tree r, fndecl = gimple_call_fndecl (stmt);
6607 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6608 switch (DECL_FUNCTION_CODE (fndecl))
6610 case BUILT_IN_EH_COPY_VALUES:
6611 r = gimple_call_arg (stmt, 1);
6612 r = move_stmt_eh_region_tree_nr (r, p);
6613 gimple_call_set_arg (stmt, 1, r);
6614 /* FALLTHRU */
6616 case BUILT_IN_EH_POINTER:
6617 case BUILT_IN_EH_FILTER:
6618 r = gimple_call_arg (stmt, 0);
6619 r = move_stmt_eh_region_tree_nr (r, p);
6620 gimple_call_set_arg (stmt, 0, r);
6621 break;
6623 default:
6624 break;
6627 break;
6629 case GIMPLE_RESX:
6631 gresx *resx_stmt = as_a <gresx *> (stmt);
6632 int r = gimple_resx_region (resx_stmt);
6633 r = move_stmt_eh_region_nr (r, p);
6634 gimple_resx_set_region (resx_stmt, r);
6636 break;
6638 case GIMPLE_EH_DISPATCH:
6640 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6641 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6642 r = move_stmt_eh_region_nr (r, p);
6643 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6645 break;
6647 case GIMPLE_OMP_RETURN:
6648 case GIMPLE_OMP_CONTINUE:
6649 break;
6650 default:
6651 if (is_gimple_omp (stmt))
6653 /* Do not remap variables inside OMP directives. Variables
6654 referenced in clauses and directive header belong to the
6655 parent function and should not be moved into the child
6656 function. */
6657 bool save_remap_decls_p = p->remap_decls_p;
6658 p->remap_decls_p = false;
6659 *handled_ops_p = true;
/* Walk the OMP body ourselves (with decl remapping disabled), then
   restore the flag for the rest of the traversal.  */
6661 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6662 move_stmt_op, wi);
6664 p->remap_decls_p = save_remap_decls_p;
6666 break;
6669 return NULL_TREE;
6672 /* Move basic block BB from function CFUN to function DEST_FN. The
6673 block is moved out of the original linked list and placed after
6674 block AFTER in the new list. Also, the block is removed from the
6675 original array of blocks and placed in DEST_FN's array of blocks.
6676 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6677 updated to reflect the moved edges.
6679 The local variables are remapped to new instances, VARS_MAP is used
6680 to record the mapping. */
6682 static void
6683 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6684 basic_block after, bool update_edge_count_p,
6685 struct move_stmt_d *d)
6687 struct control_flow_graph *cfg;
6688 edge_iterator ei;
6689 edge e;
6690 gimple_stmt_iterator si;
6691 unsigned old_len, new_len;
6693 /* Remove BB from dominance structures. */
6694 delete_from_dominance_info (CDI_DOMINATORS, bb);
6696 /* Move BB from its current loop to the copy in the new function. */
6697 if (current_loops)
/* The loop's AUX field holds its counterpart in DEST_CFUN (set up by
   the caller, move_sese_region_to_fn).  */
6699 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6700 if (new_loop)
6701 bb->loop_father = new_loop;
6704 /* Link BB to the new linked list. */
6705 move_block_after (bb, after);
6707 /* Update the edge count in the corresponding flowgraphs. */
6708 if (update_edge_count_p)
6709 FOR_EACH_EDGE (e, ei, bb->succs)
6711 cfun->cfg->x_n_edges--;
6712 dest_cfun->cfg->x_n_edges++;
6715 /* Remove BB from the original basic block array. */
6716 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6717 cfun->cfg->x_n_basic_blocks--;
/* BB keeps its old index; grow DEST_CFUN's arrays to make it fit.  */
6719 /* Grow DEST_CFUN's basic block array if needed. */
6720 cfg = dest_cfun->cfg;
6721 cfg->x_n_basic_blocks++;
6722 if (bb->index >= cfg->x_last_basic_block)
6723 cfg->x_last_basic_block = bb->index + 1;
6725 old_len = vec_safe_length (cfg->x_basic_block_info);
6726 if ((unsigned) cfg->x_last_basic_block >= old_len)
6728 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6729 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6732 (*cfg->x_basic_block_info)[bb->index] = bb;
6734 /* Remap the variables in phi nodes. */
6735 for (gphi_iterator psi = gsi_start_phis (bb);
6736 !gsi_end_p (psi); )
6738 gphi *phi = psi.phi ();
6739 use_operand_p use;
6740 tree op = PHI_RESULT (phi);
6741 ssa_op_iter oi;
6742 unsigned i;
6744 if (virtual_operand_p (op))
6746 /* Remove the phi nodes for virtual operands (alias analysis will be
6747 run for the new function, anyway). */
6748 remove_phi_node (&psi, true);
6749 continue;
6752 SET_PHI_RESULT (phi,
6753 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6756 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6756 op = USE_FROM_PTR (use);
6757 if (TREE_CODE (op) == SSA_NAME)
6758 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
/* Re-home argument locations into the new BLOCK, matching the
   ORIG_BLOCK/NEW_BLOCK rewrite done for statements.  */
6761 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6763 location_t locus = gimple_phi_arg_location (phi, i);
6764 tree block = LOCATION_BLOCK (locus);
6766 if (locus == UNKNOWN_LOCATION)
6767 continue;
6768 if (d->orig_block == NULL_TREE || block == d->orig_block)
6770 if (d->new_block == NULL_TREE)
6771 locus = LOCATION_LOCUS (locus);
6772 else
6773 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6774 gimple_phi_arg_set_location (phi, i, locus);
6778 gsi_next (&psi);
6781 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6783 gimple stmt = gsi_stmt (si);
6784 struct walk_stmt_info wi;
6786 memset (&wi, 0, sizeof (wi));
6787 wi.info = d;
6788 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
/* Transfer label bookkeeping: the label keeps its UID, so its slot
   moves from CFUN's label map to DEST_CFUN's.  */
6790 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6792 tree label = gimple_label_label (label_stmt);
6793 int uid = LABEL_DECL_UID (label);
6795 gcc_assert (uid > -1);
6797 old_len = vec_safe_length (cfg->x_label_to_block_map);
6798 if (old_len <= (unsigned) uid)
6800 new_len = 3 * uid / 2 + 1;
6801 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6804 (*cfg->x_label_to_block_map)[uid] = bb;
6805 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6807 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6809 if (uid >= dest_cfun->cfg->last_label_uid)
6810 dest_cfun->cfg->last_label_uid = uid + 1;
/* Move EH and histogram data for STMT over to DEST_CFUN.  */
6813 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6814 remove_stmt_from_eh_lp_fn (cfun, stmt);
6816 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6817 gimple_remove_stmt_histograms (cfun, stmt);
6819 /* We cannot leave any operands allocated from the operand caches of
6820 the current function. */
6821 free_stmt_operands (cfun, stmt);
6822 push_cfun (dest_cfun);
6823 update_stmt (stmt);
6824 pop_cfun ();
/* Rewrite goto locations on outgoing edges the same way as PHI
   argument locations above.  */
6827 FOR_EACH_EDGE (e, ei, bb->succs)
6828 if (e->goto_locus != UNKNOWN_LOCATION)
6830 tree block = LOCATION_BLOCK (e->goto_locus);
6831 if (d->orig_block == NULL_TREE
6832 || block == d->orig_block)
6833 e->goto_locus = d->new_block ?
6834 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6835 LOCATION_LOCUS (e->goto_locus);
6839 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6840 the outermost EH region. Use REGION as the incoming base EH region. */
6842 static eh_region
6843 find_outermost_region_in_block (struct function *src_cfun,
6844 basic_block bb, eh_region region)
6846 gimple_stmt_iterator si;
6848 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6850 gimple stmt = gsi_stmt (si);
6851 eh_region stmt_region;
6852 int lp_nr;
6854 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6855 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6856 if (stmt_region)
6858 if (region == NULL)
6859 region = stmt_region;
6860 else if (stmt_region != region)
6862 region = eh_region_outermost (src_cfun, stmt_region, region);
6863 gcc_assert (region != NULL);
6868 return region;
6871 static tree
6872 new_label_mapper (tree decl, void *data)
6874 htab_t hash = (htab_t) data;
6875 struct tree_map *m;
6876 void **slot;
6878 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6880 m = XNEW (struct tree_map);
6881 m->hash = DECL_UID (decl);
6882 m->base.from = decl;
6883 m->to = create_artificial_label (UNKNOWN_LOCATION);
6884 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6885 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6886 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6888 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6889 gcc_assert (*slot == NULL);
6891 *slot = m;
6893 return m->to;
6896 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6897 subblocks. */
6899 static void
6900 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6901 tree to_context)
6903 tree *tp, t;
/* TP walks the BLOCK_VARS chain by address so a duplicate can be
   spliced in place of the original decl.  */
6905 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6907 t = *tp;
6908 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6909 continue;
6910 replace_by_duplicate_decl (&t, vars_map, to_context);
6911 if (t != *tp)
/* Carry over any DECL_VALUE_EXPR to the duplicate, then splice the
   duplicate into the chain in place of the original.  */
6913 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6915 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6916 DECL_HAS_VALUE_EXPR_P (t) = 1;
6918 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6919 *tp = t;
/* Recurse into nested lexical blocks.  */
6923 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6924 replace_block_vars_by_duplicates (block, vars_map, to_context);
6927 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6928 from FN1 to FN2. */
6930 static void
6931 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6932 struct loop *loop)
6934 /* Discard it from the old loop array. */
6935 (*get_loops (fn1))[loop->num] = NULL;
6937 /* Place it in the new loop array, assigning it a new number. */
6938 loop->num = number_of_loops (fn2);
6939 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6941 /* Recurse to children. */
6942 for (loop = loop->inner; loop; loop = loop->next)
6943 fixup_loop_arrays_after_move (fn1, fn2, loop);
6946 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6947 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
6949 DEBUG_FUNCTION void
6950 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6952 basic_block bb;
6953 edge_iterator ei;
6954 edge e;
6955 bitmap bbs = BITMAP_ALLOC (NULL);
6956 int i;
6958 gcc_assert (entry != NULL);
6959 gcc_assert (entry != exit);
6960 gcc_assert (bbs_p != NULL);
6962 gcc_assert (bbs_p->length () > 0);
6964 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6965 bitmap_set_bit (bbs, bb->index);
6967 gcc_assert (bitmap_bit_p (bbs, entry->index));
6968 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6970 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6972 if (bb == entry)
6974 gcc_assert (single_pred_p (entry));
6975 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6977 else
6978 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6980 e = ei_edge (ei);
6981 gcc_assert (bitmap_bit_p (bbs, e->src->index));
6984 if (bb == exit)
6986 gcc_assert (single_succ_p (exit));
6987 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6989 else
6990 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6992 e = ei_edge (ei);
6993 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6997 BITMAP_FREE (bbs);
7001 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7002 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7003 single basic block in the original CFG and the new basic block is
7004 returned. DEST_CFUN must not have a CFG yet.
7006 Note that the region need not be a pure SESE region. Blocks inside
7007 the region may contain calls to abort/exit. The only restriction
7008 is that ENTRY_BB should be the only entry point and it must
7009 dominate EXIT_BB.
7011 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7012 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7013 to the new function.
7015 All local variables referenced in the region are assumed to be in
7016 the corresponding BLOCK_VARS and unexpanded variable lists
7017 associated with DEST_CFUN. */
7019 basic_block
7020 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7021 basic_block exit_bb, tree orig_block)
7023 vec<basic_block> bbs, dom_bbs;
7024 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7025 basic_block after, bb, *entry_pred, *exit_succ, abb;
7026 struct function *saved_cfun = cfun;
7027 int *entry_flag, *exit_flag;
7028 unsigned *entry_prob, *exit_prob;
7029 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7030 edge e;
7031 edge_iterator ei;
7032 htab_t new_label_map;
7033 hash_map<void *, void *> *eh_map;
7034 struct loop *loop = entry_bb->loop_father;
7035 struct loop *loop0 = get_loop (saved_cfun, 0);
7036 struct move_stmt_d d;
7038 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7039 region. */
7040 gcc_assert (entry_bb != exit_bb
7041 && (!exit_bb
7042 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7044 /* Collect all the blocks in the region. Manually add ENTRY_BB
7045 because it won't be added by dfs_enumerate_from. */
7046 bbs.create (0);
7047 bbs.safe_push (entry_bb);
7048 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7049 #ifdef ENABLE_CHECKING
7050 verify_sese (entry_bb, exit_bb, &bbs);
7051 #endif
7053 /* The blocks that used to be dominated by something in BBS will now be
7054 dominated by the new block. */
7055 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7056 bbs.address (),
7057 bbs.length ());
7059 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7060 the predecessor edges to ENTRY_BB and the successor edges to
7061 EXIT_BB so that we can re-attach them to the new basic block that
7062 will replace the region. */
7063 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7064 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7065 entry_flag = XNEWVEC (int, num_entry_edges);
7066 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7067 i = 0;
7068 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7070 entry_prob[i] = e->probability;
7071 entry_flag[i] = e->flags;
7072 entry_pred[i++] = e->src;
7073 remove_edge (e);
7076 if (exit_bb)
7078 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7079 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7080 exit_flag = XNEWVEC (int, num_exit_edges);
7081 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7082 i = 0;
7083 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7085 exit_prob[i] = e->probability;
7086 exit_flag[i] = e->flags;
7087 exit_succ[i++] = e->dest;
7088 remove_edge (e);
7091 else
7093 num_exit_edges = 0;
7094 exit_succ = NULL;
7095 exit_flag = NULL;
7096 exit_prob = NULL;
7099 /* Switch context to the child function to initialize DEST_FN's CFG. */
7100 gcc_assert (dest_cfun->cfg == NULL);
7101 push_cfun (dest_cfun);
7103 init_empty_tree_cfg ();
7105 /* Initialize EH information for the new function. */
7106 eh_map = NULL;
7107 new_label_map = NULL;
7108 if (saved_cfun->eh)
7110 eh_region region = NULL;
7112 FOR_EACH_VEC_ELT (bbs, i, bb)
7113 region = find_outermost_region_in_block (saved_cfun, bb, region);
7115 init_eh_for_function ();
7116 if (region != NULL)
7118 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7119 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7120 new_label_mapper, new_label_map);
7124 /* Initialize an empty loop tree. */
7125 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7126 init_loops_structure (dest_cfun, loops, 1);
7127 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7128 set_loops_for_fn (dest_cfun, loops);
7130 /* Move the outlined loop tree part. */
7131 num_nodes = bbs.length ();
7132 FOR_EACH_VEC_ELT (bbs, i, bb)
7134 if (bb->loop_father->header == bb)
7136 struct loop *this_loop = bb->loop_father;
7137 struct loop *outer = loop_outer (this_loop);
7138 if (outer == loop
7139 /* If the SESE region contains some bbs ending with
7140 a noreturn call, those are considered to belong
7141 to the outermost loop in saved_cfun, rather than
7142 the entry_bb's loop_father. */
7143 || outer == loop0)
7145 if (outer != loop)
7146 num_nodes -= this_loop->num_nodes;
7147 flow_loop_tree_node_remove (bb->loop_father);
7148 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7149 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7152 else if (bb->loop_father == loop0 && loop0 != loop)
7153 num_nodes--;
7155 /* Remove loop exits from the outlined region. */
7156 if (loops_for_fn (saved_cfun)->exits)
7157 FOR_EACH_EDGE (e, ei, bb->succs)
7159 struct loops *l = loops_for_fn (saved_cfun);
7160 loop_exit **slot
7161 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7162 NO_INSERT);
7163 if (slot)
7164 l->exits->clear_slot (slot);
7169 /* Adjust the number of blocks in the tree root of the outlined part. */
7170 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7172 /* Setup a mapping to be used by move_block_to_fn. */
7173 loop->aux = current_loops->tree_root;
7174 loop0->aux = current_loops->tree_root;
7176 pop_cfun ();
7178 /* Move blocks from BBS into DEST_CFUN. */
7179 gcc_assert (bbs.length () >= 2);
7180 after = dest_cfun->cfg->x_entry_block_ptr;
7181 hash_map<tree, tree> vars_map;
7183 memset (&d, 0, sizeof (d));
7184 d.orig_block = orig_block;
7185 d.new_block = DECL_INITIAL (dest_cfun->decl);
7186 d.from_context = cfun->decl;
7187 d.to_context = dest_cfun->decl;
7188 d.vars_map = &vars_map;
7189 d.new_label_map = new_label_map;
7190 d.eh_map = eh_map;
7191 d.remap_decls_p = true;
7193 FOR_EACH_VEC_ELT (bbs, i, bb)
7195 /* No need to update edge counts on the last block. It has
7196 already been updated earlier when we detached the region from
7197 the original CFG. */
7198 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7199 after = bb;
7202 loop->aux = NULL;
7203 loop0->aux = NULL;
7204 /* Loop sizes are no longer correct, fix them up. */
7205 loop->num_nodes -= num_nodes;
7206 for (struct loop *outer = loop_outer (loop);
7207 outer; outer = loop_outer (outer))
7208 outer->num_nodes -= num_nodes;
7209 loop0->num_nodes -= bbs.length () - num_nodes;
7211 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7213 struct loop *aloop;
7214 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7215 if (aloop != NULL)
7217 if (aloop->simduid)
7219 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7220 d.to_context);
7221 dest_cfun->has_simduid_loops = true;
7223 if (aloop->force_vectorize)
7224 dest_cfun->has_force_vectorize_loops = true;
7228 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7229 if (orig_block)
7231 tree block;
7232 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7233 == NULL_TREE);
7234 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7235 = BLOCK_SUBBLOCKS (orig_block);
7236 for (block = BLOCK_SUBBLOCKS (orig_block);
7237 block; block = BLOCK_CHAIN (block))
7238 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7239 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7242 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7243 &vars_map, dest_cfun->decl);
7245 if (new_label_map)
7246 htab_delete (new_label_map);
7247 if (eh_map)
7248 delete eh_map;
7250 /* Rewire the entry and exit blocks. The successor to the entry
7251 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7252 the child function. Similarly, the predecessor of DEST_FN's
7253 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7254 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7255 various CFG manipulation function get to the right CFG.
7257 FIXME, this is silly. The CFG ought to become a parameter to
7258 these helpers. */
7259 push_cfun (dest_cfun);
7260 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7261 if (exit_bb)
7262 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7263 pop_cfun ();
7265 /* Back in the original function, the SESE region has disappeared,
7266 create a new basic block in its place. */
7267 bb = create_empty_bb (entry_pred[0]);
7268 if (current_loops)
7269 add_bb_to_loop (bb, loop);
7270 for (i = 0; i < num_entry_edges; i++)
7272 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7273 e->probability = entry_prob[i];
7276 for (i = 0; i < num_exit_edges; i++)
7278 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7279 e->probability = exit_prob[i];
7282 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7283 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7284 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7285 dom_bbs.release ();
7287 if (exit_bb)
7289 free (exit_prob);
7290 free (exit_flag);
7291 free (exit_succ);
7293 free (entry_prob);
7294 free (entry_flag);
7295 free (entry_pred);
7296 bbs.release ();
7298 return bb;
7302 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Temporarily switch current_function_decl so the printers pick up
     the right context; restored on every exit path below.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list, comma separated.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump with no GIMPLE body falls back to the tree-node dump.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* In SSA form, also list anonymous SSA names (those with no
	 underlying user variable).  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is a single GIMPLE_BIND prints its own braces;
	 otherwise we provide them ourselves.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    {
	      fprintf (file, "{\n");
	      /* No topmost bind, pretend it's ignored for later.  */
	      ignore_topmost_bind = true;
	    }
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
7464 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  /* Debugger convenience wrapper: dump FN to stderr.  */
  dump_function_to_file (fn, stderr, flags);
}
7473 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7475 static void
7476 print_pred_bbs (FILE *file, basic_block bb)
7478 edge e;
7479 edge_iterator ei;
7481 FOR_EACH_EDGE (e, ei, bb->preds)
7482 fprintf (file, "bb_%d ", e->src->index);
7486 /* Print on FILE the indexes for the successors of basic_block BB. */
7488 static void
7489 print_succ_bbs (FILE *file, basic_block bb)
7491 edge e;
7492 edge_iterator ei;
7494 FOR_EACH_EDGE (e, ei, bb->succs)
7495 fprintf (file, "bb_%d ", e->dest->index);
7498 /* Print to FILE the basic block BB following the VERBOSITY level. */
7500 void
7501 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7503 char *s_indent = (char *) alloca ((size_t) indent + 1);
7504 memset ((void *) s_indent, ' ', (size_t) indent);
7505 s_indent[indent] = '\0';
7507 /* Print basic_block's header. */
7508 if (verbosity >= 2)
7510 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7511 print_pred_bbs (file, bb);
7512 fprintf (file, "}, succs = {");
7513 print_succ_bbs (file, bb);
7514 fprintf (file, "})\n");
7517 /* Print basic_block's body. */
7518 if (verbosity >= 3)
7520 fprintf (file, "%s {\n", s_indent);
7521 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7522 fprintf (file, "%s }\n", s_indent);
7526 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7528 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7529 VERBOSITY level this outputs the contents of the loop, or just its
7530 structure. */
static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop without a header block has been deleted; there is
	 nothing further to print.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      print_decu (loop->nb_iterations_upper_bound, file);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      print_decu (loop->nb_iterations_estimate, file);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Only blocks directly owned by LOOP; blocks of inner loops are
	 printed by the recursive call below.  */
      FOR_EACH_BB_FN (bb, cfun)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7587 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7588 spaces. Following VERBOSITY level this outputs the contents of the
7589 loop, or just its structure. */
7591 static void
7592 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7593 int verbosity)
7595 if (loop == NULL)
7596 return;
7598 print_loop (file, loop, indent, verbosity);
7599 print_loop_and_siblings (file, loop->next, indent, verbosity);
7602 /* Follow a CFG edge from the entry point of the program, and on entry
7603 of a loop, pretty print the loop structure on FILE. */
7605 void
7606 print_loops (FILE *file, int verbosity)
7608 basic_block bb;
7610 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7611 if (bb && bb->loop_father)
7612 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7615 /* Dump a loop. */
DEBUG_FUNCTION void
debug (struct loop &ref)
{
  /* Structure-only dump (verbosity 0) of a single loop to stderr.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
DEBUG_FUNCTION void
debug (struct loop *ptr)
{
  /* NULL-tolerant pointer overload for use from the debugger.  */
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
7632 /* Dump a loop verbosely. */
DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  /* Full dump (verbosity 3: structure, block lists and statements).  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7640 DEBUG_FUNCTION void
7641 debug_verbose (struct loop *ptr)
7643 if (ptr)
7644 debug (*ptr);
7645 else
7646 fprintf (stderr, "<nil>\n");
7650 /* Debugging loops structure at tree level, at some VERBOSITY level. */
DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Dump the whole loop tree of the current function to stderr.  */
  print_loops (stderr, verbosity);
}
7658 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  /* Dump a single loop (and its children) to stderr.  */
  print_loop (stderr, loop, 0, verbosity);
}
7666 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7667 level. */
DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look up loop NUM in the current function and dump it.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
7675 /* Return true if BB ends with a call, possibly followed by some
7676 instructions that must stay with the call. Return false,
7677 otherwise. */
7679 static bool
7680 gimple_block_ends_with_call_p (basic_block bb)
7682 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7683 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7687 /* Return true if BB ends with a conditional branch. Return false,
7688 otherwise. */
7690 static bool
7691 gimple_block_ends_with_condjump_p (const_basic_block bb)
7693 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7694 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7698 /* Return true if we need to add fake edge to exit at statement T.
7699 Helper function for gimple_flow_call_edges_add. */
static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* Well-behaved builtins (nothrow, not returns-twice) never need a
     fake edge -- except __builtin_fork, see below.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any call that might return needs a fake edge.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call still needs one if it has any non-fake
	 successor edge (e.g. EH or abnormal edges).  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asm, and asm with no outputs, may not fall through.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
7756 /* Add fake edges to the function exit for any non constant and non
7757 noreturn calls (or noreturn calls with EH/abnormal edges),
7758 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7759 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7760 that were split.
7762 The goal is to expose cases in which entering a basic block does
7763 not imply that all subsequent instructions must be executed. */
static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do for an empty function.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* The nop pushes the call away from the edge to EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk statements backwards from the end of the block.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7877 /* Removes edge E and all the blocks dominated by it, and updates dominance
7878 information. The IL in E->src needs to be updated separately.
7879 If dominance info is not available, only the edge E is removed.*/
void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominator info we can only remove the edge itself.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects successors of removed blocks...  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* ...minus the removed blocks themselves: the dominance
	 frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8010 /* Purge dead EH edges from basic block BB. */
bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  /* If the last statement can still throw, the EH edges are live.  */
  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  /* Manual iteration: removing an edge shifts the vector, so only
     advance the iterator when the current edge is kept.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
8037 /* Purge dead EH edges from basic block listed in BLOCKS. */
bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_eh_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}
8060 /* Purge dead abnormal call edges from basic block BB. */
bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple stmt = last_stmt (bb);

  /* Abnormal call edges only exist for nonlocal labels or setjmp.  */
  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  /* If the last statement can still transfer control abnormally,
     the edges are live.  */
  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  /* Manual iteration: only advance when the current edge is kept.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  /* A fallthru edge doubling as abnormal is demoted to a plain
	     fallthru rather than removed.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
8094 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
8117 /* This function is called whenever a new edge is created or
8118 redirected. */
8120 static void
8121 gimple_execute_on_growing_pred (edge e)
8123 basic_block bb = e->dest;
8125 if (!gimple_seq_empty_p (phi_nodes (bb)))
8126 reserve_phi_args_for_new_edge (bb);
8129 /* This function is called immediately before edge E is removed from
8130 the edge vector E->dest->preds. */
8132 static void
8133 gimple_execute_on_shrinking_pred (edge e)
8135 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8136 remove_phi_args (e);
8139 /*---------------------------------------------------------------------------
8140 Helper functions for Loop versioning
8141 ---------------------------------------------------------------------------*/
8143 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8144 of 'first'. Both of them are dominated by 'new_head' basic block. When
8145 'new_head' was created by 'second's incoming edge it received phi arguments
8146 on the edge by split_edge(). Later, additional edge 'e' was created to
8147 connect 'new_head' and 'first'. Now this routine adds phi args on this
8148 additional edge 'e' that new_head to second edge received as part of edge
8149 splitting. */
static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  /* Walk both PHI chains in lockstep; each PHI in FIRST gets, on edge E,
     the argument its counterpart in SECOND carries on E2.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8180 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8181 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8182 the destination of the ELSE part. */
static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  The existing fallthru edge becomes the
     FALSE edge (leading to FIRST_HEAD, the ELSE destination).  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
8210 /* Do book-keeping of basic block BB for the profile consistency checker.
8211 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8212 then do post-pass accounting. Store the counting in RECORD. */
static void
gimple_account_profile_record (basic_block bb, int after_pass,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      /* Size estimate accumulates unconditionally...  */
      record->size[after_pass]
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      /* ...while time is weighted by count or frequency, depending on
	 whether real profile data was read or only guessed.  */
      if (profile_status_for_fn (cfun) == PROFILE_READ)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count;
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->frequency;
    }
}
/* CFG hook table for functions in GIMPLE form; see cfghooks.h for the
   slot meanings.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,
};
8271 /* Split all critical edges. */
unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

/* Pass descriptor for the critical-edge splitting pass ("crited").
   Requires a CFG and provides PROP_no_crit_edges so later passes
   (e.g. PRE) may rely on the absence of critical edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Cloneable so the pass can appear more than once in the pipeline.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory: create a fresh instance of the critical-edge splitting pass.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
8356 /* Insert COND expression which is GIMPLE_COND after STMT
8357 in basic block BB with appropriate basic block split
8358 and creation of a new conditionally executed basic block.
8359 Return created basic block. */
8360 basic_block
8361 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8363 edge fall = split_block (bb, stmt);
8364 gimple_stmt_iterator iter = gsi_last_bb (bb);
8365 basic_block new_bb;
8367 /* Insert cond statement. */
8368 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8369 if (gsi_end_p (iter))
8370 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8371 else
8372 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8374 /* Create conditionally executed block. */
8375 new_bb = create_empty_bb (bb);
8376 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8377 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8379 /* Fix edge for split bb. */
8380 fall->flags = EDGE_FALSE_VALUE;
8382 /* Update dominance info. */
8383 if (dom_info_available_p (CDI_DOMINATORS))
8385 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8386 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8389 /* Update loop info. */
8390 if (current_loops)
8391 add_bb_to_loop (new_bb, bb->loop_father);
8393 return new_bb;
8396 /* Build a ternary operation and gimplify it. Emit code before GSI.
8397 Return the gimple_val holding the result. */
8399 tree
8400 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8401 tree type, tree a, tree b, tree c)
8403 tree ret;
8404 location_t loc = gimple_location (gsi_stmt (*gsi));
8406 ret = fold_build3_loc (loc, code, type, a, b, c);
8407 STRIP_NOPS (ret);
8409 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8410 GSI_SAME_STMT);
8413 /* Build a binary operation and gimplify it. Emit code before GSI.
8414 Return the gimple_val holding the result. */
8416 tree
8417 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8418 tree type, tree a, tree b)
8420 tree ret;
8422 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8423 STRIP_NOPS (ret);
8425 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8426 GSI_SAME_STMT);
8429 /* Build a unary operation and gimplify it. Emit code before GSI.
8430 Return the gimple_val holding the result. */
8432 tree
8433 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8434 tree a)
8436 tree ret;
8438 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8439 STRIP_NOPS (ret);
8441 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8442 GSI_SAME_STMT);
8447 /* Given a basic block B which ends with a conditional and has
8448 precisely two successors, determine which of the edges is taken if
8449 the conditional is true and which is taken if the conditional is
8450 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8452 void
8453 extract_true_false_edges_from_block (basic_block b,
8454 edge *true_edge,
8455 edge *false_edge)
8457 edge e = EDGE_SUCC (b, 0);
8459 if (e->flags & EDGE_TRUE_VALUE)
8461 *true_edge = e;
8462 *false_edge = EDGE_SUCC (b, 1);
8464 else
8466 *false_edge = e;
8467 *true_edge = EDGE_SUCC (b, 1);
/* Emit return warnings.  */

namespace {

/* Pass descriptor for the (unnamed on the command line, hence the
   leading '*') function-return warning pass.  Requires a CFG only.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
/* Warn about a "noreturn" function that can return, and about control
   reaching the end of a non-void function.  Returns 0 (no TODO flags).  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Let the target veto these warnings (e.g. for naked functions).  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on a FUNCTION_DECL means "noreturn" here.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  /* NOTE(review): assumes last_stmt never returns NULL for a
	     predecessor of EXIT here -- confirm for empty blocks.  */
	  last = last_stmt (e->src);
	  /* Prefer the location of an actual return statement; the
	     assignment inside the condition captures it.  */
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      /* Fall back to the closing brace of the function.  */
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  /* A bare "return;" (no value) in a value-returning function.  */
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
} // anon namespace

/* Factory: create a fresh instance of the return-warning pass.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8564 /* Walk a gimplified function and warn for functions whose return value is
8565 ignored and attribute((warn_unused_result)) is set. This is done before
8566 inlining, so we don't have to worry about that. */
8568 static void
8569 do_warn_unused_result (gimple_seq seq)
8571 tree fdecl, ftype;
8572 gimple_stmt_iterator i;
8574 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8576 gimple g = gsi_stmt (i);
8578 switch (gimple_code (g))
8580 case GIMPLE_BIND:
8581 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8582 break;
8583 case GIMPLE_TRY:
8584 do_warn_unused_result (gimple_try_eval (g));
8585 do_warn_unused_result (gimple_try_cleanup (g));
8586 break;
8587 case GIMPLE_CATCH:
8588 do_warn_unused_result (gimple_catch_handler (
8589 as_a <gcatch *> (g)));
8590 break;
8591 case GIMPLE_EH_FILTER:
8592 do_warn_unused_result (gimple_eh_filter_failure (g));
8593 break;
8595 case GIMPLE_CALL:
8596 if (gimple_call_lhs (g))
8597 break;
8598 if (gimple_call_internal_p (g))
8599 break;
8601 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8602 LHS. All calls whose value is ignored should be
8603 represented like this. Look for the attribute. */
8604 fdecl = gimple_call_fndecl (g);
8605 ftype = gimple_call_fntype (g);
8607 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8609 location_t loc = gimple_location (g);
8611 if (fdecl)
8612 warning_at (loc, OPT_Wunused_result,
8613 "ignoring return value of %qD, "
8614 "declared with attribute warn_unused_result",
8615 fdecl);
8616 else
8617 warning_at (loc, OPT_Wunused_result,
8618 "ignoring return value of function "
8619 "declared with attribute warn_unused_result");
8621 break;
8623 default:
8624 /* Not a container, not a call, or a call whose value is used. */
8625 break;
namespace {

/* Pass descriptor for the warn_unused_result walker; runs on the
   gimplified body before CFG construction (PROP_gimple_any).  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when -Wunused-result is in effect.  */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory: create a fresh instance of the warn_unused_result pass.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale profile counts so the body matches the (possibly updated)
     cgraph node count.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_node::get (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      /* NOTE: the iterator is advanced manually at the bottom of the
	 loop because statement removal below must not skip the next
	 statement (see the 'continue' after gsi_remove).  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* A call newly discovered const/pure cannot have abnormal
		 edges; purge them and refresh SSA operands.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* A call newly discovered noreturn may need its block
		 truncated after it.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  /* gsi_remove already advanced GSI; skip gsi_next.  */
		  continue;
		}
	    }
	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  /* Drop EH side effects that are no longer possible and purge
	     the then-dead EH edges.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  /* Re-derive hot/cold classification only when counts changed.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* CFG cleanup may delete blocks and thereby invalidate loop info.  */
  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

/* Pass descriptor for the CFG fixup pass ("fixup_cfg").  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable: fixup_cfg is scheduled several times in the pipeline.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

/* Factory: create a fresh instance of the CFG fixup pass.  */
gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark the insn X (and what it references) as reachable during GC.  */
static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

/* Mark edge E and everything it references as reachable during GC.
   Pending edge insertions are a gimple seq or an insn chain depending
   on the current IR, so pick the right union member.  */
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note the insn X for PCH output.  */
static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

/* Note edge E and everything it references for PCH output.  Pending
   edge insertions are a gimple seq or an insn chain depending on the
   current IR, so pick the right union member.  */
void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
8896 void
8897 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8899 tree block = LOCATION_BLOCK (e->goto_locus);
8900 op (&(e->src), cookie);
8901 op (&(e->dest), cookie);
8902 if (current_ir_type () == IR_GIMPLE)
8903 op (&(e->insns.g), cookie);
8904 else
8905 op (&(e->insns.r), cookie);
8906 op (&(block), cookie);