/* gcc/tree-cfg.c */
/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "rtl.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
87 /* This file contains functions for building the Control Flow Graph (CFG)
88 for a function tree. */
90 /* Local declarations. */
92 /* Initial capacity for the basic block array. */
93 static const int initial_cfg_capacity = 20;
95 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
96 which use a particular edge. The CASE_LABEL_EXPRs are chained together
97 via their CASE_CHAIN field, which we clear after we're done with the
98 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
100 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
101 update the case vector in response to edge redirections.
103 Right now this table is set up and torn down at key points in the
104 compilation process. It would be nice if we could make the table
105 more persistent. The key is getting notification of changes to
106 the CFG (particularly edge removal, creation and redirection). */
108 static hash_map<edge, tree> *edge_to_cases;
110 /* If we record edge_to_cases, this bitmap will hold indexes
111 of basic blocks that end in a GIMPLE_SWITCH which we touched
112 due to edge manipulations. */
114 static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their containing block's leading label.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
124 /* Hash table to store last discriminator assigned for each locus. */
125 struct locus_discrim_map
127 location_t locus;
128 int discriminator;
131 /* Hashtable helpers. */
133 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
135 static inline hashval_t hash (const locus_discrim_map *);
136 static inline bool equal (const locus_discrim_map *,
137 const locus_discrim_map *);
140 /* Trivial hash function for a location_t. ITEM is a pointer to
141 a hash table entry that maps a location_t to a discriminator. */
143 inline hashval_t
144 locus_discrim_hasher::hash (const locus_discrim_map *item)
146 return LOCATION_LINE (item->locus);
149 /* Equality function for the locus-to-discriminator map. A and B
150 point to the two hash table entries to compare. */
152 inline bool
153 locus_discrim_hasher::equal (const locus_discrim_map *a,
154 const locus_discrim_map *b)
156 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
159 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
161 /* Basic blocks and flowgraphs. */
162 static void make_blocks (gimple_seq);
164 /* Edges. */
165 static void make_edges (void);
166 static void assign_discriminators (void);
167 static void make_cond_expr_edges (basic_block);
168 static void make_gimple_switch_edges (gswitch *, basic_block);
169 static bool make_goto_expr_edges (basic_block);
170 static void make_gimple_asm_edges (basic_block);
171 static edge gimple_redirect_edge_and_branch (edge, basic_block);
172 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
174 /* Various helpers. */
175 static inline bool stmt_starts_bb_p (gimple, gimple);
176 static int gimple_verify_flow_info (void);
177 static void gimple_make_forwarder_block (edge);
178 static gimple first_non_label_stmt (basic_block);
179 static bool verify_gimple_transaction (gtransaction *);
180 static bool call_can_make_abnormal_goto (gimple);
182 /* Flowgraph optimization and cleanup. */
183 static void gimple_merge_blocks (basic_block, basic_block);
184 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
185 static void remove_bb (basic_block);
186 static edge find_taken_edge_computed_goto (basic_block, tree);
187 static edge find_taken_edge_cond_expr (basic_block, tree);
188 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
189 static tree find_case_label_for_value (gswitch *, tree);
191 void
192 init_empty_tree_cfg_for_function (struct function *fn)
194 /* Initialize the basic block array. */
195 init_flow (fn);
196 profile_status_for_fn (fn) = PROFILE_ABSENT;
197 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
198 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
199 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
200 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
201 initial_cfg_capacity);
203 /* Build a mapping of labels to their associated blocks. */
204 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
205 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
206 initial_cfg_capacity);
208 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
209 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
211 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
212 = EXIT_BLOCK_PTR_FOR_FN (fn);
213 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
214 = ENTRY_BLOCK_PTR_FOR_FN (fn);
217 void
218 init_empty_tree_cfg (void)
220 init_empty_tree_cfg_for_function (cfun);
223 /*---------------------------------------------------------------------------
224 Create basic blocks
225 ---------------------------------------------------------------------------*/
227 /* Entry point to the CFG builder for trees. SEQ is the sequence of
228 statements to be added to the flowgraph. */
230 static void
231 build_gimple_cfg (gimple_seq seq)
233 /* Register specific gimple functions. */
234 gimple_register_cfg_hooks ();
236 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
238 init_empty_tree_cfg ();
240 make_blocks (seq);
242 /* Make sure there is always at least one block, even if it's empty. */
243 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
244 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
246 /* Adjust the size of the array. */
247 if (basic_block_info_for_fn (cfun)->length ()
248 < (size_t) n_basic_blocks_for_fn (cfun))
249 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
250 n_basic_blocks_for_fn (cfun));
252 /* To speed up statement iterator walks, we first purge dead labels. */
253 cleanup_dead_labels ();
255 /* Group case nodes to reduce the number of edges.
256 We do this after cleaning up dead labels because otherwise we miss
257 a lot of obvious case merging opportunities. */
258 group_case_labels ();
260 /* Create the edges of the flowgraph. */
261 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
262 make_edges ();
263 assign_discriminators ();
264 cleanup_dead_labels ();
265 delete discriminator_per_locus;
266 discriminator_per_locus = NULL;
269 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
270 them and propagate the information to LOOP. We assume that the annotations
271 come immediately before the condition in BB, if any. */
273 static void
274 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
276 gimple_stmt_iterator gsi = gsi_last_bb (bb);
277 gimple stmt = gsi_stmt (gsi);
279 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
280 return;
282 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
284 stmt = gsi_stmt (gsi);
285 if (gimple_code (stmt) != GIMPLE_CALL)
286 break;
287 if (!gimple_call_internal_p (stmt)
288 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
289 break;
291 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
293 case annot_expr_ivdep_kind:
294 loop->safelen = INT_MAX;
295 break;
296 case annot_expr_no_vector_kind:
297 loop->dont_vectorize = true;
298 break;
299 case annot_expr_vector_kind:
300 loop->force_vectorize = true;
301 cfun->has_force_vectorize_loops = true;
302 break;
303 default:
304 gcc_unreachable ();
307 stmt = gimple_build_assign (gimple_call_lhs (stmt),
308 gimple_call_arg (stmt, 0));
309 gsi_replace (&gsi, stmt, true);
313 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
314 them and propagate the information to the loop. We assume that the
315 annotations come immediately before the condition of the loop. */
317 static void
318 replace_loop_annotate (void)
320 struct loop *loop;
321 basic_block bb;
322 gimple_stmt_iterator gsi;
323 gimple stmt;
325 FOR_EACH_LOOP (loop, 0)
327 /* First look into the header. */
328 replace_loop_annotate_in_block (loop->header, loop);
330 /* Then look into the latch, if any. */
331 if (loop->latch)
332 replace_loop_annotate_in_block (loop->latch, loop);
335 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
336 FOR_EACH_BB_FN (bb, cfun)
338 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
340 stmt = gsi_stmt (gsi);
341 if (gimple_code (stmt) != GIMPLE_CALL)
342 continue;
343 if (!gimple_call_internal_p (stmt)
344 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
345 continue;
347 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
349 case annot_expr_ivdep_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 break;
353 default:
354 gcc_unreachable ();
357 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
358 stmt = gimple_build_assign (gimple_call_lhs (stmt),
359 gimple_call_arg (stmt, 0));
360 gsi_replace (&gsi, stmt, true);
366 static unsigned int
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
378 cleanup_tree_cfg ();
379 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
380 replace_loop_annotate ();
381 return 0;
384 namespace {
386 const pass_data pass_data_build_cfg =
388 GIMPLE_PASS, /* type */
389 "cfg", /* name */
390 OPTGROUP_NONE, /* optinfo_flags */
391 TV_TREE_CFG, /* tv_id */
392 PROP_gimple_leh, /* properties_required */
393 ( PROP_cfg | PROP_loops ), /* properties_provided */
394 0, /* properties_destroyed */
395 0, /* todo_flags_start */
396 0, /* todo_flags_finish */
399 class pass_build_cfg : public gimple_opt_pass
401 public:
402 pass_build_cfg (gcc::context *ctxt)
403 : gimple_opt_pass (pass_data_build_cfg, ctxt)
406 /* opt_pass methods: */
407 virtual unsigned int execute (function *) { return execute_build_cfg (); }
409 }; // class pass_build_cfg
411 } // anon namespace
413 gimple_opt_pass *
414 make_pass_build_cfg (gcc::context *ctxt)
416 return new pass_build_cfg (ctxt);
420 /* Return true if T is a computed goto. */
422 bool
423 computed_goto_p (gimple t)
425 return (gimple_code (t) == GIMPLE_GOTO
426 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
429 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
430 the other edge points to a bb with just __builtin_unreachable ().
431 I.e. return true for C->M edge in:
432 <bb C>:
434 if (something)
435 goto <bb N>;
436 else
437 goto <bb M>;
438 <bb N>:
439 __builtin_unreachable ();
440 <bb M>: */
442 bool
443 assert_unreachable_fallthru_edge_p (edge e)
445 basic_block pred_bb = e->src;
446 gimple last = last_stmt (pred_bb);
447 if (last && gimple_code (last) == GIMPLE_COND)
449 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
450 if (other_bb == e->dest)
451 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
452 if (EDGE_COUNT (other_bb->succs) == 0)
454 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
455 gimple stmt;
457 if (gsi_end_p (gsi))
458 return false;
459 stmt = gsi_stmt (gsi);
460 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
462 gsi_next (&gsi);
463 if (gsi_end_p (gsi))
464 return false;
465 stmt = gsi_stmt (gsi);
467 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
470 return false;
474 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
475 could alter control flow except via eh. We initialize the flag at
476 CFG build time and only ever clear it later. */
478 static void
479 gimple_call_initialize_ctrl_altering (gimple stmt)
481 int flags = gimple_call_flags (stmt);
483 /* A call alters control flow if it can make an abnormal goto. */
484 if (call_can_make_abnormal_goto (stmt)
485 /* A call also alters control flow if it does not return. */
486 || flags & ECF_NORETURN
487 /* TM ending statements have backedges out of the transaction.
488 Return true so we split the basic block containing them.
489 Note that the TM_BUILTIN test is merely an optimization. */
490 || ((flags & ECF_TM_BUILTIN)
491 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
492 /* BUILT_IN_RETURN call is same as return statement. */
493 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
494 gimple_call_set_ctrl_altering (stmt, true);
495 else
496 gimple_call_set_ctrl_altering (stmt, false);
500 /* Insert SEQ after BB and build a flowgraph. */
502 static basic_block
503 make_blocks_1 (gimple_seq seq, basic_block bb)
505 gimple_stmt_iterator i = gsi_start (seq);
506 gimple stmt = NULL;
507 bool start_new_block = true;
508 bool first_stmt_of_seq = true;
510 while (!gsi_end_p (i))
512 gimple prev_stmt;
514 prev_stmt = stmt;
515 stmt = gsi_stmt (i);
517 if (stmt && is_gimple_call (stmt))
518 gimple_call_initialize_ctrl_altering (stmt);
520 /* If the statement starts a new basic block or if we have determined
521 in a previous pass that we need to create a new block for STMT, do
522 so now. */
523 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
525 if (!first_stmt_of_seq)
526 gsi_split_seq_before (&i, &seq);
527 bb = create_basic_block (seq, bb);
528 start_new_block = false;
531 /* Now add STMT to BB and create the subgraphs for special statement
532 codes. */
533 gimple_set_bb (stmt, bb);
535 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
536 next iteration. */
537 if (stmt_ends_bb_p (stmt))
539 /* If the stmt can make abnormal goto use a new temporary
540 for the assignment to the LHS. This makes sure the old value
541 of the LHS is available on the abnormal edge. Otherwise
542 we will end up with overlapping life-ranges for abnormal
543 SSA names. */
544 if (gimple_has_lhs (stmt)
545 && stmt_can_make_abnormal_goto (stmt)
546 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
548 tree lhs = gimple_get_lhs (stmt);
549 tree tmp = create_tmp_var (TREE_TYPE (lhs));
550 gimple s = gimple_build_assign (lhs, tmp);
551 gimple_set_location (s, gimple_location (stmt));
552 gimple_set_block (s, gimple_block (stmt));
553 gimple_set_lhs (stmt, tmp);
554 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
555 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
556 DECL_GIMPLE_REG_P (tmp) = 1;
557 gsi_insert_after (&i, s, GSI_SAME_STMT);
559 start_new_block = true;
562 gsi_next (&i);
563 first_stmt_of_seq = false;
565 return bb;
568 /* Build a flowgraph for the sequence of stmts SEQ. */
570 static void
571 make_blocks (gimple_seq seq)
573 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
576 /* Create and return a new empty basic block after bb AFTER. */
578 static basic_block
579 create_bb (void *h, void *e, basic_block after)
581 basic_block bb;
583 gcc_assert (!e);
585 /* Create and initialize a new basic block. Since alloc_block uses
586 GC allocation that clears memory to allocate a basic block, we do
587 not have to clear the newly allocated basic block here. */
588 bb = alloc_block ();
590 bb->index = last_basic_block_for_fn (cfun);
591 bb->flags = BB_NEW;
592 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
594 /* Add the new block to the linked list of blocks. */
595 link_block (bb, after);
597 /* Grow the basic block array if needed. */
598 if ((size_t) last_basic_block_for_fn (cfun)
599 == basic_block_info_for_fn (cfun)->length ())
601 size_t new_size =
602 (last_basic_block_for_fn (cfun)
603 + (last_basic_block_for_fn (cfun) + 3) / 4);
604 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
607 /* Add the newly created block to the array. */
608 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
610 n_basic_blocks_for_fn (cfun)++;
611 last_basic_block_for_fn (cfun)++;
613 return bb;
617 /*---------------------------------------------------------------------------
618 Edge creation
619 ---------------------------------------------------------------------------*/
621 /* Fold COND_EXPR_COND of each COND_EXPR. */
623 void
624 fold_cond_expr_cond (void)
626 basic_block bb;
628 FOR_EACH_BB_FN (bb, cfun)
630 gimple stmt = last_stmt (bb);
632 if (stmt && gimple_code (stmt) == GIMPLE_COND)
634 gcond *cond_stmt = as_a <gcond *> (stmt);
635 location_t loc = gimple_location (stmt);
636 tree cond;
637 bool zerop, onep;
639 fold_defer_overflow_warnings ();
640 cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
641 boolean_type_node,
642 gimple_cond_lhs (cond_stmt),
643 gimple_cond_rhs (cond_stmt));
644 if (cond)
646 zerop = integer_zerop (cond);
647 onep = integer_onep (cond);
649 else
650 zerop = onep = false;
652 fold_undefer_overflow_warnings (zerop || onep,
653 stmt,
654 WARN_STRICT_OVERFLOW_CONDITIONAL);
655 if (zerop)
656 gimple_cond_make_false (cond_stmt);
657 else if (onep)
658 gimple_cond_make_true (cond_stmt);
663 /* If basic block BB has an abnormal edge to a basic block
664 containing IFN_ABNORMAL_DISPATCHER internal call, return
665 that the dispatcher's basic block, otherwise return NULL. */
667 basic_block
668 get_abnormal_succ_dispatcher (basic_block bb)
670 edge e;
671 edge_iterator ei;
673 FOR_EACH_EDGE (e, ei, bb->succs)
674 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
676 gimple_stmt_iterator gsi
677 = gsi_start_nondebug_after_labels_bb (e->dest);
678 gimple g = gsi_stmt (gsi);
679 if (g
680 && is_gimple_call (g)
681 && gimple_call_internal_p (g)
682 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
683 return e->dest;
685 return NULL;
688 /* Helper function for make_edges. Create a basic block with
689 with ABNORMAL_DISPATCHER internal call in it if needed, and
690 create abnormal edges from BBS to it and from it to FOR_BB
691 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
693 static void
694 handle_abnormal_edges (basic_block *dispatcher_bbs,
695 basic_block for_bb, int *bb_to_omp_idx,
696 auto_vec<basic_block> *bbs, bool computed_goto)
698 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
699 unsigned int idx = 0;
700 basic_block bb;
701 bool inner = false;
703 if (bb_to_omp_idx)
705 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
706 if (bb_to_omp_idx[for_bb->index] != 0)
707 inner = true;
710 /* If the dispatcher has been created already, then there are basic
711 blocks with abnormal edges to it, so just make a new edge to
712 for_bb. */
713 if (*dispatcher == NULL)
715 /* Check if there are any basic blocks that need to have
716 abnormal edges to this dispatcher. If there are none, return
717 early. */
718 if (bb_to_omp_idx == NULL)
720 if (bbs->is_empty ())
721 return;
723 else
725 FOR_EACH_VEC_ELT (*bbs, idx, bb)
726 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
727 break;
728 if (bb == NULL)
729 return;
732 /* Create the dispatcher bb. */
733 *dispatcher = create_basic_block (NULL, for_bb);
734 if (computed_goto)
736 /* Factor computed gotos into a common computed goto site. Also
737 record the location of that site so that we can un-factor the
738 gotos after we have converted back to normal form. */
739 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
741 /* Create the destination of the factored goto. Each original
742 computed goto will put its desired destination into this
743 variable and jump to the label we create immediately below. */
744 tree var = create_tmp_var (ptr_type_node, "gotovar");
746 /* Build a label for the new block which will contain the
747 factored computed goto. */
748 tree factored_label_decl
749 = create_artificial_label (UNKNOWN_LOCATION);
750 gimple factored_computed_goto_label
751 = gimple_build_label (factored_label_decl);
752 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
754 /* Build our new computed goto. */
755 gimple factored_computed_goto = gimple_build_goto (var);
756 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
758 FOR_EACH_VEC_ELT (*bbs, idx, bb)
760 if (bb_to_omp_idx
761 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
762 continue;
764 gsi = gsi_last_bb (bb);
765 gimple last = gsi_stmt (gsi);
767 gcc_assert (computed_goto_p (last));
769 /* Copy the original computed goto's destination into VAR. */
770 gimple assignment
771 = gimple_build_assign (var, gimple_goto_dest (last));
772 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
774 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
775 e->goto_locus = gimple_location (last);
776 gsi_remove (&gsi, true);
779 else
781 tree arg = inner ? boolean_true_node : boolean_false_node;
782 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
783 1, arg);
784 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
785 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
787 /* Create predecessor edges of the dispatcher. */
788 FOR_EACH_VEC_ELT (*bbs, idx, bb)
790 if (bb_to_omp_idx
791 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
792 continue;
793 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
798 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
801 /* Creates outgoing edges for BB. Returns 1 when it ends with an
802 computed goto, returns 2 when it ends with a statement that
803 might return to this function via an nonlocal goto, otherwise
804 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
806 static int
807 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
809 gimple last = last_stmt (bb);
810 bool fallthru = false;
811 int ret = 0;
813 if (!last)
814 return ret;
816 switch (gimple_code (last))
818 case GIMPLE_GOTO:
819 if (make_goto_expr_edges (bb))
820 ret = 1;
821 fallthru = false;
822 break;
823 case GIMPLE_RETURN:
825 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
826 e->goto_locus = gimple_location (last);
827 fallthru = false;
829 break;
830 case GIMPLE_COND:
831 make_cond_expr_edges (bb);
832 fallthru = false;
833 break;
834 case GIMPLE_SWITCH:
835 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
836 fallthru = false;
837 break;
838 case GIMPLE_RESX:
839 make_eh_edges (last);
840 fallthru = false;
841 break;
842 case GIMPLE_EH_DISPATCH:
843 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
844 break;
846 case GIMPLE_CALL:
847 /* If this function receives a nonlocal goto, then we need to
848 make edges from this call site to all the nonlocal goto
849 handlers. */
850 if (stmt_can_make_abnormal_goto (last))
851 ret = 2;
853 /* If this statement has reachable exception handlers, then
854 create abnormal edges to them. */
855 make_eh_edges (last);
857 /* BUILTIN_RETURN is really a return statement. */
858 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
860 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
861 fallthru = false;
863 /* Some calls are known not to return. */
864 else
865 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
866 break;
868 case GIMPLE_ASSIGN:
869 /* A GIMPLE_ASSIGN may throw internally and thus be considered
870 control-altering. */
871 if (is_ctrl_altering_stmt (last))
872 make_eh_edges (last);
873 fallthru = true;
874 break;
876 case GIMPLE_ASM:
877 make_gimple_asm_edges (bb);
878 fallthru = true;
879 break;
881 CASE_GIMPLE_OMP:
882 fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
883 break;
885 case GIMPLE_TRANSACTION:
887 tree abort_label
888 = gimple_transaction_label (as_a <gtransaction *> (last));
889 if (abort_label)
890 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
891 fallthru = true;
893 break;
895 default:
896 gcc_assert (!stmt_ends_bb_p (last));
897 fallthru = true;
898 break;
901 if (fallthru)
902 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
904 return ret;
907 /* Join all the blocks in the flowgraph. */
909 static void
910 make_edges (void)
912 basic_block bb;
913 struct omp_region *cur_region = NULL;
914 auto_vec<basic_block> ab_edge_goto;
915 auto_vec<basic_block> ab_edge_call;
916 int *bb_to_omp_idx = NULL;
917 int cur_omp_region_idx = 0;
919 /* Create an edge from entry to the first block with executable
920 statements in it. */
921 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
922 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
923 EDGE_FALLTHRU);
925 /* Traverse the basic block array placing edges. */
926 FOR_EACH_BB_FN (bb, cfun)
928 int mer;
930 if (bb_to_omp_idx)
931 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
933 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
934 if (mer == 1)
935 ab_edge_goto.safe_push (bb);
936 else if (mer == 2)
937 ab_edge_call.safe_push (bb);
939 if (cur_region && bb_to_omp_idx == NULL)
940 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
943 /* Computed gotos are hell to deal with, especially if there are
944 lots of them with a large number of destinations. So we factor
945 them to a common computed goto location before we build the
946 edge list. After we convert back to normal form, we will un-factor
947 the computed gotos since factoring introduces an unwanted jump.
948 For non-local gotos and abnormal edges from calls to calls that return
949 twice or forced labels, factor the abnormal edges too, by having all
950 abnormal edges from the calls go to a common artificial basic block
951 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
952 basic block to all forced labels and calls returning twice.
953 We do this per-OpenMP structured block, because those regions
954 are guaranteed to be single entry single exit by the standard,
955 so it is not allowed to enter or exit such regions abnormally this way,
956 thus all computed gotos, non-local gotos and setjmp/longjmp calls
957 must not transfer control across SESE region boundaries. */
958 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
960 gimple_stmt_iterator gsi;
961 basic_block dispatcher_bb_array[2] = { NULL, NULL };
962 basic_block *dispatcher_bbs = dispatcher_bb_array;
963 int count = n_basic_blocks_for_fn (cfun);
965 if (bb_to_omp_idx)
966 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
968 FOR_EACH_BB_FN (bb, cfun)
970 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
973 tree target;
975 if (!label_stmt)
976 break;
978 target = gimple_label_label (label_stmt);
980 /* Make an edge to every label block that has been marked as a
981 potential target for a computed goto or a non-local goto. */
982 if (FORCED_LABEL (target))
983 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
984 &ab_edge_goto, true);
985 if (DECL_NONLOCAL (target))
987 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
988 &ab_edge_call, false);
989 break;
993 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
994 gsi_next_nondebug (&gsi);
995 if (!gsi_end_p (gsi))
997 /* Make an edge to every setjmp-like call. */
998 gimple call_stmt = gsi_stmt (gsi);
999 if (is_gimple_call (call_stmt)
1000 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1001 || gimple_call_builtin_p (call_stmt,
1002 BUILT_IN_SETJMP_RECEIVER)))
1003 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1004 &ab_edge_call, false);
1008 if (bb_to_omp_idx)
1009 XDELETE (dispatcher_bbs);
1012 XDELETE (bb_to_omp_idx);
1014 free_omp_regions ();
1016 /* Fold COND_EXPR_COND of each COND_EXPR. */
1017 fold_cond_expr_cond ();
1020 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1021 needed. Returns true if new bbs were created.
1022 Note: This is transitional code, and should not be used for new code. We
1023 should be able to get rid of this by rewriting all target va-arg
1024 gimplification hooks to use an interface gimple_build_cond_value as described
1025 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1027 bool
1028 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1030 gimple stmt = gsi_stmt (*gsi);
1031 basic_block bb = gimple_bb (stmt);
1032 basic_block lastbb, afterbb;
1033 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1034 edge e;
1035 lastbb = make_blocks_1 (seq, bb);
1036 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1037 return false;
1038 e = split_block (bb, stmt);
1039 /* Move e->dest to come after the new basic blocks. */
1040 afterbb = e->dest;
1041 unlink_block (afterbb);
1042 link_block (afterbb, lastbb);
1043 redirect_edge_succ (e, bb->next_bb);
1044 bb = bb->next_bb;
1045 while (bb != afterbb)
1047 struct omp_region *cur_region = NULL;
1048 int cur_omp_region_idx = 0;
1049 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1050 gcc_assert (!mer && !cur_region);
1051 add_bb_to_loop (bb, afterbb->loop_father);
1052 bb = bb->next_bb;
1054 return true;
1057 /* Find the next available discriminator value for LOCUS. The
1058 discriminator distinguishes among several basic blocks that
1059 share a common locus, allowing for more accurate sample-based
1060 profiling. */
1062 static int
1063 next_discriminator_for_locus (location_t locus)
1065 struct locus_discrim_map item;
1066 struct locus_discrim_map **slot;
1068 item.locus = locus;
1069 item.discriminator = 0;
1070 slot = discriminator_per_locus->find_slot_with_hash (
1071 &item, LOCATION_LINE (locus), INSERT);
1072 gcc_assert (slot);
1073 if (*slot == HTAB_EMPTY_ENTRY)
1075 *slot = XNEW (struct locus_discrim_map);
1076 gcc_assert (*slot);
1077 (*slot)->locus = locus;
1078 (*slot)->discriminator = 0;
1080 (*slot)->discriminator++;
1081 return (*slot)->discriminator;
1084 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1086 static bool
1087 same_line_p (location_t locus1, location_t locus2)
1089 expanded_location from, to;
1091 if (locus1 == locus2)
1092 return true;
1094 from = expand_location (locus1);
1095 to = expand_location (locus2);
1097 if (from.line != to.line)
1098 return false;
1099 if (from.file == to.file)
1100 return true;
1101 return (from.file != NULL
1102 && to.file != NULL
1103 && filename_cmp (from.file, to.file) == 0);
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      /* Blocks without a meaningful locus cannot clash with anyone.  */
      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  /* A successor sharing BB's source line needs a discriminator
	     so sample profiles can tell the two blocks apart.  */
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      /* Prefer assigning a fresh discriminator to BB itself when
		 the successor already has one and BB does not.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the false edge is guarded against a NULL return
     from make_edge here -- presumably make_edge can return NULL when
     then_bb == else_bb and the edge already exists; confirm against
     make_edge's contract.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1173 /* Called for each element in the hash table (P) as we delete the
1174 edge to cases hash table.
1176 Clear all the TREE_CHAINs to prevent problems with copying of
1177 SWITCH_EXPRs and structure sharing rules, then free the hash table
1178 element. */
1180 bool
1181 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1183 tree t, next;
1185 for (t = value; t; t = next)
1187 next = CASE_CHAIN (t);
1188 CASE_CHAIN (t) = NULL;
1191 return true;
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  /* Recording must not already be active.  */
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  /* Track which blocks had their switch edges touched so
     end_recording_case_labels can re-group their labels.  */
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
1204 /* Return nonzero if we are recording information for case labels. */
1206 static bool
1207 recording_case_labels_p (void)
1209 return (edge_to_cases != NULL);
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear CASE_CHAINs before freeing the map (see edge_to_cases_cleanup).  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  /* Re-group case labels of every switch whose edges were touched while
     recording was active.  A block index may no longer exist, hence the
     NULL check.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Cases are chained through CASE_CHAIN, newest in
	 front.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
1277 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1279 static void
1280 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1282 size_t i, n;
1284 n = gimple_switch_num_labels (entry);
1286 for (i = 0; i < n; ++i)
1288 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1289 basic_block label_bb = label_to_block (lab);
1290 make_edge (bb, label_bb, 0);
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label statement assigned DEST a valid UID.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* UID may be out of range of the map (e.g. label not yet placed).  */
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      /* The goto is now fully represented by the edge; drop the stmt.  */
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
1344 /* Create edges for an asm statement with labels at block BB. */
1346 static void
1347 make_gimple_asm_edges (basic_block bb)
1349 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1350 int i, n = gimple_asm_nlabels (stmt);
1352 for (i = 0; i < n; ++i)
1354 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1355 basic_block label_bb = label_to_block (label);
1356 make_edge (bb, label_bb, 0);
1360 /*---------------------------------------------------------------------------
1361 Flowgraph analysis
1362 ---------------------------------------------------------------------------*/
1364 /* Cleanup useless labels in basic blocks. This is something we wish
1365 to do early because it allows us to group case labels before creating
1366 the edges for the CFG, and it speeds up block statement iterators in
1367 all passes later on.
1368 We rerun this pass after CFG is created, to get rid of the labels that
1369 are no longer referenced. After then we do not run it any more, since
1370 (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block.
   Allocated/freed per invocation of cleanup_dead_labels.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  /* Record that the leading label is referenced, so the purge pass in
     cleanup_dead_labels keeps it.  */
  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect every landing pad's label to the leading label of its
     block, moving the landing-pad number along with it.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels referenced from each kind of EH region.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  Index 0 is the
     default label, so start from 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  Merged/discarded slots were set to
     NULL_TREE above, so skip over them.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1716 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1717 and scan the sorted vector of cases. Combine the ones jumping to the
1718 same label. */
1720 void
1721 group_case_labels (void)
1723 basic_block bb;
1725 FOR_EACH_BB_FN (bb, cfun)
1727 gimple stmt = last_stmt (bb);
1728 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1729 group_case_labels_stmt (as_a <gswitch *> (stmt));
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  /* A must fall through unconditionally and exclusively into B, and B
     must have no other predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      /* The locus survives only if either the last real stmt of A or the
	 first real stmt of B already carries it.  */
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* Non-PHI statements may now fold; PHIs need no re-folding.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* Folding may have made the statement non-throwing.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  /* Every use must have been rewritten above.  */
  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label must also be detached from its
	     landing pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count. This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2046 /* Return the one of two successors of BB that is not reachable by a
2047 complex edge, if there is one. Else, return BB. We use
2048 this in optimizations that use post-dominators for their heuristics,
2049 to catch the cases in C++ where function calls are involved. */
2051 basic_block
2052 single_noncomplex_succ (basic_block bb)
2054 edge e0, e1;
2055 if (EDGE_COUNT (bb->succs) != 2)
2056 return bb;
2058 e0 = EDGE_SUCC (bb, 0);
2059 e1 = EDGE_SUCC (bb, 1);
2060 if (e0->flags & EDGE_COMPLEX)
2061 return e1->dest;
2062 if (e1->flags & EDGE_COMPLEX)
2063 return e0->dest;
2065 return bb;
2068 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2070 void
2071 notice_special_calls (gcall *call)
2073 int flags = gimple_call_flags (call);
2075 if (flags & ECF_MAY_BE_ALLOCA)
2076 cfun->calls_alloca = true;
2077 if (flags & ECF_RETURNS_TWICE)
2078 cfun->calls_setjmp = true;
2082 /* Clear flags set by notice_special_calls. Used by dead code removal
2083 to update the flags. */
2085 void
2086 clear_special_calls (void)
2088 cfun->calls_alloca = false;
2089 cfun->calls_setjmp = false;
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Always remove index 0 since the
     edge vector is compacted as edges are deleted.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Rescue the label by moving it into the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* gsi_remove leaves the iterator one past the removed stmt;
	     restart from the (new) last statement when it ran off the end.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only constants can select an edge statically.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  /* All control statement kinds are handled above.  */
  gcc_unreachable ();
}
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      /* A CFG edge to the label's block must already exist.  */
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}
2255 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2256 statement, determine which of the two edges will be taken out of the
2257 block. Return NULL if either edge may be taken. */
2259 static edge
2260 find_taken_edge_cond_expr (basic_block bb, tree val)
2262 edge true_edge, false_edge;
2264 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2266 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2267 return (integer_zerop (val) ? false_edge : true_edge);
2270 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2271 statement, determine which edge will be taken out of the block. Return
2272 NULL if any edge may be taken. */
2274 static edge
2275 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2276 tree val)
2278 basic_block dest_bb;
2279 edge e;
2280 tree taken_case;
2282 taken_case = find_case_label_for_value (switch_stmt, val);
2283 dest_bb = label_to_block (CASE_LABEL (taken_case));
2285 e = find_edge (bb, dest_bb);
2286 gcc_assert (e);
2287 return e;
2291 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2292 We can make optimal use here of the fact that the case labels are
2293 sorted: We can do a binary search for a case matching VAL. */
2295 static tree
2296 find_case_label_for_value (gswitch *switch_stmt, tree val)
2298 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2299 tree default_case = gimple_switch_default_label (switch_stmt);
2301 for (low = 0, high = n; high - low > 1; )
2303 size_t i = (high + low) / 2;
2304 tree t = gimple_switch_label (switch_stmt, i);
2305 int cmp;
2307 /* Cache the result of comparing CASE_LOW and val. */
2308 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2310 if (cmp > 0)
2311 high = i;
2312 else
2313 low = i;
2315 if (CASE_HIGH (t) == NULL)
2317 /* A singe-valued case label. */
2318 if (cmp == 0)
2319 return t;
2321 else
2323 /* A case range. We can only handle integer ranges. */
2324 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2325 return t;
2329 return default_case;
2333 /* Dump a basic block on stderr. */
2335 void
2336 gimple_debug_bb (basic_block bb)
2338 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2342 /* Dump basic block with index N on stderr. */
2344 basic_block
2345 gimple_debug_bb_n (int n)
2347 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2348 return BASIC_BLOCK_FOR_FN (cfun, n);
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2364 /* Dump the program showing basic block boundaries on the given FILE.
2366 FLAGS are the same used by the tree dumping functions (see TDF_* in
2367 tree.h). */
2369 void
2370 gimple_dump_cfg (FILE *file, int flags)
2372 if (flags & TDF_DETAILS)
2374 dump_function_header (file, current_function_decl, flags);
2375 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2376 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2377 last_basic_block_for_fn (cfun));
2379 brief_dump_cfg (file, flags | TDF_COMMENT);
2380 fprintf (file, "\n");
2383 if (flags & TDF_STATS)
2384 dump_cfg_stats (file);
2386 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2390 /* Dump CFG statistics on FILE. */
2392 void
2393 dump_cfg_stats (FILE *file)
2395 static long max_num_merged_labels = 0;
2396 unsigned long size, total = 0;
2397 long num_edges;
2398 basic_block bb;
2399 const char * const fmt_str = "%-30s%-13s%12s\n";
2400 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2401 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2402 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2403 const char *funcname = current_function_name ();
2405 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2407 fprintf (file, "---------------------------------------------------------\n");
2408 fprintf (file, fmt_str, "", " Number of ", "Memory");
2409 fprintf (file, fmt_str, "", " instances ", "used ");
2410 fprintf (file, "---------------------------------------------------------\n");
2412 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2413 total += size;
2414 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2415 SCALE (size), LABEL (size));
2417 num_edges = 0;
2418 FOR_EACH_BB_FN (bb, cfun)
2419 num_edges += EDGE_COUNT (bb->succs);
2420 size = num_edges * sizeof (struct edge_def);
2421 total += size;
2422 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2424 fprintf (file, "---------------------------------------------------------\n");
2425 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2426 LABEL (total));
2427 fprintf (file, "---------------------------------------------------------\n");
2428 fprintf (file, "\n");
2430 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2431 max_num_merged_labels = cfg_stats.num_merged_labels;
2433 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2434 cfg_stats.num_merged_labels, max_num_merged_labels);
2436 fprintf (file, "\n");
2440 /* Dump CFG statistics on stderr. Keep extern so that it's always
2441 linked in the final executable. */
2443 DEBUG_FUNCTION void
2444 debug_cfg_stats (void)
2446 dump_cfg_stats (stderr);
2449 /*---------------------------------------------------------------------------
2450 Miscellaneous helpers
2451 ---------------------------------------------------------------------------*/
2453 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2454 flow. Transfers of control flow associated with EH are excluded. */
2456 static bool
2457 call_can_make_abnormal_goto (gimple t)
2459 /* If the function has no non-local labels, then a call cannot make an
2460 abnormal transfer of control. */
2461 if (!cfun->has_nonlocal_label
2462 && !cfun->calls_setjmp)
2463 return false;
2465 /* Likewise if the call has no side effects. */
2466 if (!gimple_has_side_effects (t))
2467 return false;
2469 /* Likewise if the called function is leaf. */
2470 if (gimple_call_flags (t) & ECF_LEAF)
2471 return false;
2473 return true;
2477 /* Return true if T can make an abnormal transfer of control flow.
2478 Transfers of control flow associated with EH are excluded. */
2480 bool
2481 stmt_can_make_abnormal_goto (gimple t)
2483 if (computed_goto_p (t))
2484 return true;
2485 if (is_gimple_call (t))
2486 return call_can_make_abnormal_goto (t);
2487 return false;
2491 /* Return true if T represents a stmt that always transfers control. */
2493 bool
2494 is_ctrl_stmt (gimple t)
2496 switch (gimple_code (t))
2498 case GIMPLE_COND:
2499 case GIMPLE_SWITCH:
2500 case GIMPLE_GOTO:
2501 case GIMPLE_RETURN:
2502 case GIMPLE_RESX:
2503 return true;
2504 default:
2505 return false;
2510 /* Return true if T is a statement that may alter the flow of control
2511 (e.g., a call to a non-returning function). */
2513 bool
2514 is_ctrl_altering_stmt (gimple t)
2516 gcc_assert (t);
2518 switch (gimple_code (t))
2520 case GIMPLE_CALL:
2521 /* Per stmt call flag indicates whether the call could alter
2522 controlflow. */
2523 if (gimple_call_ctrl_altering_p (t))
2524 return true;
2525 break;
2527 case GIMPLE_EH_DISPATCH:
2528 /* EH_DISPATCH branches to the individual catch handlers at
2529 this level of a try or allowed-exceptions region. It can
2530 fallthru to the next statement as well. */
2531 return true;
2533 case GIMPLE_ASM:
2534 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2535 return true;
2536 break;
2538 CASE_GIMPLE_OMP:
2539 /* OpenMP directives alter control flow. */
2540 return true;
2542 case GIMPLE_TRANSACTION:
2543 /* A transaction start alters control flow. */
2544 return true;
2546 default:
2547 break;
2550 /* If a statement can throw, it alters control flow. */
2551 return stmt_can_throw_internal (t);
2555 /* Return true if T is a simple local goto. */
2557 bool
2558 simple_goto_p (gimple t)
2560 return (gimple_code (t) == GIMPLE_GOTO
2561 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2565 /* Return true if STMT should start a new basic block. PREV_STMT is
2566 the statement preceding STMT. It is used when STMT is a label or a
2567 case label. Labels should only start a new basic block if their
2568 previous statement wasn't a label. Otherwise, sequence of labels
2569 would generate unnecessary basic blocks that only contain a single
2570 label. */
2572 static inline bool
2573 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2575 if (stmt == NULL)
2576 return false;
2578 /* Labels start a new basic block only if the preceding statement
2579 wasn't a label of the same type. This prevents the creation of
2580 consecutive blocks that have nothing but a single label. */
2581 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2583 /* Nonlocal and computed GOTO targets always start a new block. */
2584 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2585 || FORCED_LABEL (gimple_label_label (label_stmt)))
2586 return true;
2588 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2590 if (DECL_NONLOCAL (gimple_label_label (
2591 as_a <glabel *> (prev_stmt))))
2592 return true;
2594 cfg_stats.num_merged_labels++;
2595 return false;
2597 else
2598 return true;
2600 else if (gimple_code (stmt) == GIMPLE_CALL
2601 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2602 /* setjmp acts similar to a nonlocal GOTO target and thus should
2603 start a new block. */
2604 return true;
2606 return false;
2610 /* Return true if T should end a basic block. */
2612 bool
2613 stmt_ends_bb_p (gimple t)
2615 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2618 /* Remove block annotations and other data structures. */
2620 void
2621 delete_tree_cfg_annotations (void)
2623 vec_free (label_to_block_map_for_fn (cfun));
2627 /* Return the first statement in basic block BB. */
2629 gimple
2630 first_stmt (basic_block bb)
2632 gimple_stmt_iterator i = gsi_start_bb (bb);
2633 gimple stmt = NULL;
2635 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2637 gsi_next (&i);
2638 stmt = NULL;
2640 return stmt;
2643 /* Return the first non-label statement in basic block BB. */
2645 static gimple
2646 first_non_label_stmt (basic_block bb)
2648 gimple_stmt_iterator i = gsi_start_bb (bb);
2649 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2650 gsi_next (&i);
2651 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2654 /* Return the last statement in basic block BB. */
2656 gimple
2657 last_stmt (basic_block bb)
2659 gimple_stmt_iterator i = gsi_last_bb (bb);
2660 gimple stmt = NULL;
2662 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2664 gsi_prev (&i);
2665 stmt = NULL;
2667 return stmt;
2670 /* Return the last statement of an otherwise empty block. Return NULL
2671 if the block is totally empty, or if it contains more than one
2672 statement. */
2674 gimple
2675 last_and_only_stmt (basic_block bb)
2677 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2678 gimple last, prev;
2680 if (gsi_end_p (i))
2681 return NULL;
2683 last = gsi_stmt (i);
2684 gsi_prev_nondebug (&i);
2685 if (gsi_end_p (i))
2686 return last;
2688 /* Empty statements should no longer appear in the instruction stream.
2689 Everything that might have appeared before should be deleted by
2690 remove_useless_stmts, and the optimizers should just gsi_remove
2691 instead of smashing with build_empty_stmt.
2693 Thus the only thing that should appear here in a block containing
2694 one executable statement is a label. */
2695 prev = gsi_stmt (i);
2696 if (gimple_code (prev) == GIMPLE_LABEL)
2697 return last;
2698 else
2699 return NULL;
2702 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2704 static void
2705 reinstall_phi_args (edge new_edge, edge old_edge)
2707 edge_var_map *vm;
2708 int i;
2709 gphi_iterator phis;
2711 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2712 if (!v)
2713 return;
2715 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2716 v->iterate (i, &vm) && !gsi_end_p (phis);
2717 i++, gsi_next (&phis))
2719 gphi *phi = phis.phi ();
2720 tree result = redirect_edge_var_map_result (vm);
2721 tree arg = redirect_edge_var_map_def (vm);
2723 gcc_assert (result == gimple_phi_result (phi));
2725 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2728 redirect_edge_var_map_clear (old_edge);
2731 /* Returns the basic block after which the new basic block created
2732 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2733 near its "logical" location. This is of most help to humans looking
2734 at debugging dumps. */
2736 basic_block
2737 split_edge_bb_loc (edge edge_in)
2739 basic_block dest = edge_in->dest;
2740 basic_block dest_prev = dest->prev_bb;
2742 if (dest_prev)
2744 edge e = find_edge (dest_prev, dest);
2745 if (e && !(e->flags & EDGE_COMPLEX))
2746 return edge_in->src;
2748 return dest_prev;
2751 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2752 Abort on abnormal edges. */
2754 static basic_block
2755 gimple_split_edge (edge edge_in)
2757 basic_block new_bb, after_bb, dest;
2758 edge new_edge, e;
2760 /* Abnormal edges cannot be split. */
2761 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2763 dest = edge_in->dest;
2765 after_bb = split_edge_bb_loc (edge_in);
2767 new_bb = create_empty_bb (after_bb);
2768 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2769 new_bb->count = edge_in->count;
2770 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2771 new_edge->probability = REG_BR_PROB_BASE;
2772 new_edge->count = edge_in->count;
2774 e = redirect_edge_and_branch (edge_in, new_bb);
2775 gcc_assert (e == edge_in);
2776 reinstall_phi_args (new_edge, e);
2778 return new_bb;
2782 /* Verify properties of the address expression T with base object BASE. */
2784 static tree
2785 verify_address (tree t, tree base)
2787 bool old_constant;
2788 bool old_side_effects;
2789 bool new_constant;
2790 bool new_side_effects;
2792 old_constant = TREE_CONSTANT (t);
2793 old_side_effects = TREE_SIDE_EFFECTS (t);
2795 recompute_tree_invariant_for_addr_expr (t);
2796 new_side_effects = TREE_SIDE_EFFECTS (t);
2797 new_constant = TREE_CONSTANT (t);
2799 if (old_constant != new_constant)
2801 error ("constant not recomputed when ADDR_EXPR changed");
2802 return t;
2804 if (old_side_effects != new_side_effects)
2806 error ("side effects not recomputed when ADDR_EXPR changed");
2807 return t;
2810 if (!(TREE_CODE (base) == VAR_DECL
2811 || TREE_CODE (base) == PARM_DECL
2812 || TREE_CODE (base) == RESULT_DECL))
2813 return NULL_TREE;
2815 if (DECL_GIMPLE_REG_P (base))
2817 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2818 return base;
2821 return NULL_TREE;
2824 /* Callback for walk_tree, check that all elements with address taken are
2825 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2826 inside a PHI node. */
2828 static tree
2829 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2831 tree t = *tp, x;
2833 if (TYPE_P (t))
2834 *walk_subtrees = 0;
2836 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2837 #define CHECK_OP(N, MSG) \
2838 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2839 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2841 switch (TREE_CODE (t))
2843 case SSA_NAME:
2844 if (SSA_NAME_IN_FREE_LIST (t))
2846 error ("SSA name in freelist but still referenced");
2847 return *tp;
2849 break;
2851 case INDIRECT_REF:
2852 error ("INDIRECT_REF in gimple IL");
2853 return t;
2855 case MEM_REF:
2856 x = TREE_OPERAND (t, 0);
2857 if (!POINTER_TYPE_P (TREE_TYPE (x))
2858 || !is_gimple_mem_ref_addr (x))
2860 error ("invalid first operand of MEM_REF");
2861 return x;
2863 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2864 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2866 error ("invalid offset operand of MEM_REF");
2867 return TREE_OPERAND (t, 1);
2869 if (TREE_CODE (x) == ADDR_EXPR
2870 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2871 return x;
2872 *walk_subtrees = 0;
2873 break;
2875 case ASSERT_EXPR:
2876 x = fold (ASSERT_EXPR_COND (t));
2877 if (x == boolean_false_node)
2879 error ("ASSERT_EXPR with an always-false condition");
2880 return *tp;
2882 break;
2884 case MODIFY_EXPR:
2885 error ("MODIFY_EXPR not expected while having tuples");
2886 return *tp;
2888 case ADDR_EXPR:
2890 tree tem;
2892 gcc_assert (is_gimple_address (t));
2894 /* Skip any references (they will be checked when we recurse down the
2895 tree) and ensure that any variable used as a prefix is marked
2896 addressable. */
2897 for (x = TREE_OPERAND (t, 0);
2898 handled_component_p (x);
2899 x = TREE_OPERAND (x, 0))
2902 if ((tem = verify_address (t, x)))
2903 return tem;
2905 if (!(TREE_CODE (x) == VAR_DECL
2906 || TREE_CODE (x) == PARM_DECL
2907 || TREE_CODE (x) == RESULT_DECL))
2908 return NULL;
2910 if (!TREE_ADDRESSABLE (x))
2912 error ("address taken, but ADDRESSABLE bit not set");
2913 return x;
2916 break;
2919 case COND_EXPR:
2920 x = COND_EXPR_COND (t);
2921 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2923 error ("non-integral used in condition");
2924 return x;
2926 if (!is_gimple_condexpr (x))
2928 error ("invalid conditional operand");
2929 return x;
2931 break;
2933 case NON_LVALUE_EXPR:
2934 case TRUTH_NOT_EXPR:
2935 gcc_unreachable ();
2937 CASE_CONVERT:
2938 case FIX_TRUNC_EXPR:
2939 case FLOAT_EXPR:
2940 case NEGATE_EXPR:
2941 case ABS_EXPR:
2942 case BIT_NOT_EXPR:
2943 CHECK_OP (0, "invalid operand to unary operator");
2944 break;
2946 case REALPART_EXPR:
2947 case IMAGPART_EXPR:
2948 case BIT_FIELD_REF:
2949 if (!is_gimple_reg_type (TREE_TYPE (t)))
2951 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2952 return t;
2955 if (TREE_CODE (t) == BIT_FIELD_REF)
2957 tree t0 = TREE_OPERAND (t, 0);
2958 tree t1 = TREE_OPERAND (t, 1);
2959 tree t2 = TREE_OPERAND (t, 2);
2960 if (!tree_fits_uhwi_p (t1)
2961 || !tree_fits_uhwi_p (t2))
2963 error ("invalid position or size operand to BIT_FIELD_REF");
2964 return t;
2966 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2967 && (TYPE_PRECISION (TREE_TYPE (t))
2968 != tree_to_uhwi (t1)))
2970 error ("integral result type precision does not match "
2971 "field size of BIT_FIELD_REF");
2972 return t;
2974 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2975 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2976 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2977 != tree_to_uhwi (t1)))
2979 error ("mode precision of non-integral result does not "
2980 "match field size of BIT_FIELD_REF");
2981 return t;
2983 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2984 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2985 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2987 error ("position plus size exceeds size of referenced object in "
2988 "BIT_FIELD_REF");
2989 return t;
2992 t = TREE_OPERAND (t, 0);
2994 /* Fall-through. */
2995 case COMPONENT_REF:
2996 case ARRAY_REF:
2997 case ARRAY_RANGE_REF:
2998 case VIEW_CONVERT_EXPR:
2999 /* We have a nest of references. Verify that each of the operands
3000 that determine where to reference is either a constant or a variable,
3001 verify that the base is valid, and then show we've already checked
3002 the subtrees. */
3003 while (handled_component_p (t))
3005 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3006 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3007 else if (TREE_CODE (t) == ARRAY_REF
3008 || TREE_CODE (t) == ARRAY_RANGE_REF)
3010 CHECK_OP (1, "invalid array index");
3011 if (TREE_OPERAND (t, 2))
3012 CHECK_OP (2, "invalid array lower bound");
3013 if (TREE_OPERAND (t, 3))
3014 CHECK_OP (3, "invalid array stride");
3016 else if (TREE_CODE (t) == BIT_FIELD_REF
3017 || TREE_CODE (t) == REALPART_EXPR
3018 || TREE_CODE (t) == IMAGPART_EXPR)
3020 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3021 "REALPART_EXPR");
3022 return t;
3025 t = TREE_OPERAND (t, 0);
3028 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3030 error ("invalid reference prefix");
3031 return t;
3033 *walk_subtrees = 0;
3034 break;
3035 case PLUS_EXPR:
3036 case MINUS_EXPR:
3037 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3038 POINTER_PLUS_EXPR. */
3039 if (POINTER_TYPE_P (TREE_TYPE (t)))
3041 error ("invalid operand to plus/minus, type is a pointer");
3042 return t;
3044 CHECK_OP (0, "invalid operand to binary operator");
3045 CHECK_OP (1, "invalid operand to binary operator");
3046 break;
3048 case POINTER_PLUS_EXPR:
3049 /* Check to make sure the first operand is a pointer or reference type. */
3050 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3052 error ("invalid operand to pointer plus, first operand is not a pointer");
3053 return t;
3055 /* Check to make sure the second operand is a ptrofftype. */
3056 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3058 error ("invalid operand to pointer plus, second operand is not an "
3059 "integer type of appropriate width");
3060 return t;
3062 /* FALLTHROUGH */
3063 case LT_EXPR:
3064 case LE_EXPR:
3065 case GT_EXPR:
3066 case GE_EXPR:
3067 case EQ_EXPR:
3068 case NE_EXPR:
3069 case UNORDERED_EXPR:
3070 case ORDERED_EXPR:
3071 case UNLT_EXPR:
3072 case UNLE_EXPR:
3073 case UNGT_EXPR:
3074 case UNGE_EXPR:
3075 case UNEQ_EXPR:
3076 case LTGT_EXPR:
3077 case MULT_EXPR:
3078 case TRUNC_DIV_EXPR:
3079 case CEIL_DIV_EXPR:
3080 case FLOOR_DIV_EXPR:
3081 case ROUND_DIV_EXPR:
3082 case TRUNC_MOD_EXPR:
3083 case CEIL_MOD_EXPR:
3084 case FLOOR_MOD_EXPR:
3085 case ROUND_MOD_EXPR:
3086 case RDIV_EXPR:
3087 case EXACT_DIV_EXPR:
3088 case MIN_EXPR:
3089 case MAX_EXPR:
3090 case LSHIFT_EXPR:
3091 case RSHIFT_EXPR:
3092 case LROTATE_EXPR:
3093 case RROTATE_EXPR:
3094 case BIT_IOR_EXPR:
3095 case BIT_XOR_EXPR:
3096 case BIT_AND_EXPR:
3097 CHECK_OP (0, "invalid operand to binary operator");
3098 CHECK_OP (1, "invalid operand to binary operator");
3099 break;
3101 case CONSTRUCTOR:
3102 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3103 *walk_subtrees = 0;
3104 break;
3106 case CASE_LABEL_EXPR:
3107 if (CASE_CHAIN (t))
3109 error ("invalid CASE_CHAIN");
3110 return t;
3112 break;
3114 default:
3115 break;
3117 return NULL;
3119 #undef CHECK_OP
3123 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3124 Returns true if there is an error, otherwise false. */
3126 static bool
3127 verify_types_in_gimple_min_lval (tree expr)
3129 tree op;
3131 if (is_gimple_id (expr))
3132 return false;
3134 if (TREE_CODE (expr) != TARGET_MEM_REF
3135 && TREE_CODE (expr) != MEM_REF)
3137 error ("invalid expression for min lvalue");
3138 return true;
3141 /* TARGET_MEM_REFs are strange beasts. */
3142 if (TREE_CODE (expr) == TARGET_MEM_REF)
3143 return false;
3145 op = TREE_OPERAND (expr, 0);
3146 if (!is_gimple_val (op))
3148 error ("invalid operand in indirect reference");
3149 debug_generic_stmt (op);
3150 return true;
3152 /* Memory references now generally can involve a value conversion. */
3154 return false;
3157 /* Verify if EXPR is a valid GIMPLE reference expression. If
3158 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3159 if there is an error, otherwise false. */
3161 static bool
3162 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3164 while (handled_component_p (expr))
3166 tree op = TREE_OPERAND (expr, 0);
3168 if (TREE_CODE (expr) == ARRAY_REF
3169 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3171 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3172 || (TREE_OPERAND (expr, 2)
3173 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3174 || (TREE_OPERAND (expr, 3)
3175 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3177 error ("invalid operands to array reference");
3178 debug_generic_stmt (expr);
3179 return true;
3183 /* Verify if the reference array element types are compatible. */
3184 if (TREE_CODE (expr) == ARRAY_REF
3185 && !useless_type_conversion_p (TREE_TYPE (expr),
3186 TREE_TYPE (TREE_TYPE (op))))
3188 error ("type mismatch in array reference");
3189 debug_generic_stmt (TREE_TYPE (expr));
3190 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3191 return true;
3193 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3194 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3195 TREE_TYPE (TREE_TYPE (op))))
3197 error ("type mismatch in array range reference");
3198 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3199 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3200 return true;
3203 if ((TREE_CODE (expr) == REALPART_EXPR
3204 || TREE_CODE (expr) == IMAGPART_EXPR)
3205 && !useless_type_conversion_p (TREE_TYPE (expr),
3206 TREE_TYPE (TREE_TYPE (op))))
3208 error ("type mismatch in real/imagpart reference");
3209 debug_generic_stmt (TREE_TYPE (expr));
3210 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3211 return true;
3214 if (TREE_CODE (expr) == COMPONENT_REF
3215 && !useless_type_conversion_p (TREE_TYPE (expr),
3216 TREE_TYPE (TREE_OPERAND (expr, 1))))
3218 error ("type mismatch in component reference");
3219 debug_generic_stmt (TREE_TYPE (expr));
3220 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3221 return true;
3224 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3226 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3227 that their operand is not an SSA name or an invariant when
3228 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3229 bug). Otherwise there is nothing to verify, gross mismatches at
3230 most invoke undefined behavior. */
3231 if (require_lvalue
3232 && (TREE_CODE (op) == SSA_NAME
3233 || is_gimple_min_invariant (op)))
3235 error ("conversion of an SSA_NAME on the left hand side");
3236 debug_generic_stmt (expr);
3237 return true;
3239 else if (TREE_CODE (op) == SSA_NAME
3240 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3242 error ("conversion of register to a different size");
3243 debug_generic_stmt (expr);
3244 return true;
3246 else if (!handled_component_p (op))
3247 return false;
3250 expr = op;
3253 if (TREE_CODE (expr) == MEM_REF)
3255 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3257 error ("invalid address operand in MEM_REF");
3258 debug_generic_stmt (expr);
3259 return true;
3261 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3262 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3264 error ("invalid offset operand in MEM_REF");
3265 debug_generic_stmt (expr);
3266 return true;
3269 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3271 if (!TMR_BASE (expr)
3272 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3274 error ("invalid address operand in TARGET_MEM_REF");
3275 return true;
3277 if (!TMR_OFFSET (expr)
3278 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3279 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3281 error ("invalid offset operand in TARGET_MEM_REF");
3282 debug_generic_stmt (expr);
3283 return true;
3287 return ((require_lvalue || !is_gimple_min_invariant (expr))
3288 && verify_types_in_gimple_min_lval (expr));
3291 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3292 list of pointer-to types that is trivially convertible to DEST. */
3294 static bool
3295 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3297 tree src;
3299 if (!TYPE_POINTER_TO (src_obj))
3300 return true;
3302 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3303 if (useless_type_conversion_p (dest, src))
3304 return true;
3306 return false;
3309 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3310 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3312 static bool
3313 valid_fixed_convert_types_p (tree type1, tree type2)
3315 return (FIXED_POINT_TYPE_P (type1)
3316 && (INTEGRAL_TYPE_P (type2)
3317 || SCALAR_FLOAT_TYPE_P (type2)
3318 || FIXED_POINT_TYPE_P (type2)));
3321 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3322 is a problem, otherwise false. */
3324 static bool
3325 verify_gimple_call (gcall *stmt)
3327 tree fn = gimple_call_fn (stmt);
3328 tree fntype, fndecl;
3329 unsigned i;
3331 if (gimple_call_internal_p (stmt))
3333 if (fn)
3335 error ("gimple call has two targets");
3336 debug_generic_stmt (fn);
3337 return true;
3340 else
3342 if (!fn)
3344 error ("gimple call has no target");
3345 return true;
3349 if (fn && !is_gimple_call_addr (fn))
3351 error ("invalid function in gimple call");
3352 debug_generic_stmt (fn);
3353 return true;
3356 if (fn
3357 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3358 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3359 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3361 error ("non-function in gimple call");
3362 return true;
3365 fndecl = gimple_call_fndecl (stmt);
3366 if (fndecl
3367 && TREE_CODE (fndecl) == FUNCTION_DECL
3368 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3369 && !DECL_PURE_P (fndecl)
3370 && !TREE_READONLY (fndecl))
3372 error ("invalid pure const state for function");
3373 return true;
3376 tree lhs = gimple_call_lhs (stmt);
3377 if (lhs
3378 && (!is_gimple_lvalue (lhs)
3379 || verify_types_in_gimple_reference (lhs, true)))
3381 error ("invalid LHS in gimple call");
3382 return true;
3385 if (lhs
3386 && gimple_call_ctrl_altering_p (stmt)
3387 && gimple_call_noreturn_p (stmt)
3388 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3390 error ("LHS in noreturn call");
3391 return true;
3394 fntype = gimple_call_fntype (stmt);
3395 if (fntype
3396 && lhs
3397 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3398 /* ??? At least C++ misses conversions at assignments from
3399 void * call results.
3400 ??? Java is completely off. Especially with functions
3401 returning java.lang.Object.
3402 For now simply allow arbitrary pointer type conversions. */
3403 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3404 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3406 error ("invalid conversion in gimple call");
3407 debug_generic_stmt (TREE_TYPE (lhs));
3408 debug_generic_stmt (TREE_TYPE (fntype));
3409 return true;
3412 if (gimple_call_chain (stmt)
3413 && !is_gimple_val (gimple_call_chain (stmt)))
3415 error ("invalid static chain in gimple call");
3416 debug_generic_stmt (gimple_call_chain (stmt));
3417 return true;
3420 /* If there is a static chain argument, the call should either be
3421 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3422 if (gimple_call_chain (stmt)
3423 && fndecl
3424 && !DECL_STATIC_CHAIN (fndecl))
3426 error ("static chain with function that doesn%'t use one");
3427 return true;
3430 /* ??? The C frontend passes unpromoted arguments in case it
3431 didn't see a function declaration before the call. So for now
3432 leave the call arguments mostly unverified. Once we gimplify
3433 unit-at-a-time we have a chance to fix this. */
3435 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3437 tree arg = gimple_call_arg (stmt, i);
3438 if ((is_gimple_reg_type (TREE_TYPE (arg))
3439 && !is_gimple_val (arg))
3440 || (!is_gimple_reg_type (TREE_TYPE (arg))
3441 && !is_gimple_lvalue (arg)))
3443 error ("invalid argument to gimple call");
3444 debug_generic_expr (arg);
3445 return true;
3449 return false;
3452 /* Verifies the gimple comparison with the result type TYPE and
3453 the operands OP0 and OP1. */
3455 static bool
3456 verify_gimple_comparison (tree type, tree op0, tree op1)
3458 tree op0_type = TREE_TYPE (op0);
3459 tree op1_type = TREE_TYPE (op1);
3461 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3463 error ("invalid operands in gimple comparison");
3464 return true;
3467 /* For comparisons we do not have the operations type as the
3468 effective type the comparison is carried out in. Instead
3469 we require that either the first operand is trivially
3470 convertible into the second, or the other way around.
3471 Because we special-case pointers to void we allow
3472 comparisons of pointers with the same mode as well. */
3473 if (!useless_type_conversion_p (op0_type, op1_type)
3474 && !useless_type_conversion_p (op1_type, op0_type)
3475 && (!POINTER_TYPE_P (op0_type)
3476 || !POINTER_TYPE_P (op1_type)
3477 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3479 error ("mismatching comparison operand types");
3480 debug_generic_expr (op0_type);
3481 debug_generic_expr (op1_type);
3482 return true;
3485 /* The resulting type of a comparison may be an effective boolean type. */
3486 if (INTEGRAL_TYPE_P (type)
3487 && (TREE_CODE (type) == BOOLEAN_TYPE
3488 || TYPE_PRECISION (type) == 1))
3490 if (TREE_CODE (op0_type) == VECTOR_TYPE
3491 || TREE_CODE (op1_type) == VECTOR_TYPE)
3493 error ("vector comparison returning a boolean");
3494 debug_generic_expr (op0_type);
3495 debug_generic_expr (op1_type);
3496 return true;
3499 /* Or an integer vector type with the same size and element count
3500 as the comparison operand types. */
3501 else if (TREE_CODE (type) == VECTOR_TYPE
3502 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3504 if (TREE_CODE (op0_type) != VECTOR_TYPE
3505 || TREE_CODE (op1_type) != VECTOR_TYPE)
3507 error ("non-vector operands in vector comparison");
3508 debug_generic_expr (op0_type);
3509 debug_generic_expr (op1_type);
3510 return true;
3513 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3514 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3515 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3516 /* The result of a vector comparison is of signed
3517 integral type. */
3518 || TYPE_UNSIGNED (TREE_TYPE (type)))
3520 error ("invalid vector comparison resulting type");
3521 debug_generic_expr (type);
3522 return true;
3525 else
3527 error ("bogus comparison result type");
3528 debug_generic_expr (type);
3529 return true;
3532 return false;
3535 /* Verify a gimple assignment statement STMT with an unary rhs.
3536 Returns true if anything is wrong. */
3538 static bool
3539 verify_gimple_assign_unary (gassign *stmt)
3541 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3542 tree lhs = gimple_assign_lhs (stmt);
3543 tree lhs_type = TREE_TYPE (lhs);
3544 tree rhs1 = gimple_assign_rhs1 (stmt);
3545 tree rhs1_type = TREE_TYPE (rhs1);
3547 if (!is_gimple_reg (lhs))
3549 error ("non-register as LHS of unary operation");
3550 return true;
3553 if (!is_gimple_val (rhs1))
3555 error ("invalid operand in unary operation");
3556 return true;
3559 /* First handle conversions. */
3560 switch (rhs_code)
3562 CASE_CONVERT:
3564 /* Allow conversions from pointer type to integral type only if
3565 there is no sign or zero extension involved.
3566 For targets were the precision of ptrofftype doesn't match that
3567 of pointers we need to allow arbitrary conversions to ptrofftype. */
3568 if ((POINTER_TYPE_P (lhs_type)
3569 && INTEGRAL_TYPE_P (rhs1_type))
3570 || (POINTER_TYPE_P (rhs1_type)
3571 && INTEGRAL_TYPE_P (lhs_type)
3572 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3573 || ptrofftype_p (sizetype))))
3574 return false;
3576 /* Allow conversion from integral to offset type and vice versa. */
3577 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3578 && INTEGRAL_TYPE_P (rhs1_type))
3579 || (INTEGRAL_TYPE_P (lhs_type)
3580 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3581 return false;
3583 /* Otherwise assert we are converting between types of the
3584 same kind. */
3585 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3587 error ("invalid types in nop conversion");
3588 debug_generic_expr (lhs_type);
3589 debug_generic_expr (rhs1_type);
3590 return true;
3593 return false;
3596 case ADDR_SPACE_CONVERT_EXPR:
3598 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3599 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3600 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3602 error ("invalid types in address space conversion");
3603 debug_generic_expr (lhs_type);
3604 debug_generic_expr (rhs1_type);
3605 return true;
3608 return false;
3611 case FIXED_CONVERT_EXPR:
3613 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3614 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3616 error ("invalid types in fixed-point conversion");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 return true;
3622 return false;
3625 case FLOAT_EXPR:
3627 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3628 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3629 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3631 error ("invalid types in conversion to floating point");
3632 debug_generic_expr (lhs_type);
3633 debug_generic_expr (rhs1_type);
3634 return true;
3637 return false;
3640 case FIX_TRUNC_EXPR:
3642 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3643 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3644 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3646 error ("invalid types in conversion to integer");
3647 debug_generic_expr (lhs_type);
3648 debug_generic_expr (rhs1_type);
3649 return true;
3652 return false;
3654 case REDUC_MAX_EXPR:
3655 case REDUC_MIN_EXPR:
3656 case REDUC_PLUS_EXPR:
3657 if (!VECTOR_TYPE_P (rhs1_type)
3658 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3660 error ("reduction should convert from vector to element type");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3665 return false;
3667 case VEC_UNPACK_HI_EXPR:
3668 case VEC_UNPACK_LO_EXPR:
3669 case VEC_UNPACK_FLOAT_HI_EXPR:
3670 case VEC_UNPACK_FLOAT_LO_EXPR:
3671 /* FIXME. */
3672 return false;
3674 case NEGATE_EXPR:
3675 case ABS_EXPR:
3676 case BIT_NOT_EXPR:
3677 case PAREN_EXPR:
3678 case CONJ_EXPR:
3679 break;
3681 default:
3682 gcc_unreachable ();
3685 /* For the remaining codes assert there is no conversion involved. */
3686 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3688 error ("non-trivial conversion in unary operation");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3694 return false;
3697 /* Verify a gimple assignment statement STMT with a binary rhs.
3698 Returns true if anything is wrong. */
3700 static bool
3701 verify_gimple_assign_binary (gassign *stmt)
3703 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3704 tree lhs = gimple_assign_lhs (stmt);
3705 tree lhs_type = TREE_TYPE (lhs);
3706 tree rhs1 = gimple_assign_rhs1 (stmt);
3707 tree rhs1_type = TREE_TYPE (rhs1);
3708 tree rhs2 = gimple_assign_rhs2 (stmt);
3709 tree rhs2_type = TREE_TYPE (rhs2);
3711 if (!is_gimple_reg (lhs))
3713 error ("non-register as LHS of binary operation");
3714 return true;
3717 if (!is_gimple_val (rhs1)
3718 || !is_gimple_val (rhs2))
3720 error ("invalid operands in binary operation");
3721 return true;
3724 /* First handle operations that involve different types. */
3725 switch (rhs_code)
3727 case COMPLEX_EXPR:
3729 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3730 || !(INTEGRAL_TYPE_P (rhs1_type)
3731 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3732 || !(INTEGRAL_TYPE_P (rhs2_type)
3733 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3735 error ("type mismatch in complex expression");
3736 debug_generic_expr (lhs_type);
3737 debug_generic_expr (rhs1_type);
3738 debug_generic_expr (rhs2_type);
3739 return true;
3742 return false;
3745 case LSHIFT_EXPR:
3746 case RSHIFT_EXPR:
3747 case LROTATE_EXPR:
3748 case RROTATE_EXPR:
3750 /* Shifts and rotates are ok on integral types, fixed point
3751 types and integer vector types. */
3752 if ((!INTEGRAL_TYPE_P (rhs1_type)
3753 && !FIXED_POINT_TYPE_P (rhs1_type)
3754 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3755 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3756 || (!INTEGRAL_TYPE_P (rhs2_type)
3757 /* Vector shifts of vectors are also ok. */
3758 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3759 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3760 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3761 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3762 || !useless_type_conversion_p (lhs_type, rhs1_type))
3764 error ("type mismatch in shift expression");
3765 debug_generic_expr (lhs_type);
3766 debug_generic_expr (rhs1_type);
3767 debug_generic_expr (rhs2_type);
3768 return true;
3771 return false;
3774 case WIDEN_LSHIFT_EXPR:
3776 if (!INTEGRAL_TYPE_P (lhs_type)
3777 || !INTEGRAL_TYPE_P (rhs1_type)
3778 || TREE_CODE (rhs2) != INTEGER_CST
3779 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3781 error ("type mismatch in widening vector shift expression");
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 debug_generic_expr (rhs2_type);
3785 return true;
3788 return false;
3791 case VEC_WIDEN_LSHIFT_HI_EXPR:
3792 case VEC_WIDEN_LSHIFT_LO_EXPR:
3794 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3795 || TREE_CODE (lhs_type) != VECTOR_TYPE
3796 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3797 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3798 || TREE_CODE (rhs2) != INTEGER_CST
3799 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3800 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3802 error ("type mismatch in widening vector shift expression");
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 debug_generic_expr (rhs2_type);
3806 return true;
3809 return false;
3812 case PLUS_EXPR:
3813 case MINUS_EXPR:
3815 tree lhs_etype = lhs_type;
3816 tree rhs1_etype = rhs1_type;
3817 tree rhs2_etype = rhs2_type;
3818 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3820 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3821 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3823 error ("invalid non-vector operands to vector valued plus");
3824 return true;
3826 lhs_etype = TREE_TYPE (lhs_type);
3827 rhs1_etype = TREE_TYPE (rhs1_type);
3828 rhs2_etype = TREE_TYPE (rhs2_type);
3830 if (POINTER_TYPE_P (lhs_etype)
3831 || POINTER_TYPE_P (rhs1_etype)
3832 || POINTER_TYPE_P (rhs2_etype))
3834 error ("invalid (pointer) operands to plus/minus");
3835 return true;
3838 /* Continue with generic binary expression handling. */
3839 break;
3842 case POINTER_PLUS_EXPR:
3844 if (!POINTER_TYPE_P (rhs1_type)
3845 || !useless_type_conversion_p (lhs_type, rhs1_type)
3846 || !ptrofftype_p (rhs2_type))
3848 error ("type mismatch in pointer plus expression");
3849 debug_generic_stmt (lhs_type);
3850 debug_generic_stmt (rhs1_type);
3851 debug_generic_stmt (rhs2_type);
3852 return true;
3855 return false;
3858 case TRUTH_ANDIF_EXPR:
3859 case TRUTH_ORIF_EXPR:
3860 case TRUTH_AND_EXPR:
3861 case TRUTH_OR_EXPR:
3862 case TRUTH_XOR_EXPR:
3864 gcc_unreachable ();
3866 case LT_EXPR:
3867 case LE_EXPR:
3868 case GT_EXPR:
3869 case GE_EXPR:
3870 case EQ_EXPR:
3871 case NE_EXPR:
3872 case UNORDERED_EXPR:
3873 case ORDERED_EXPR:
3874 case UNLT_EXPR:
3875 case UNLE_EXPR:
3876 case UNGT_EXPR:
3877 case UNGE_EXPR:
3878 case UNEQ_EXPR:
3879 case LTGT_EXPR:
3880 /* Comparisons are also binary, but the result type is not
3881 connected to the operand types. */
3882 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3884 case WIDEN_MULT_EXPR:
3885 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3886 return true;
3887 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3888 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3890 case WIDEN_SUM_EXPR:
3891 case VEC_WIDEN_MULT_HI_EXPR:
3892 case VEC_WIDEN_MULT_LO_EXPR:
3893 case VEC_WIDEN_MULT_EVEN_EXPR:
3894 case VEC_WIDEN_MULT_ODD_EXPR:
3895 case VEC_PACK_TRUNC_EXPR:
3896 case VEC_PACK_SAT_EXPR:
3897 case VEC_PACK_FIX_TRUNC_EXPR:
3898 /* FIXME. */
3899 return false;
3901 case MULT_EXPR:
3902 case MULT_HIGHPART_EXPR:
3903 case TRUNC_DIV_EXPR:
3904 case CEIL_DIV_EXPR:
3905 case FLOOR_DIV_EXPR:
3906 case ROUND_DIV_EXPR:
3907 case TRUNC_MOD_EXPR:
3908 case CEIL_MOD_EXPR:
3909 case FLOOR_MOD_EXPR:
3910 case ROUND_MOD_EXPR:
3911 case RDIV_EXPR:
3912 case EXACT_DIV_EXPR:
3913 case MIN_EXPR:
3914 case MAX_EXPR:
3915 case BIT_IOR_EXPR:
3916 case BIT_XOR_EXPR:
3917 case BIT_AND_EXPR:
3918 /* Continue with generic binary expression handling. */
3919 break;
3921 default:
3922 gcc_unreachable ();
3925 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3926 || !useless_type_conversion_p (lhs_type, rhs2_type))
3928 error ("type mismatch in binary expression");
3929 debug_generic_stmt (lhs_type);
3930 debug_generic_stmt (rhs1_type);
3931 debug_generic_stmt (rhs2_type);
3932 return true;
3935 return false;
3938 /* Verify a gimple assignment statement STMT with a ternary rhs.
3939 Returns true if anything is wrong. */
3941 static bool
3942 verify_gimple_assign_ternary (gassign *stmt)
3944 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3945 tree lhs = gimple_assign_lhs (stmt);
3946 tree lhs_type = TREE_TYPE (lhs);
3947 tree rhs1 = gimple_assign_rhs1 (stmt);
3948 tree rhs1_type = TREE_TYPE (rhs1);
3949 tree rhs2 = gimple_assign_rhs2 (stmt);
3950 tree rhs2_type = TREE_TYPE (rhs2);
3951 tree rhs3 = gimple_assign_rhs3 (stmt);
3952 tree rhs3_type = TREE_TYPE (rhs3);
3954 if (!is_gimple_reg (lhs))
3956 error ("non-register as LHS of ternary operation");
3957 return true;
3960 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3961 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3962 || !is_gimple_val (rhs2)
3963 || !is_gimple_val (rhs3))
3965 error ("invalid operands in ternary operation");
3966 return true;
3969 /* First handle operations that involve different types. */
3970 switch (rhs_code)
3972 case WIDEN_MULT_PLUS_EXPR:
3973 case WIDEN_MULT_MINUS_EXPR:
3974 if ((!INTEGRAL_TYPE_P (rhs1_type)
3975 && !FIXED_POINT_TYPE_P (rhs1_type))
3976 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3977 || !useless_type_conversion_p (lhs_type, rhs3_type)
3978 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3979 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3981 error ("type mismatch in widening multiply-accumulate expression");
3982 debug_generic_expr (lhs_type);
3983 debug_generic_expr (rhs1_type);
3984 debug_generic_expr (rhs2_type);
3985 debug_generic_expr (rhs3_type);
3986 return true;
3988 break;
3990 case FMA_EXPR:
3991 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3992 || !useless_type_conversion_p (lhs_type, rhs2_type)
3993 || !useless_type_conversion_p (lhs_type, rhs3_type))
3995 error ("type mismatch in fused multiply-add expression");
3996 debug_generic_expr (lhs_type);
3997 debug_generic_expr (rhs1_type);
3998 debug_generic_expr (rhs2_type);
3999 debug_generic_expr (rhs3_type);
4000 return true;
4002 break;
4004 case COND_EXPR:
4005 case VEC_COND_EXPR:
4006 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4007 || !useless_type_conversion_p (lhs_type, rhs3_type))
4009 error ("type mismatch in conditional expression");
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs2_type);
4012 debug_generic_expr (rhs3_type);
4013 return true;
4015 break;
4017 case VEC_PERM_EXPR:
4018 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4019 || !useless_type_conversion_p (lhs_type, rhs2_type))
4021 error ("type mismatch in vector permute expression");
4022 debug_generic_expr (lhs_type);
4023 debug_generic_expr (rhs1_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4029 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4030 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4031 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4033 error ("vector types expected in vector permute expression");
4034 debug_generic_expr (lhs_type);
4035 debug_generic_expr (rhs1_type);
4036 debug_generic_expr (rhs2_type);
4037 debug_generic_expr (rhs3_type);
4038 return true;
4041 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4042 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4043 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4044 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4045 != TYPE_VECTOR_SUBPARTS (lhs_type))
4047 error ("vectors with different element number found "
4048 "in vector permute expression");
4049 debug_generic_expr (lhs_type);
4050 debug_generic_expr (rhs1_type);
4051 debug_generic_expr (rhs2_type);
4052 debug_generic_expr (rhs3_type);
4053 return true;
4056 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4057 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4058 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4060 error ("invalid mask type in vector permute expression");
4061 debug_generic_expr (lhs_type);
4062 debug_generic_expr (rhs1_type);
4063 debug_generic_expr (rhs2_type);
4064 debug_generic_expr (rhs3_type);
4065 return true;
4068 return false;
4070 case SAD_EXPR:
4071 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4072 || !useless_type_conversion_p (lhs_type, rhs3_type)
4073 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4074 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4075 > GET_MODE_BITSIZE (GET_MODE_INNER
4076 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4078 error ("type mismatch in sad expression");
4079 debug_generic_expr (lhs_type);
4080 debug_generic_expr (rhs1_type);
4081 debug_generic_expr (rhs2_type);
4082 debug_generic_expr (rhs3_type);
4083 return true;
4086 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4087 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4088 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4090 error ("vector types expected in sad expression");
4091 debug_generic_expr (lhs_type);
4092 debug_generic_expr (rhs1_type);
4093 debug_generic_expr (rhs2_type);
4094 debug_generic_expr (rhs3_type);
4095 return true;
4098 return false;
4100 case DOT_PROD_EXPR:
4101 case REALIGN_LOAD_EXPR:
4102 /* FIXME. */
4103 return false;
4105 default:
4106 gcc_unreachable ();
4108 return false;
4111 /* Verify a gimple assignment statement STMT with a single rhs.
4112 Returns true if anything is wrong. */
4114 static bool
4115 verify_gimple_assign_single (gassign *stmt)
4117 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4118 tree lhs = gimple_assign_lhs (stmt);
4119 tree lhs_type = TREE_TYPE (lhs);
4120 tree rhs1 = gimple_assign_rhs1 (stmt);
4121 tree rhs1_type = TREE_TYPE (rhs1);
4122 bool res = false;
4124 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4126 error ("non-trivial conversion at assignment");
4127 debug_generic_expr (lhs_type);
4128 debug_generic_expr (rhs1_type);
4129 return true;
4132 if (gimple_clobber_p (stmt)
4133 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4135 error ("non-decl/MEM_REF LHS in clobber statement");
4136 debug_generic_expr (lhs);
4137 return true;
4140 if (handled_component_p (lhs)
4141 || TREE_CODE (lhs) == MEM_REF
4142 || TREE_CODE (lhs) == TARGET_MEM_REF)
4143 res |= verify_types_in_gimple_reference (lhs, true);
4145 /* Special codes we cannot handle via their class. */
4146 switch (rhs_code)
4148 case ADDR_EXPR:
4150 tree op = TREE_OPERAND (rhs1, 0);
4151 if (!is_gimple_addressable (op))
4153 error ("invalid operand in unary expression");
4154 return true;
4157 /* Technically there is no longer a need for matching types, but
4158 gimple hygiene asks for this check. In LTO we can end up
4159 combining incompatible units and thus end up with addresses
4160 of globals that change their type to a common one. */
4161 if (!in_lto_p
4162 && !types_compatible_p (TREE_TYPE (op),
4163 TREE_TYPE (TREE_TYPE (rhs1)))
4164 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4165 TREE_TYPE (op)))
4167 error ("type mismatch in address expression");
4168 debug_generic_stmt (TREE_TYPE (rhs1));
4169 debug_generic_stmt (TREE_TYPE (op));
4170 return true;
4173 return verify_types_in_gimple_reference (op, true);
4176 /* tcc_reference */
4177 case INDIRECT_REF:
4178 error ("INDIRECT_REF in gimple IL");
4179 return true;
4181 case COMPONENT_REF:
4182 case BIT_FIELD_REF:
4183 case ARRAY_REF:
4184 case ARRAY_RANGE_REF:
4185 case VIEW_CONVERT_EXPR:
4186 case REALPART_EXPR:
4187 case IMAGPART_EXPR:
4188 case TARGET_MEM_REF:
4189 case MEM_REF:
4190 if (!is_gimple_reg (lhs)
4191 && is_gimple_reg_type (TREE_TYPE (lhs)))
4193 error ("invalid rhs for gimple memory store");
4194 debug_generic_stmt (lhs);
4195 debug_generic_stmt (rhs1);
4196 return true;
4198 return res || verify_types_in_gimple_reference (rhs1, false);
4200 /* tcc_constant */
4201 case SSA_NAME:
4202 case INTEGER_CST:
4203 case REAL_CST:
4204 case FIXED_CST:
4205 case COMPLEX_CST:
4206 case VECTOR_CST:
4207 case STRING_CST:
4208 return res;
4210 /* tcc_declaration */
4211 case CONST_DECL:
4212 return res;
4213 case VAR_DECL:
4214 case PARM_DECL:
4215 if (!is_gimple_reg (lhs)
4216 && !is_gimple_reg (rhs1)
4217 && is_gimple_reg_type (TREE_TYPE (lhs)))
4219 error ("invalid rhs for gimple memory store");
4220 debug_generic_stmt (lhs);
4221 debug_generic_stmt (rhs1);
4222 return true;
4224 return res;
4226 case CONSTRUCTOR:
4227 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4229 unsigned int i;
4230 tree elt_i, elt_v, elt_t = NULL_TREE;
4232 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4233 return res;
4234 /* For vector CONSTRUCTORs we require that either it is empty
4235 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4236 (then the element count must be correct to cover the whole
4237 outer vector and index must be NULL on all elements, or it is
4238 a CONSTRUCTOR of scalar elements, where we as an exception allow
4239 smaller number of elements (assuming zero filling) and
4240 consecutive indexes as compared to NULL indexes (such
4241 CONSTRUCTORs can appear in the IL from FEs). */
4242 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4244 if (elt_t == NULL_TREE)
4246 elt_t = TREE_TYPE (elt_v);
4247 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4249 tree elt_t = TREE_TYPE (elt_v);
4250 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4251 TREE_TYPE (elt_t)))
4253 error ("incorrect type of vector CONSTRUCTOR"
4254 " elements");
4255 debug_generic_stmt (rhs1);
4256 return true;
4258 else if (CONSTRUCTOR_NELTS (rhs1)
4259 * TYPE_VECTOR_SUBPARTS (elt_t)
4260 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4262 error ("incorrect number of vector CONSTRUCTOR"
4263 " elements");
4264 debug_generic_stmt (rhs1);
4265 return true;
4268 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4269 elt_t))
4271 error ("incorrect type of vector CONSTRUCTOR elements");
4272 debug_generic_stmt (rhs1);
4273 return true;
4275 else if (CONSTRUCTOR_NELTS (rhs1)
4276 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4278 error ("incorrect number of vector CONSTRUCTOR elements");
4279 debug_generic_stmt (rhs1);
4280 return true;
4283 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4285 error ("incorrect type of vector CONSTRUCTOR elements");
4286 debug_generic_stmt (rhs1);
4287 return true;
4289 if (elt_i != NULL_TREE
4290 && (TREE_CODE (elt_t) == VECTOR_TYPE
4291 || TREE_CODE (elt_i) != INTEGER_CST
4292 || compare_tree_int (elt_i, i) != 0))
4294 error ("vector CONSTRUCTOR with non-NULL element index");
4295 debug_generic_stmt (rhs1);
4296 return true;
4298 if (!is_gimple_val (elt_v))
4300 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4301 debug_generic_stmt (rhs1);
4302 return true;
4306 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4308 error ("non-vector CONSTRUCTOR with elements");
4309 debug_generic_stmt (rhs1);
4310 return true;
4312 return res;
4313 case OBJ_TYPE_REF:
4314 case ASSERT_EXPR:
4315 case WITH_SIZE_EXPR:
4316 /* FIXME. */
4317 return res;
4319 default:;
4322 return res;
4325 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4326 is a problem, otherwise false. */
4328 static bool
4329 verify_gimple_assign (gassign *stmt)
4331 switch (gimple_assign_rhs_class (stmt))
4333 case GIMPLE_SINGLE_RHS:
4334 return verify_gimple_assign_single (stmt);
4336 case GIMPLE_UNARY_RHS:
4337 return verify_gimple_assign_unary (stmt);
4339 case GIMPLE_BINARY_RHS:
4340 return verify_gimple_assign_binary (stmt);
4342 case GIMPLE_TERNARY_RHS:
4343 return verify_gimple_assign_ternary (stmt);
4345 default:
4346 gcc_unreachable ();
4350 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4351 is a problem, otherwise false. */
4353 static bool
4354 verify_gimple_return (greturn *stmt)
4356 tree op = gimple_return_retval (stmt);
4357 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4359 /* We cannot test for present return values as we do not fix up missing
4360 return values from the original source. */
4361 if (op == NULL)
4362 return false;
4364 if (!is_gimple_val (op)
4365 && TREE_CODE (op) != RESULT_DECL)
4367 error ("invalid operand in return statement");
4368 debug_generic_stmt (op);
4369 return true;
4372 if ((TREE_CODE (op) == RESULT_DECL
4373 && DECL_BY_REFERENCE (op))
4374 || (TREE_CODE (op) == SSA_NAME
4375 && SSA_NAME_VAR (op)
4376 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4377 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4378 op = TREE_TYPE (op);
4380 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4382 error ("invalid conversion in return statement");
4383 debug_generic_stmt (restype);
4384 debug_generic_stmt (TREE_TYPE (op));
4385 return true;
4388 return false;
4392 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4393 is a problem, otherwise false. */
4395 static bool
4396 verify_gimple_goto (ggoto *stmt)
4398 tree dest = gimple_goto_dest (stmt);
4400 /* ??? We have two canonical forms of direct goto destinations, a
4401 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4402 if (TREE_CODE (dest) != LABEL_DECL
4403 && (!is_gimple_val (dest)
4404 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4406 error ("goto destination is neither a label nor a pointer");
4407 return true;
4410 return false;
4413 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4414 is a problem, otherwise false. */
4416 static bool
4417 verify_gimple_switch (gswitch *stmt)
4419 unsigned int i, n;
4420 tree elt, prev_upper_bound = NULL_TREE;
4421 tree index_type, elt_type = NULL_TREE;
4423 if (!is_gimple_val (gimple_switch_index (stmt)))
4425 error ("invalid operand to switch statement");
4426 debug_generic_stmt (gimple_switch_index (stmt));
4427 return true;
4430 index_type = TREE_TYPE (gimple_switch_index (stmt));
4431 if (! INTEGRAL_TYPE_P (index_type))
4433 error ("non-integral type switch statement");
4434 debug_generic_expr (index_type);
4435 return true;
4438 elt = gimple_switch_label (stmt, 0);
4439 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4441 error ("invalid default case label in switch statement");
4442 debug_generic_expr (elt);
4443 return true;
4446 n = gimple_switch_num_labels (stmt);
4447 for (i = 1; i < n; i++)
4449 elt = gimple_switch_label (stmt, i);
4451 if (! CASE_LOW (elt))
4453 error ("invalid case label in switch statement");
4454 debug_generic_expr (elt);
4455 return true;
4457 if (CASE_HIGH (elt)
4458 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4460 error ("invalid case range in switch statement");
4461 debug_generic_expr (elt);
4462 return true;
4465 if (elt_type)
4467 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4468 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4470 error ("type mismatch for case label in switch statement");
4471 debug_generic_expr (elt);
4472 return true;
4475 else
4477 elt_type = TREE_TYPE (CASE_LOW (elt));
4478 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4480 error ("type precision mismatch in switch statement");
4481 return true;
4485 if (prev_upper_bound)
4487 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4489 error ("case labels not sorted in switch statement");
4490 return true;
4494 prev_upper_bound = CASE_HIGH (elt);
4495 if (! prev_upper_bound)
4496 prev_upper_bound = CASE_LOW (elt);
4499 return false;
4502 /* Verify a gimple debug statement STMT.
4503 Returns true if anything is wrong. */
4505 static bool
4506 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4508 /* There isn't much that could be wrong in a gimple debug stmt. A
4509 gimple debug bind stmt, for example, maps a tree, that's usually
4510 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4511 component or member of an aggregate type, to another tree, that
4512 can be an arbitrary expression. These stmts expand into debug
4513 insns, and are converted to debug notes by var-tracking.c. */
4514 return false;
4517 /* Verify a gimple label statement STMT.
4518 Returns true if anything is wrong. */
4520 static bool
4521 verify_gimple_label (glabel *stmt)
4523 tree decl = gimple_label_label (stmt);
4524 int uid;
4525 bool err = false;
4527 if (TREE_CODE (decl) != LABEL_DECL)
4528 return true;
4529 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4530 && DECL_CONTEXT (decl) != current_function_decl)
4532 error ("label's context is not the current function decl");
4533 err |= true;
4536 uid = LABEL_DECL_UID (decl);
4537 if (cfun->cfg
4538 && (uid == -1
4539 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4541 error ("incorrect entry in label_to_block_map");
4542 err |= true;
4545 uid = EH_LANDING_PAD_NR (decl);
4546 if (uid)
4548 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4549 if (decl != lp->post_landing_pad)
4551 error ("incorrect setting of landing pad number");
4552 err |= true;
4556 return err;
4559 /* Verify a gimple cond statement STMT.
4560 Returns true if anything is wrong. */
4562 static bool
4563 verify_gimple_cond (gcond *stmt)
4565 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4567 error ("invalid comparison code in gimple cond");
4568 return true;
4570 if (!(!gimple_cond_true_label (stmt)
4571 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4572 || !(!gimple_cond_false_label (stmt)
4573 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4575 error ("invalid labels in gimple cond");
4576 return true;
4579 return verify_gimple_comparison (boolean_type_node,
4580 gimple_cond_lhs (stmt),
4581 gimple_cond_rhs (stmt));
4584 /* Verify the GIMPLE statement STMT. Returns true if there is an
4585 error, otherwise false. */
4587 static bool
4588 verify_gimple_stmt (gimple stmt)
4590 switch (gimple_code (stmt))
4592 case GIMPLE_ASSIGN:
4593 return verify_gimple_assign (as_a <gassign *> (stmt));
4595 case GIMPLE_LABEL:
4596 return verify_gimple_label (as_a <glabel *> (stmt));
4598 case GIMPLE_CALL:
4599 return verify_gimple_call (as_a <gcall *> (stmt));
4601 case GIMPLE_COND:
4602 return verify_gimple_cond (as_a <gcond *> (stmt));
4604 case GIMPLE_GOTO:
4605 return verify_gimple_goto (as_a <ggoto *> (stmt));
4607 case GIMPLE_SWITCH:
4608 return verify_gimple_switch (as_a <gswitch *> (stmt));
4610 case GIMPLE_RETURN:
4611 return verify_gimple_return (as_a <greturn *> (stmt));
4613 case GIMPLE_ASM:
4614 return false;
4616 case GIMPLE_TRANSACTION:
4617 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4619 /* Tuples that do not have tree operands. */
4620 case GIMPLE_NOP:
4621 case GIMPLE_PREDICT:
4622 case GIMPLE_RESX:
4623 case GIMPLE_EH_DISPATCH:
4624 case GIMPLE_EH_MUST_NOT_THROW:
4625 return false;
4627 CASE_GIMPLE_OMP:
4628 /* OpenMP directives are validated by the FE and never operated
4629 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4630 non-gimple expressions when the main index variable has had
4631 its address taken. This does not affect the loop itself
4632 because the header of an GIMPLE_OMP_FOR is merely used to determine
4633 how to setup the parallel iteration. */
4634 return false;
4636 case GIMPLE_DEBUG:
4637 return verify_gimple_debug (stmt);
4639 default:
4640 gcc_unreachable ();
4644 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4645 and false otherwise. */
4647 static bool
4648 verify_gimple_phi (gimple phi)
4650 bool err = false;
4651 unsigned i;
4652 tree phi_result = gimple_phi_result (phi);
4653 bool virtual_p;
4655 if (!phi_result)
4657 error ("invalid PHI result");
4658 return true;
4661 virtual_p = virtual_operand_p (phi_result);
4662 if (TREE_CODE (phi_result) != SSA_NAME
4663 || (virtual_p
4664 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4666 error ("invalid PHI result");
4667 err = true;
4670 for (i = 0; i < gimple_phi_num_args (phi); i++)
4672 tree t = gimple_phi_arg_def (phi, i);
4674 if (!t)
4676 error ("missing PHI def");
4677 err |= true;
4678 continue;
4680 /* Addressable variables do have SSA_NAMEs but they
4681 are not considered gimple values. */
4682 else if ((TREE_CODE (t) == SSA_NAME
4683 && virtual_p != virtual_operand_p (t))
4684 || (virtual_p
4685 && (TREE_CODE (t) != SSA_NAME
4686 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4687 || (!virtual_p
4688 && !is_gimple_val (t)))
4690 error ("invalid PHI argument");
4691 debug_generic_expr (t);
4692 err |= true;
4694 #ifdef ENABLE_TYPES_CHECKING
4695 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4697 error ("incompatible types in PHI argument %u", i);
4698 debug_generic_stmt (TREE_TYPE (phi_result));
4699 debug_generic_stmt (TREE_TYPE (t));
4700 err |= true;
4702 #endif
4705 return err;
4708 /* Verify the GIMPLE statements inside the sequence STMTS. */
4710 static bool
4711 verify_gimple_in_seq_2 (gimple_seq stmts)
4713 gimple_stmt_iterator ittr;
4714 bool err = false;
4716 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4718 gimple stmt = gsi_stmt (ittr);
4720 switch (gimple_code (stmt))
4722 case GIMPLE_BIND:
4723 err |= verify_gimple_in_seq_2 (
4724 gimple_bind_body (as_a <gbind *> (stmt)));
4725 break;
4727 case GIMPLE_TRY:
4728 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4729 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4730 break;
4732 case GIMPLE_EH_FILTER:
4733 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4734 break;
4736 case GIMPLE_EH_ELSE:
4738 geh_else *eh_else = as_a <geh_else *> (stmt);
4739 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4740 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4742 break;
4744 case GIMPLE_CATCH:
4745 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4746 as_a <gcatch *> (stmt)));
4747 break;
4749 case GIMPLE_TRANSACTION:
4750 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4751 break;
4753 default:
4755 bool err2 = verify_gimple_stmt (stmt);
4756 if (err2)
4757 debug_gimple_stmt (stmt);
4758 err |= err2;
4763 return err;
4766 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4767 is a problem, otherwise false. */
4769 static bool
4770 verify_gimple_transaction (gtransaction *stmt)
4772 tree lab = gimple_transaction_label (stmt);
4773 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4774 return true;
4775 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4779 /* Verify the GIMPLE statements inside the statement list STMTS. */
4781 DEBUG_FUNCTION void
4782 verify_gimple_in_seq (gimple_seq stmts)
4784 timevar_push (TV_TREE_STMT_VERIFY);
4785 if (verify_gimple_in_seq_2 (stmts))
4786 internal_error ("verify_gimple failed");
4787 timevar_pop (TV_TREE_STMT_VERIFY);
4790 /* Return true when the T can be shared. */
4792 static bool
4793 tree_node_can_be_shared (tree t)
4795 if (IS_TYPE_OR_DECL_P (t)
4796 || is_gimple_min_invariant (t)
4797 || TREE_CODE (t) == SSA_NAME
4798 || t == error_mark_node
4799 || TREE_CODE (t) == IDENTIFIER_NODE)
4800 return true;
4802 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4803 return true;
4805 if (DECL_P (t))
4806 return true;
4808 return false;
4811 /* Called via walk_tree. Verify tree sharing. */
4813 static tree
4814 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4816 hash_set<void *> *visited = (hash_set<void *> *) data;
4818 if (tree_node_can_be_shared (*tp))
4820 *walk_subtrees = false;
4821 return NULL;
4824 if (visited->add (*tp))
4825 return *tp;
4827 return NULL;
4830 /* Called via walk_gimple_stmt. Verify tree sharing. */
4832 static tree
4833 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4835 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4836 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4839 static bool eh_error_found;
4840 bool
4841 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4842 hash_set<gimple> *visited)
4844 if (!visited->contains (stmt))
4846 error ("dead STMT in EH table");
4847 debug_gimple_stmt (stmt);
4848 eh_error_found = true;
4850 return true;
4853 /* Verify if the location LOCs block is in BLOCKS. */
4855 static bool
4856 verify_location (hash_set<tree> *blocks, location_t loc)
4858 tree block = LOCATION_BLOCK (loc);
4859 if (block != NULL_TREE
4860 && !blocks->contains (block))
4862 error ("location references block not in block tree");
4863 return true;
4865 if (block != NULL_TREE)
4866 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4867 return false;
4870 /* Called via walk_tree. Verify that expressions have no blocks. */
4872 static tree
4873 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4875 if (!EXPR_P (*tp))
4877 *walk_subtrees = false;
4878 return NULL;
4881 location_t loc = EXPR_LOCATION (*tp);
4882 if (LOCATION_BLOCK (loc) != NULL)
4883 return *tp;
4885 return NULL;
4888 /* Called via walk_tree. Verify locations of expressions. */
4890 static tree
4891 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4893 hash_set<tree> *blocks = (hash_set<tree> *) data;
4895 if (TREE_CODE (*tp) == VAR_DECL
4896 && DECL_HAS_DEBUG_EXPR_P (*tp))
4898 tree t = DECL_DEBUG_EXPR (*tp);
4899 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4900 if (addr)
4901 return addr;
4903 if ((TREE_CODE (*tp) == VAR_DECL
4904 || TREE_CODE (*tp) == PARM_DECL
4905 || TREE_CODE (*tp) == RESULT_DECL)
4906 && DECL_HAS_VALUE_EXPR_P (*tp))
4908 tree t = DECL_VALUE_EXPR (*tp);
4909 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4910 if (addr)
4911 return addr;
4914 if (!EXPR_P (*tp))
4916 *walk_subtrees = false;
4917 return NULL;
4920 location_t loc = EXPR_LOCATION (*tp);
4921 if (verify_location (blocks, loc))
4922 return *tp;
4924 return NULL;
4927 /* Called via walk_gimple_op. Verify locations of expressions. */
4929 static tree
4930 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4932 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4933 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4936 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4938 static void
4939 collect_subblocks (hash_set<tree> *blocks, tree block)
4941 tree t;
4942 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4944 blocks->add (t);
4945 collect_subblocks (blocks, t);
4949 /* Verify the GIMPLE statements in the CFG of FN.  */
/* Checks every PHI node and statement of every basic block: GIMPLE
   validity, tree-node sharing, location/block-tree consistency and the
   EH throw table; calls internal_error (no return) on any failure.
   VERIFY_NOTHROW additionally reports statements recorded in the EH
   table that can no longer throw.  */
4951 DEBUG_FUNCTION void
4952 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4954 basic_block bb;
4955 bool err = false;
4957 timevar_push (TV_TREE_STMT_VERIFY);
4958 hash_set<void *> visited;
4959 hash_set<gimple> visited_stmts;
4961 /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
4962 hash_set<tree> blocks;
4963 if (DECL_INITIAL (fn->decl))
4965 blocks.add (DECL_INITIAL (fn->decl));
4966 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4969 FOR_EACH_BB_FN (bb, fn)
4971 gimple_stmt_iterator gsi;
/* First pass over BB: verify its PHI nodes.  */
4973 for (gphi_iterator gpi = gsi_start_phis (bb);
4974 !gsi_end_p (gpi);
4975 gsi_next (&gpi))
4977 gphi *phi = gpi.phi ();
4978 bool err2 = false;
4979 unsigned i;
4981 visited_stmts.add (phi);
4983 if (gimple_bb (phi) != bb)
4985 error ("gimple_bb (phi) is set to a wrong basic block");
4986 err2 = true;
4989 err2 |= verify_gimple_phi (phi);
4991 /* Only PHI arguments have locations.  */
4992 if (gimple_location (phi) != UNKNOWN_LOCATION)
4994 error ("PHI node with location");
4995 err2 = true;
/* Check each PHI argument for tree sharing and valid locations.  */
4998 for (i = 0; i < gimple_phi_num_args (phi); i++)
5000 tree arg = gimple_phi_arg_def (phi, i);
5001 tree addr = walk_tree (&arg, verify_node_sharing_1,
5002 &visited, NULL);
5003 if (addr)
5005 error ("incorrect sharing of tree nodes");
5006 debug_generic_expr (addr);
5007 err2 |= true;
5009 location_t loc = gimple_phi_arg_location (phi, i);
5010 if (virtual_operand_p (gimple_phi_result (phi))
5011 && loc != UNKNOWN_LOCATION)
5013 error ("virtual PHI with argument locations");
5014 err2 = true;
5016 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5017 if (addr)
5019 debug_generic_expr (addr);
5020 err2 = true;
5022 err2 |= verify_location (&blocks, loc);
5025 if (err2)
5026 debug_gimple_stmt (phi);
5027 err |= err2;
/* Second pass over BB: verify the ordinary statements.  */
5030 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5032 gimple stmt = gsi_stmt (gsi);
5033 bool err2 = false;
5034 struct walk_stmt_info wi;
5035 tree addr;
5036 int lp_nr;
5038 visited_stmts.add (stmt);
5040 if (gimple_bb (stmt) != bb)
5042 error ("gimple_bb (stmt) is set to a wrong basic block");
5043 err2 = true;
5046 err2 |= verify_gimple_stmt (stmt);
5047 err2 |= verify_location (&blocks, gimple_location (stmt));
5049 memset (&wi, 0, sizeof (wi));
5050 wi.info = (void *) &visited;
5051 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5052 if (addr)
5054 error ("incorrect sharing of tree nodes");
5055 debug_generic_expr (addr);
5056 err2 |= true;
5059 memset (&wi, 0, sizeof (wi));
5060 wi.info = (void *) &blocks;
5061 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5062 if (addr)
5064 debug_generic_expr (addr);
5065 err2 |= true;
5068 /* ??? Instead of not checking these stmts at all the walker
5069 should know its context via wi.  */
5070 if (!is_gimple_debug (stmt)
5071 && !is_gimple_omp (stmt))
5073 memset (&wi, 0, sizeof (wi));
5074 addr = walk_gimple_op (stmt, verify_expr, &wi);
5075 if (addr)
5077 debug_generic_expr (addr);
5078 inform (gimple_location (stmt), "in statement");
5079 err2 |= true;
5083 /* If the statement is marked as part of an EH region, then it is
5084 expected that the statement could throw.  Verify that when we
5085 have optimizations that simplify statements such that we prove
5086 that they cannot throw, that we update other data structures
5087 to match.  */
5088 lp_nr = lookup_stmt_eh_lp (stmt);
5089 if (lp_nr > 0)
5091 if (!stmt_could_throw_p (stmt))
5093 if (verify_nothrow)
5095 error ("statement marked for throw, but doesn%'t");
5096 err2 |= true;
5099 else if (!gsi_one_before_end_p (gsi))
5101 error ("statement marked for throw in middle of block");
5102 err2 |= true;
5106 if (err2)
5107 debug_gimple_stmt (stmt);
5108 err |= err2;
/* Finally, make sure the EH throw table references only statements
   that are still present in the IL (collected in VISITED_STMTS).  */
5112 eh_error_found = false;
5113 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5114 if (eh_table)
5115 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5116 (&visited_stmts);
5118 if (err || eh_error_found)
5119 internal_error ("verify_gimple failed");
5121 verify_histograms ();
5122 timevar_pop (TV_TREE_STMT_VERIFY);
5126 /* Verifies that the flow information is OK.  */
/* Checks CFG invariants at the GIMPLE level: the entry/exit blocks
   carry no IL, labels appear only at block starts, control statements
   only at block ends, and the outgoing-edge flags of each block match
   its terminating statement.  Returns nonzero on any error.  */
5128 static int
5129 gimple_verify_flow_info (void)
5131 int err = 0;
5132 basic_block bb;
5133 gimple_stmt_iterator gsi;
5134 gimple stmt;
5135 edge e;
5136 edge_iterator ei;
5138 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5139 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5141 error ("ENTRY_BLOCK has IL associated with it");
5142 err = 1;
5145 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5146 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5148 error ("EXIT_BLOCK has IL associated with it");
5149 err = 1;
5152 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5153 if (e->flags & EDGE_FALLTHRU)
5155 error ("fallthru to exit from bb %d", e->src->index);
5156 err = 1;
5159 FOR_EACH_BB_FN (bb, cfun)
5161 bool found_ctrl_stmt = false;
5163 stmt = NULL;
5165 /* Skip labels on the start of basic block.  */
5166 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5168 tree label;
5169 gimple prev_stmt = stmt;
5171 stmt = gsi_stmt (gsi);
5173 if (gimple_code (stmt) != GIMPLE_LABEL)
5174 break;
5176 label = gimple_label_label (as_a <glabel *> (stmt));
/* Nonlocal and EH landing-pad labels must come first in the label run.  */
5177 if (prev_stmt && DECL_NONLOCAL (label))
5179 error ("nonlocal label ");
5180 print_generic_expr (stderr, label, 0);
5181 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5182 bb->index);
5183 err = 1;
5186 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5188 error ("EH landing pad label ");
5189 print_generic_expr (stderr, label, 0);
5190 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5191 bb->index);
5192 err = 1;
5195 if (label_to_block (label) != bb)
5197 error ("label ");
5198 print_generic_expr (stderr, label, 0);
5199 fprintf (stderr, " to block does not match in bb %d",
5200 bb->index);
5201 err = 1;
5204 if (decl_function_context (label) != current_function_decl)
5206 error ("label ");
5207 print_generic_expr (stderr, label, 0);
5208 fprintf (stderr, " has incorrect context in bb %d",
5209 bb->index);
5210 err = 1;
5214 /* Verify that body of basic block BB is free of control flow.  */
5215 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5217 gimple stmt = gsi_stmt (gsi);
5219 if (found_ctrl_stmt)
5221 error ("control flow in the middle of basic block %d",
5222 bb->index);
5223 err = 1;
5226 if (stmt_ends_bb_p (stmt))
5227 found_ctrl_stmt = true;
5229 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5231 error ("label ");
5232 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5233 fprintf (stderr, " in the middle of basic block %d", bb->index);
5234 err = 1;
/* Now check the block's last statement against its outgoing edges.  */
5238 gsi = gsi_last_bb (bb);
5239 if (gsi_end_p (gsi))
5240 continue;
5242 stmt = gsi_stmt (gsi);
5244 if (gimple_code (stmt) == GIMPLE_LABEL)
5245 continue;
5247 err |= verify_eh_edges (stmt);
5249 if (is_ctrl_stmt (stmt))
5251 FOR_EACH_EDGE (e, ei, bb->succs)
5252 if (e->flags & EDGE_FALLTHRU)
5254 error ("fallthru edge after a control statement in bb %d",
5255 bb->index);
5256 err = 1;
5260 if (gimple_code (stmt) != GIMPLE_COND)
5262 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5263 after anything else but if statement.  */
5264 FOR_EACH_EDGE (e, ei, bb->succs)
5265 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5267 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5268 bb->index);
5269 err = 1;
/* Statement-specific edge checks.  */
5273 switch (gimple_code (stmt))
5275 case GIMPLE_COND:
5277 edge true_edge;
5278 edge false_edge;
5280 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5282 if (!true_edge
5283 || !false_edge
5284 || !(true_edge->flags & EDGE_TRUE_VALUE)
5285 || !(false_edge->flags & EDGE_FALSE_VALUE)
5286 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5287 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5288 || EDGE_COUNT (bb->succs) >= 3)
5290 error ("wrong outgoing edge flags at end of bb %d",
5291 bb->index);
5292 err = 1;
5295 break;
5297 case GIMPLE_GOTO:
5298 if (simple_goto_p (stmt))
5300 error ("explicit goto at end of bb %d", bb->index);
5301 err = 1;
5303 else
5305 /* FIXME.  We should double check that the labels in the
5306 destination blocks have their address taken.  */
5307 FOR_EACH_EDGE (e, ei, bb->succs)
5308 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5309 | EDGE_FALSE_VALUE))
5310 || !(e->flags & EDGE_ABNORMAL))
5312 error ("wrong outgoing edge flags at end of bb %d",
5313 bb->index);
5314 err = 1;
5317 break;
5319 case GIMPLE_CALL:
5320 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5321 break;
5322 /* ... fallthru ... */
5323 case GIMPLE_RETURN:
5324 if (!single_succ_p (bb)
5325 || (single_succ_edge (bb)->flags
5326 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5327 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5329 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5330 err = 1;
5332 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5334 error ("return edge does not point to exit in bb %d",
5335 bb->index);
5336 err = 1;
5338 break;
5340 case GIMPLE_SWITCH:
5342 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5343 tree prev;
5344 edge e;
5345 size_t i, n;
5347 n = gimple_switch_num_labels (switch_stmt);
5349 /* Mark all the destination basic blocks.  */
/* Block ->aux is used as a temporary mark: 1 = is a case target,
   2 = also reached by an outgoing edge; cleared again below.  */
5350 for (i = 0; i < n; ++i)
5352 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5353 basic_block label_bb = label_to_block (lab);
5354 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5355 label_bb->aux = (void *)1;
5358 /* Verify that the case labels are sorted.  */
5359 prev = gimple_switch_label (switch_stmt, 0);
5360 for (i = 1; i < n; ++i)
5362 tree c = gimple_switch_label (switch_stmt, i);
5363 if (!CASE_LOW (c))
5365 error ("found default case not at the start of "
5366 "case vector");
5367 err = 1;
5368 continue;
5370 if (CASE_LOW (prev)
5371 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5373 error ("case labels not sorted: ");
5374 print_generic_expr (stderr, prev, 0);
5375 fprintf (stderr," is greater than ");
5376 print_generic_expr (stderr, c, 0);
5377 fprintf (stderr," but comes before it.\n");
5378 err = 1;
5380 prev = c;
5382 /* VRP will remove the default case if it can prove it will
5383 never be executed.  So do not verify there always exists
5384 a default case here.  */
5386 FOR_EACH_EDGE (e, ei, bb->succs)
5388 if (!e->dest->aux)
5390 error ("extra outgoing edge %d->%d",
5391 bb->index, e->dest->index);
5392 err = 1;
5395 e->dest->aux = (void *)2;
5396 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5397 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5399 error ("wrong outgoing edge flags at end of bb %d",
5400 bb->index);
5401 err = 1;
5405 /* Check that we have all of them.  */
5406 for (i = 0; i < n; ++i)
5408 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5409 basic_block label_bb = label_to_block (lab);
5411 if (label_bb->aux != (void *)2)
5413 error ("missing edge %i->%i", bb->index, label_bb->index);
5414 err = 1;
/* Clear the ->aux marks again.  */
5418 FOR_EACH_EDGE (e, ei, bb->succs)
5419 e->dest->aux = (void *)0;
5421 break;
5423 case GIMPLE_EH_DISPATCH:
5424 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5425 break;
5427 default:
5428 break;
5432 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5433 verify_dominators (CDI_DOMINATORS);
5435 return err;
5439 /* Updates phi nodes after creating a forwarder block joined
5440 by edge FALLTHRU. */
5442 static void
5443 gimple_make_forwarder_block (edge fallthru)
5445 edge e;
5446 edge_iterator ei;
5447 basic_block dummy, bb;
5448 tree var;
5449 gphi_iterator gsi;
5451 dummy = fallthru->src;
5452 bb = fallthru->dest;
5454 if (single_pred_p (bb))
5455 return;
5457 /* If we redirected a branch we must create new PHI nodes at the
5458 start of BB. */
5459 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5461 gphi *phi, *new_phi;
5463 phi = gsi.phi ();
5464 var = gimple_phi_result (phi);
5465 new_phi = create_phi_node (var, bb);
5466 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5467 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5468 UNKNOWN_LOCATION);
5471 /* Add the arguments we have stored on edges. */
5472 FOR_EACH_EDGE (e, ei, bb->preds)
5474 if (e == fallthru)
5475 continue;
5477 flush_pending_stmts (e);
5482 /* Return a non-special label in the head of basic block BLOCK.
5483 Create one if it doesn't exist. */
5485 tree
5486 gimple_block_label (basic_block bb)
5488 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5489 bool first = true;
5490 tree label;
5491 glabel *stmt;
5493 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5495 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5496 if (!stmt)
5497 break;
5498 label = gimple_label_label (stmt);
5499 if (!DECL_NONLOCAL (label))
5501 if (!first)
5502 gsi_move_before (&i, &s);
5503 return label;
5507 label = create_artificial_label (UNKNOWN_LOCATION);
5508 stmt = gimple_build_label (label);
5509 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5510 return label;
5514 /* Attempt to perform edge redirection by replacing a possibly complex
5515 jump instruction by a goto or by removing the jump completely.
5516 This can apply only if all edges now point to the same block. The
5517 parameters and return values are equivalent to
5518 redirect_edge_and_branch. */
5520 static edge
5521 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5523 basic_block src = e->src;
5524 gimple_stmt_iterator i;
5525 gimple stmt;
5527 /* We can replace or remove a complex jump only when we have exactly
5528 two edges. */
5529 if (EDGE_COUNT (src->succs) != 2
5530 /* Verify that all targets will be TARGET. Specifically, the
5531 edge that is not E must also go to TARGET. */
5532 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5533 return NULL;
5535 i = gsi_last_bb (src);
5536 if (gsi_end_p (i))
5537 return NULL;
5539 stmt = gsi_stmt (i);
5541 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5543 gsi_remove (&i, true);
5544 e = ssa_redirect_edge (e, target);
5545 e->flags = EDGE_FALLTHRU;
5546 return e;
5549 return NULL;
5553 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5554 edge representing the redirected branch.  */
/* Abnormal edges and edges already pointing at DEST cannot be
   redirected; EH edges are handled by redirect_eh_edge.  For the rest,
   the statement ending E->src (if any) is first adjusted so its
   labels/edges are consistent with the new destination, then the edge
   itself is redirected via ssa_redirect_edge.  */
5556 static edge
5557 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5559 basic_block bb = e->src;
5560 gimple_stmt_iterator gsi;
5561 edge ret;
5562 gimple stmt;
5564 if (e->flags & EDGE_ABNORMAL)
5565 return NULL;
5567 if (e->dest == dest)
5568 return NULL;
5570 if (e->flags & EDGE_EH)
5571 return redirect_eh_edge (e, dest);
5573 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
/* Try the cheap case first: drop a now-redundant cond/switch.  */
5575 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5576 if (ret)
5577 return ret;
5580 gsi = gsi_last_bb (bb);
5581 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
/* An empty block is dispatched through GIMPLE_ERROR_MARK to the
   default (fallthru) case below.  */
5583 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5585 case GIMPLE_COND:
5586 /* For COND_EXPR, we only need to redirect the edge.  */
5587 break;
5589 case GIMPLE_GOTO:
5590 /* No non-abnormal edges should lead from a non-simple goto, and
5591 simple ones should be represented implicitly.  */
5592 gcc_unreachable ();
5594 case GIMPLE_SWITCH:
5596 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5597 tree label = gimple_block_label (dest);
5598 tree cases = get_cases_for_edge (e, switch_stmt);
5600 /* If we have a list of cases associated with E, then use it
5601 as it's a lot faster than walking the entire case vector.  */
5602 if (cases)
5604 edge e2 = find_edge (e->src, dest);
5605 tree last, first;
/* Point every case on E's list at the new destination label.  */
5607 first = cases;
5608 while (cases)
5610 last = cases;
5611 CASE_LABEL (cases) = label;
5612 cases = CASE_CHAIN (cases);
5615 /* If there was already an edge in the CFG, then we need
5616 to move all the cases associated with E to E2.  */
5617 if (e2)
5619 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5621 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5622 CASE_CHAIN (cases2) = first;
5624 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5626 else
/* No cached case list: walk the whole case vector.  */
5628 size_t i, n = gimple_switch_num_labels (switch_stmt);
5630 for (i = 0; i < n; i++)
5632 tree elt = gimple_switch_label (switch_stmt, i);
5633 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5634 CASE_LABEL (elt) = label;
5638 break;
5640 case GIMPLE_ASM:
5642 gasm *asm_stmt = as_a <gasm *> (stmt);
5643 int i, n = gimple_asm_nlabels (asm_stmt);
5644 tree label = NULL;
/* Retarget every asm-goto label operand that pointed at E->dest.  */
5646 for (i = 0; i < n; ++i)
5648 tree cons = gimple_asm_label_op (asm_stmt, i);
5649 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5651 if (!label)
5652 label = gimple_block_label (dest);
5653 TREE_VALUE (cons) = label;
5657 /* If we didn't find any label matching the former edge in the
5658 asm labels, we must be redirecting the fallthrough
5659 edge.  */
5660 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5662 break;
5664 case GIMPLE_RETURN:
/* The return is deleted; the edge becomes a plain fallthru.  */
5665 gsi_remove (&gsi, true);
5666 e->flags |= EDGE_FALLTHRU;
5667 break;
5669 case GIMPLE_OMP_RETURN:
5670 case GIMPLE_OMP_CONTINUE:
5671 case GIMPLE_OMP_SECTIONS_SWITCH:
5672 case GIMPLE_OMP_FOR:
5673 /* The edges from OMP constructs can be simply redirected.  */
5674 break;
5676 case GIMPLE_EH_DISPATCH:
5677 if (!(e->flags & EDGE_FALLTHRU))
5678 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5679 break;
5681 case GIMPLE_TRANSACTION:
5682 /* The ABORT edge has a stored label associated with it, otherwise
5683 the edges are simply redirectable.  */
5684 if (e->flags == 0)
5685 gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5686 gimple_block_label (dest));
5687 break;
5689 default:
5690 /* Otherwise it must be a fallthru edge, and we don't need to
5691 do anything besides redirecting it.  */
5692 gcc_assert (e->flags & EDGE_FALLTHRU);
5693 break;
5696 /* Update/insert PHI nodes as necessary.  */
5698 /* Now update the edges in the CFG.  */
5699 e = ssa_redirect_edge (e, dest);
5701 return e;
5704 /* Returns true if it is possible to remove edge E by redirecting
5705 it to the destination of the other edge from E->src. */
5707 static bool
5708 gimple_can_remove_branch_p (const_edge e)
5710 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5711 return false;
5713 return true;
5716 /* Simple wrapper, as we can always redirect fallthru edges. */
5718 static basic_block
5719 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5721 e = gimple_redirect_edge_and_branch (e, dest);
5722 gcc_assert (e);
5724 return NULL;
5728 /* Splits basic block BB after statement STMT (but at least after the
5729 labels). If STMT is NULL, BB is split just after the labels. */
5731 static basic_block
5732 gimple_split_block (basic_block bb, void *stmt)
5734 gimple_stmt_iterator gsi;
5735 gimple_stmt_iterator gsi_tgt;
5736 gimple_seq list;
5737 basic_block new_bb;
5738 edge e;
5739 edge_iterator ei;
5741 new_bb = create_empty_bb (bb);
5743 /* Redirect the outgoing edges. */
5744 new_bb->succs = bb->succs;
5745 bb->succs = NULL;
5746 FOR_EACH_EDGE (e, ei, new_bb->succs)
5747 e->src = new_bb;
5749 /* Get a stmt iterator pointing to the first stmt to move. */
5750 if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5751 gsi = gsi_after_labels (bb);
5752 else
5754 gsi = gsi_for_stmt ((gimple) stmt);
5755 gsi_next (&gsi);
5758 /* Move everything from GSI to the new basic block. */
5759 if (gsi_end_p (gsi))
5760 return new_bb;
5762 /* Split the statement list - avoid re-creating new containers as this
5763 brings ugly quadratic memory consumption in the inliner.
5764 (We are still quadratic since we need to update stmt BB pointers,
5765 sadly.) */
5766 gsi_split_seq_before (&gsi, &list);
5767 set_bb_seq (new_bb, list);
5768 for (gsi_tgt = gsi_start (list);
5769 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5770 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5772 return new_bb;
5776 /* Moves basic block BB after block AFTER. */
5778 static bool
5779 gimple_move_block_after (basic_block bb, basic_block after)
5781 if (bb->prev_bb == after)
5782 return true;
5784 unlink_block (bb);
5785 link_block (bb, after);
5787 return true;
5791 /* Return TRUE if block BB has no executable statements, otherwise return
5792 FALSE. */
5794 static bool
5795 gimple_empty_block_p (basic_block bb)
5797 /* BB must have no executable statements. */
5798 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5799 if (phi_nodes (bb))
5800 return false;
5801 if (gsi_end_p (gsi))
5802 return true;
5803 if (is_gimple_debug (gsi_stmt (gsi)))
5804 gsi_next_nondebug (&gsi);
5805 return gsi_end_p (gsi);
5809 /* Split a basic block if it ends with a conditional branch and if the
5810 other part of the block is not empty. */
5812 static basic_block
5813 gimple_split_block_before_cond_jump (basic_block bb)
5815 gimple last, split_point;
5816 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5817 if (gsi_end_p (gsi))
5818 return NULL;
5819 last = gsi_stmt (gsi);
5820 if (gimple_code (last) != GIMPLE_COND
5821 && gimple_code (last) != GIMPLE_SWITCH)
5822 return NULL;
5823 gsi_prev_nondebug (&gsi);
5824 split_point = gsi_stmt (gsi);
5825 return split_block (bb, split_point)->dest;
5829 /* Return true if basic_block can be duplicated. */
5831 static bool
5832 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5834 return true;
5837 /* Create a duplicate of the basic block BB.  NOTE: This does not
5838 preserve SSA form.  */
/* The copy is placed just before the exit block.  PHI results and all
   SSA definitions in the copy get fresh names via create_new_def_for;
   EH info and profile histograms are duplicated per statement.  */
5840 static basic_block
5841 gimple_duplicate_bb (basic_block bb)
5843 basic_block new_bb;
5844 gimple_stmt_iterator gsi_tgt;
5846 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5848 /* Copy the PHI nodes.  We ignore PHI node arguments here because
5849 the incoming edges have not been setup yet.  */
5850 for (gphi_iterator gpi = gsi_start_phis (bb);
5851 !gsi_end_p (gpi);
5852 gsi_next (&gpi))
5854 gphi *phi, *copy;
5855 phi = gpi.phi ();
5856 copy = create_phi_node (NULL_TREE, new_bb);
5857 create_new_def_for (gimple_phi_result (phi), copy,
5858 gimple_phi_result_ptr (copy));
5859 gimple_set_uid (copy, gimple_uid (phi));
/* Now copy the ordinary statements, skipping labels.  */
5862 gsi_tgt = gsi_start_bb (new_bb);
5863 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5864 !gsi_end_p (gsi);
5865 gsi_next (&gsi))
5867 def_operand_p def_p;
5868 ssa_op_iter op_iter;
5869 tree lhs;
5870 gimple stmt, copy;
5872 stmt = gsi_stmt (gsi);
5873 if (gimple_code (stmt) == GIMPLE_LABEL)
5874 continue;
5876 /* Don't duplicate label debug stmts.  */
5877 if (gimple_debug_bind_p (stmt)
5878 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5879 == LABEL_DECL)
5880 continue;
5882 /* Create a new copy of STMT and duplicate STMT's virtual
5883 operands.  */
5884 copy = gimple_copy (stmt);
5885 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5887 maybe_duplicate_eh_stmt (copy, stmt);
5888 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5890 /* When copying around a stmt writing into a local non-user
5891 aggregate, make sure it won't share stack slot with other
5892 vars.  */
5893 lhs = gimple_get_lhs (stmt);
5894 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5896 tree base = get_base_address (lhs);
5897 if (base
5898 && (TREE_CODE (base) == VAR_DECL
5899 || TREE_CODE (base) == RESULT_DECL)
5900 && DECL_IGNORED_P (base)
5901 && !TREE_STATIC (base)
5902 && !DECL_EXTERNAL (base)
5903 && (TREE_CODE (base) != VAR_DECL
5904 || !DECL_HAS_VALUE_EXPR_P (base)))
5905 DECL_NONSHAREABLE (base) = 1;
5908 /* Create new names for all the definitions created by COPY and
5909 add replacement mappings for each new name.  */
5910 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5911 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5914 return new_bb;
5917 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5919 static void
5920 add_phi_args_after_copy_edge (edge e_copy)
5922 basic_block bb, bb_copy = e_copy->src, dest;
5923 edge e;
5924 edge_iterator ei;
5925 gphi *phi, *phi_copy;
5926 tree def;
5927 gphi_iterator psi, psi_copy;
5929 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5930 return;
5932 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5934 if (e_copy->dest->flags & BB_DUPLICATED)
5935 dest = get_bb_original (e_copy->dest);
5936 else
5937 dest = e_copy->dest;
5939 e = find_edge (bb, dest);
5940 if (!e)
5942 /* During loop unrolling the target of the latch edge is copied.
5943 In this case we are not looking for edge to dest, but to
5944 duplicated block whose original was dest. */
5945 FOR_EACH_EDGE (e, ei, bb->succs)
5947 if ((e->dest->flags & BB_DUPLICATED)
5948 && get_bb_original (e->dest) == dest)
5949 break;
5952 gcc_assert (e != NULL);
5955 for (psi = gsi_start_phis (e->dest),
5956 psi_copy = gsi_start_phis (e_copy->dest);
5957 !gsi_end_p (psi);
5958 gsi_next (&psi), gsi_next (&psi_copy))
5960 phi = psi.phi ();
5961 phi_copy = psi_copy.phi ();
5962 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5963 add_phi_arg (phi_copy, def, e_copy,
5964 gimple_phi_arg_location_from_edge (phi, e));
5969 /* Basic block BB_COPY was created by code duplication. Add phi node
5970 arguments for edges going out of BB_COPY. The blocks that were
5971 duplicated have BB_DUPLICATED set. */
5973 void
5974 add_phi_args_after_copy_bb (basic_block bb_copy)
5976 edge e_copy;
5977 edge_iterator ei;
5979 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5981 add_phi_args_after_copy_edge (e_copy);
5985 /* Blocks in REGION_COPY array of length N_REGION were created by
5986 duplication of basic blocks. Add phi node arguments for edges
5987 going from these blocks. If E_COPY is not NULL, also add
5988 phi node arguments for its destination.*/
5990 void
5991 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5992 edge e_copy)
5994 unsigned i;
5996 for (i = 0; i < n_region; i++)
5997 region_copy[i]->flags |= BB_DUPLICATED;
5999 for (i = 0; i < n_region; i++)
6000 add_phi_args_after_copy_bb (region_copy[i]);
6001 if (e_copy)
6002 add_phi_args_after_copy_edge (e_copy);
6004 for (i = 0; i < n_region; i++)
6005 region_copy[i]->flags &= ~BB_DUPLICATED;
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copy belongs to the enclosing loop;
     otherwise it stays within LOOP itself.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Use profile counts when available, otherwise fall back to the
     estimated frequencies.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);

  /* Scale original and copied blocks so that together they keep the
     original total count/frequency.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6162 /* Checks if BB is part of the region defined by N_REGION BBS. */
6163 static bool
6164 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6166 unsigned int n;
6168 for (n = 0; n < n_region; n++)
6170 if (bb == bbs[n])
6171 return true;
6173 return false;
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			    basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			    basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* EXIT's source must end in a two-way condition; EXITS[1] is the
     other (non-EXIT) successor edge.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate any loops whose header lies inside the region being
     copied, including their subloops.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    if (bb_part_of_region_p (aloop->header, region, n_region))
      {
	cloop = duplicate_loop (aloop, target);
	duplicate_subloops (aloop, cloop);
      }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Use profile counts when available, otherwise fall back to the
     estimated frequencies.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Copy EXIT's guarding GIMPLE_COND into the switch block so it now
     selects between the original and the duplicated tail.  */
  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
   adding blocks when the dominator traversal reaches EXIT.  This
   function silently assumes that ENTRY strictly dominates EXIT.  */

void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
			      vec<basic_block> *bbs_p)
{
  basic_block son;

  /* Walk the dominator tree depth-first; EXIT itself is pushed but
     its dominated subtree is not entered.  */
  for (son = first_dom_son (CDI_DOMINATORS, entry);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      bbs_p->safe_push (son);
      if (son != exit)
	gather_blocks_in_sese_region (son, exit, bbs_p);
    }
}
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Nothing to do if the decl already belongs to TO_CONTEXT.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      /* First time we see this decl: create the duplicate and record
	 it in the map so further references reuse it.  */
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  /* Virtual operands are not moved; they are recomputed in the new
     function.  */
  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying decl first, then build the new
	     SSA name on top of the duplicate.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	  if (SSA_NAME_IS_DEFAULT_DEF (name))
	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
				 decl, new_name);
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Bookkeeping shared by move_stmt_r / move_stmt_op / move_block_to_fn
   while moving statements from one function to another.  */

struct move_stmt_d
{
  /* BLOCK the statements originally belong to; NULL_TREE matches any
     non-NULL block.  */
  tree orig_block;
  /* BLOCK to attach moved statements and expressions to.  */
  tree new_block;
  /* FUNCTION_DECL of the source function.  */
  tree from_context;
  /* FUNCTION_DECL of the destination function.  */
  tree to_context;
  /* Maps old SSA names and var decls to their duplicates.  */
  hash_map<tree, tree> *vars_map;
  /* Maps old LABEL_DECLs to their replacements (tree_map entries).  */
  htab_t new_label_map;
  /* Maps old EH regions to the duplicated ones.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be replaced by duplicates; cleared
     inside OMP directives.  */
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Sanity check: any other block must be nested inside
	     ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have been replaced wholesale; consult the label
	     map before re-parenting.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  /* The duplicated region must already be in EH_MAP (set up by
     duplicate_eh_regions).  */
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = move_stmt_eh_region_nr (old_nr, p);

  /* Re-wrap the remapped region number as an INTEGER_CST.  */
  return build_int_cst (integer_type_node, new_nr);
}
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  /* Statements that carry EH region numbers need those remapped to
     the duplicated regions in the destination function.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* The caller stores the destination loop in loop_father->aux;
	 a NULL aux means the block keeps its current loop_father.  */
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by 25% beyond what is needed, like the other BB arrays.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-home the argument locations into the new block.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Move the label from CFUN's label-to-block map to
	     DEST_CFUN's, growing the latter if needed.  */
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-home goto locations on outgoing edges, same as PHI locations.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen REGION to the common enclosing EH region.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
/* Callback for duplicate_eh_regions: create a fresh artificial label
   to replace DECL and record the mapping in the hash table DATA.
   Returns the replacement label.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* The replacement keeps the original's UID; bump last_label_uid so
     the label-to-block map stays large enough.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Splice the duplicate into the chain in place of the
	     original, carrying over any DECL_VALUE_EXPR.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from FN1 to FN2.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  bitmap bbs = BITMAP_ALLOC (NULL);
  int i;

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  /* Build a bitmap of the region for O(1) membership tests.  */
  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      /* ENTRY must be the sole way in: one predecessor, from outside.  */
      if (bb == entry)
	{
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      /* EXIT must be the sole way out: one successor, to outside.  */
      if (bb == exit)
	{
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
6987 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6988 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6989 single basic block in the original CFG and the new basic block is
6990 returned. DEST_CFUN must not have a CFG yet.
6992 Note that the region need not be a pure SESE region. Blocks inside
6993 the region may contain calls to abort/exit. The only restriction
6994 is that ENTRY_BB should be the only entry point and it must
6995 dominate EXIT_BB.
6997 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6998 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6999 to the new function.
7001 All local variables referenced in the region are assumed to be in
7002 the corresponding BLOCK_VARS and unexpanded variable lists
7003 associated with DEST_CFUN. */
7005 basic_block
7006 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7007 basic_block exit_bb, tree orig_block)
7009 vec<basic_block> bbs, dom_bbs;
7010 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7011 basic_block after, bb, *entry_pred, *exit_succ, abb;
7012 struct function *saved_cfun = cfun;
7013 int *entry_flag, *exit_flag;
7014 unsigned *entry_prob, *exit_prob;
7015 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7016 edge e;
7017 edge_iterator ei;
7018 htab_t new_label_map;
7019 hash_map<void *, void *> *eh_map;
7020 struct loop *loop = entry_bb->loop_father;
7021 struct loop *loop0 = get_loop (saved_cfun, 0);
7022 struct move_stmt_d d;
7024 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7025 region. */
7026 gcc_assert (entry_bb != exit_bb
7027 && (!exit_bb
7028 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7030 /* Collect all the blocks in the region. Manually add ENTRY_BB
7031 because it won't be added by dfs_enumerate_from. */
7032 bbs.create (0);
7033 bbs.safe_push (entry_bb);
7034 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7035 #ifdef ENABLE_CHECKING
7036 verify_sese (entry_bb, exit_bb, &bbs);
7037 #endif
7039 /* The blocks that used to be dominated by something in BBS will now be
7040 dominated by the new block. */
7041 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7042 bbs.address (),
7043 bbs.length ());
7045 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7046 the predecessor edges to ENTRY_BB and the successor edges to
7047 EXIT_BB so that we can re-attach them to the new basic block that
7048 will replace the region. */
7049 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7050 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7051 entry_flag = XNEWVEC (int, num_entry_edges);
7052 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7053 i = 0;
7054 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7056 entry_prob[i] = e->probability;
7057 entry_flag[i] = e->flags;
7058 entry_pred[i++] = e->src;
7059 remove_edge (e);
7062 if (exit_bb)
7064 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7065 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7066 exit_flag = XNEWVEC (int, num_exit_edges);
7067 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7068 i = 0;
7069 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7071 exit_prob[i] = e->probability;
7072 exit_flag[i] = e->flags;
7073 exit_succ[i++] = e->dest;
7074 remove_edge (e);
7077 else
7079 num_exit_edges = 0;
7080 exit_succ = NULL;
7081 exit_flag = NULL;
7082 exit_prob = NULL;
7085 /* Switch context to the child function to initialize DEST_FN's CFG. */
7086 gcc_assert (dest_cfun->cfg == NULL);
7087 push_cfun (dest_cfun);
7089 init_empty_tree_cfg ();
7091 /* Initialize EH information for the new function. */
7092 eh_map = NULL;
7093 new_label_map = NULL;
7094 if (saved_cfun->eh)
7096 eh_region region = NULL;
7098 FOR_EACH_VEC_ELT (bbs, i, bb)
7099 region = find_outermost_region_in_block (saved_cfun, bb, region);
7101 init_eh_for_function ();
7102 if (region != NULL)
7104 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7105 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7106 new_label_mapper, new_label_map);
7110 /* Initialize an empty loop tree. */
7111 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7112 init_loops_structure (dest_cfun, loops, 1);
7113 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7114 set_loops_for_fn (dest_cfun, loops);
7116 /* Move the outlined loop tree part. */
7117 num_nodes = bbs.length ();
7118 FOR_EACH_VEC_ELT (bbs, i, bb)
7120 if (bb->loop_father->header == bb)
7122 struct loop *this_loop = bb->loop_father;
7123 struct loop *outer = loop_outer (this_loop);
7124 if (outer == loop
7125 /* If the SESE region contains some bbs ending with
7126 a noreturn call, those are considered to belong
7127 to the outermost loop in saved_cfun, rather than
7128 the entry_bb's loop_father. */
7129 || outer == loop0)
7131 if (outer != loop)
7132 num_nodes -= this_loop->num_nodes;
7133 flow_loop_tree_node_remove (bb->loop_father);
7134 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7135 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7138 else if (bb->loop_father == loop0 && loop0 != loop)
7139 num_nodes--;
7141 /* Remove loop exits from the outlined region. */
7142 if (loops_for_fn (saved_cfun)->exits)
7143 FOR_EACH_EDGE (e, ei, bb->succs)
7145 struct loops *l = loops_for_fn (saved_cfun);
7146 loop_exit **slot
7147 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7148 NO_INSERT);
7149 if (slot)
7150 l->exits->clear_slot (slot);
7155 /* Adjust the number of blocks in the tree root of the outlined part. */
7156 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7158 /* Setup a mapping to be used by move_block_to_fn. */
7159 loop->aux = current_loops->tree_root;
7160 loop0->aux = current_loops->tree_root;
7162 pop_cfun ();
7164 /* Move blocks from BBS into DEST_CFUN. */
7165 gcc_assert (bbs.length () >= 2);
7166 after = dest_cfun->cfg->x_entry_block_ptr;
7167 hash_map<tree, tree> vars_map;
7169 memset (&d, 0, sizeof (d));
7170 d.orig_block = orig_block;
7171 d.new_block = DECL_INITIAL (dest_cfun->decl);
7172 d.from_context = cfun->decl;
7173 d.to_context = dest_cfun->decl;
7174 d.vars_map = &vars_map;
7175 d.new_label_map = new_label_map;
7176 d.eh_map = eh_map;
7177 d.remap_decls_p = true;
7179 FOR_EACH_VEC_ELT (bbs, i, bb)
7181 /* No need to update edge counts on the last block. It has
7182 already been updated earlier when we detached the region from
7183 the original CFG. */
7184 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7185 after = bb;
7188 loop->aux = NULL;
7189 loop0->aux = NULL;
7190 /* Loop sizes are no longer correct, fix them up. */
7191 loop->num_nodes -= num_nodes;
7192 for (struct loop *outer = loop_outer (loop);
7193 outer; outer = loop_outer (outer))
7194 outer->num_nodes -= num_nodes;
7195 loop0->num_nodes -= bbs.length () - num_nodes;
7197 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7199 struct loop *aloop;
7200 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7201 if (aloop != NULL)
7203 if (aloop->simduid)
7205 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7206 d.to_context);
7207 dest_cfun->has_simduid_loops = true;
7209 if (aloop->force_vectorize)
7210 dest_cfun->has_force_vectorize_loops = true;
7214 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7215 if (orig_block)
7217 tree block;
7218 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7219 == NULL_TREE);
7220 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7221 = BLOCK_SUBBLOCKS (orig_block);
7222 for (block = BLOCK_SUBBLOCKS (orig_block);
7223 block; block = BLOCK_CHAIN (block))
7224 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7225 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7228 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7229 &vars_map, dest_cfun->decl);
7231 if (new_label_map)
7232 htab_delete (new_label_map);
7233 if (eh_map)
7234 delete eh_map;
7236 /* Rewire the entry and exit blocks. The successor to the entry
7237 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7238 the child function. Similarly, the predecessor of DEST_FN's
7239 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7240 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7241 various CFG manipulation function get to the right CFG.
7243 FIXME, this is silly. The CFG ought to become a parameter to
7244 these helpers. */
7245 push_cfun (dest_cfun);
7246 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7247 if (exit_bb)
7248 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7249 pop_cfun ();
7251 /* Back in the original function, the SESE region has disappeared,
7252 create a new basic block in its place. */
7253 bb = create_empty_bb (entry_pred[0]);
7254 if (current_loops)
7255 add_bb_to_loop (bb, loop);
7256 for (i = 0; i < num_entry_edges; i++)
7258 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7259 e->probability = entry_prob[i];
7262 for (i = 0; i < num_exit_edges; i++)
7264 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7265 e->probability = exit_prob[i];
7268 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7269 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7270 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7271 dom_bbs.release ();
7273 if (exit_bb)
7275 free (exit_prob);
7276 free (exit_flag);
7277 free (exit_succ);
7279 free (entry_prob);
7280 free (entry_flag);
7281 free (entry_pred);
7282 bbs.release ();
7284 return bb;
7288 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7291 void
7292 dump_function_to_file (tree fndecl, FILE *file, int flags)
7294 tree arg, var, old_current_fndecl = current_function_decl;
7295 struct function *dsf;
7296 bool ignore_topmost_bind = false, any_var = false;
7297 basic_block bb;
7298 tree chain;
7299 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7300 && decl_is_tm_clone (fndecl));
7301 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
/* Temporarily make FNDECL the current function so the printers resolve
   SSA names, labels etc. against it; restored before every return.  */
7303 current_function_decl = fndecl;
7304 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
/* Print the parameter list, comma separated.  */
7306 arg = DECL_ARGUMENTS (fndecl);
7307 while (arg)
7309 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7310 fprintf (file, " ");
7311 print_generic_expr (file, arg, dump_flags);
7312 if (flags & TDF_VERBOSE)
7313 print_node (file, "", arg, 4);
7314 if (DECL_CHAIN (arg))
7315 fprintf (file, ", ");
7316 arg = DECL_CHAIN (arg);
7318 fprintf (file, ")\n");
7320 if (flags & TDF_VERBOSE)
7321 print_node (file, "", fndecl, 2);
/* Optionally dump the exception-handling region tree.  */
7323 dsf = DECL_STRUCT_FUNCTION (fndecl);
7324 if (dsf && (flags & TDF_EH))
7325 dump_eh_tree (file, dsf);
/* TDF_RAW with no GIMPLE body: dump the raw tree and return early.  */
7327 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7329 dump_node (fndecl, TDF_SLIM | flags, file);
7330 current_function_decl = old_current_fndecl;
7331 return;
7334 /* When GIMPLE is lowered, the variables are no longer available in
7335 BIND_EXPRs, so display them separately.  */
7336 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7338 unsigned ix;
7339 ignore_topmost_bind = true;
7341 fprintf (file, "{\n");
7342 if (!vec_safe_is_empty (fun->local_decls))
7343 FOR_EACH_LOCAL_DECL (fun, ix, var)
7345 print_generic_decl (file, var, flags);
7346 if (flags & TDF_VERBOSE)
7347 print_node (file, "", var, 4);
7348 fprintf (file, "\n");
7350 any_var = true;
/* In SSA form, also list anonymous SSA names (those with no
   underlying user variable), as "type name;" pseudo-declarations.  */
7352 if (gimple_in_ssa_p (cfun))
7353 for (ix = 1; ix < num_ssa_names; ++ix)
7355 tree name = ssa_name (ix);
7356 if (name && !SSA_NAME_VAR (name))
7358 fprintf (file, " ");
7359 print_generic_expr (file, TREE_TYPE (name), flags);
7360 fprintf (file, " ");
7361 print_generic_expr (file, name, flags);
7362 fprintf (file, ";\n");
7364 any_var = true;
/* Three body-dump strategies follow: CFG-based if a CFG exists, a flat
   GIMPLE sequence if the function is gimplified but has no CFG, and a
   GENERIC tree dump otherwise.  */
7369 if (fun && fun->decl == fndecl
7370 && fun->cfg
7371 && basic_block_info_for_fn (fun))
7373 /* If the CFG has been built, emit a CFG-based dump.  */
7374 if (!ignore_topmost_bind)
7375 fprintf (file, "{\n");
7377 if (any_var && n_basic_blocks_for_fn (fun))
7378 fprintf (file, "\n");
7380 FOR_EACH_BB_FN (bb, fun)
7381 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7383 fprintf (file, "}\n");
7385 else if (DECL_SAVED_TREE (fndecl) == NULL)
7387 /* The function is now in GIMPLE form but the CFG has not been
7388 built yet.  Emit the single sequence of GIMPLE statements
7389 that make up its body.  */
7390 gimple_seq body = gimple_body (fndecl);
/* If the whole body is a single GIMPLE_BIND, print it without the
   extra brace wrapper; the bind supplies its own braces.  */
7392 if (gimple_seq_first_stmt (body)
7393 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7394 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7395 print_gimple_seq (file, body, 0, flags);
7396 else
7398 if (!ignore_topmost_bind)
7399 fprintf (file, "{\n");
7401 if (any_var)
7402 fprintf (file, "\n");
7404 print_gimple_seq (file, body, 2, flags);
7405 fprintf (file, "}\n");
7408 else
7410 int indent;
7412 /* Make a tree based dump.  */
7413 chain = DECL_SAVED_TREE (fndecl);
7414 if (chain && TREE_CODE (chain) == BIND_EXPR)
7416 if (ignore_topmost_bind)
7418 chain = BIND_EXPR_BODY (chain);
7419 indent = 2;
7421 else
7422 indent = 0;
7424 else
7426 if (!ignore_topmost_bind)
7428 fprintf (file, "{\n");
7429 /* No topmost bind, pretend it's ignored for later.  */
7430 ignore_topmost_bind = true;
7432 indent = 2;
7435 if (any_var)
7436 fprintf (file, "\n");
7438 print_generic_stmt_indented (file, chain, flags, indent);
7439 if (ignore_topmost_bind)
7440 fprintf (file, "}\n");
7443 if (flags & TDF_ENUMERATE_LOCALS)
7444 dump_enumerated_decls (file, flags);
7445 fprintf (file, "\n\n");
/* Restore the caller's notion of the current function.  */
7447 current_function_decl = old_current_fndecl;
7450 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
/* Debugger-callable convenience wrapper around dump_function_to_file.  */
7452 DEBUG_FUNCTION void
7453 debug_function (tree fn, int flags)
7455 dump_function_to_file (fn, stderr, flags);
7459 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7461 static void
7462 print_pred_bbs (FILE *file, basic_block bb)
7464 edge e;
7465 edge_iterator ei;
7467 FOR_EACH_EDGE (e, ei, bb->preds)
7468 fprintf (file, "bb_%d ", e->src->index);
7472 /* Print on FILE the indexes for the successors of basic_block BB. */
7474 static void
7475 print_succ_bbs (FILE *file, basic_block bb)
7477 edge e;
7478 edge_iterator ei;
7480 FOR_EACH_EDGE (e, ei, bb->succs)
7481 fprintf (file, "bb_%d ", e->dest->index);
7484 /* Print to FILE the basic block BB following the VERBOSITY level. */
7486 void
7487 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7489 char *s_indent = (char *) alloca ((size_t) indent + 1);
7490 memset ((void *) s_indent, ' ', (size_t) indent);
7491 s_indent[indent] = '\0';
7493 /* Print basic_block's header. */
7494 if (verbosity >= 2)
7496 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7497 print_pred_bbs (file, bb);
7498 fprintf (file, "}, succs = {");
7499 print_succ_bbs (file, bb);
7500 fprintf (file, "})\n");
7503 /* Print basic_block's body. */
7504 if (verbosity >= 3)
7506 fprintf (file, "%s {\n", s_indent);
7507 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7508 fprintf (file, "%s }\n", s_indent);
7512 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7514 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7515 VERBOSITY level this outputs the contents of the loop, or just its
7516 structure. */
7518 static void
7519 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7521 char *s_indent;
7522 basic_block bb;
7524 if (loop == NULL)
7525 return;
7527 s_indent = (char *) alloca ((size_t) indent + 1);
7528 memset ((void *) s_indent, ' ', (size_t) indent);
7529 s_indent[indent] = '\0';
7531 /* Print loop's header. */
7532 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7533 if (loop->header)
7534 fprintf (file, "header = %d", loop->header->index);
7535 else
7537 fprintf (file, "deleted)\n");
7538 return;
7540 if (loop->latch)
7541 fprintf (file, ", latch = %d", loop->latch->index);
7542 else
7543 fprintf (file, ", multiple latches");
7544 fprintf (file, ", niter = ");
7545 print_generic_expr (file, loop->nb_iterations, 0);
7547 if (loop->any_upper_bound)
7549 fprintf (file, ", upper_bound = ");
7550 print_decu (loop->nb_iterations_upper_bound, file);
7553 if (loop->any_estimate)
7555 fprintf (file, ", estimate = ");
7556 print_decu (loop->nb_iterations_estimate, file);
7558 fprintf (file, ")\n");
7560 /* Print loop's body. */
7561 if (verbosity >= 1)
7563 fprintf (file, "%s{\n", s_indent);
7564 FOR_EACH_BB_FN (bb, cfun)
7565 if (bb->loop_father == loop)
7566 print_loops_bb (file, bb, indent, verbosity);
7568 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7569 fprintf (file, "%s}\n", s_indent);
7573 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7574 spaces. Following VERBOSITY level this outputs the contents of the
7575 loop, or just its structure. */
7577 static void
7578 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7579 int verbosity)
7581 if (loop == NULL)
7582 return;
7584 print_loop (file, loop, indent, verbosity);
7585 print_loop_and_siblings (file, loop->next, indent, verbosity);
7588 /* Follow a CFG edge from the entry point of the program, and on entry
7589 of a loop, pretty print the loop structure on FILE. */
7591 void
7592 print_loops (FILE *file, int verbosity)
7594 basic_block bb;
7596 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7597 if (bb && bb->loop_father)
7598 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7601 /* Dump a loop.  */
/* Reference overload: print REF's structure to stderr (verbosity 0).  */
7603 DEBUG_FUNCTION void
7604 debug (struct loop &ref)
7606 print_loop (stderr, &ref, 0, /*verbosity*/0);
/* Pointer overload: dump *PTR via the reference overload, or "<nil>"
   when PTR is null.  */
7609 DEBUG_FUNCTION void
7610 debug (struct loop *ptr)
7612 if (ptr)
7613 debug (*ptr);
7614 else
7615 fprintf (stderr, "<nil>\n");
7618 /* Dump a loop verbosely.  */
/* Reference overload: print REF with full contents (verbosity 3).  */
7620 DEBUG_FUNCTION void
7621 debug_verbose (struct loop &ref)
7623 print_loop (stderr, &ref, 0, /*verbosity*/3);
7626 DEBUG_FUNCTION void
7627 debug_verbose (struct loop *ptr)
7629 if (ptr)
7630 debug (*ptr);
7631 else
7632 fprintf (stderr, "<nil>\n");
7636 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
/* Debugger-callable wrapper: print the whole loop tree to stderr.  */
7638 DEBUG_FUNCTION void
7639 debug_loops (int verbosity)
7641 print_loops (stderr, verbosity);
7644 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
7646 DEBUG_FUNCTION void
7647 debug_loop (struct loop *loop, int verbosity)
7649 print_loop (stderr, loop, 0, verbosity);
7652 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7653 level.  */
/* Looks the loop up by number in the current function (cfun).  */
7655 DEBUG_FUNCTION void
7656 debug_loop_num (unsigned num, int verbosity)
7658 debug_loop (get_loop (cfun, num), verbosity);
7661 /* Return true if BB ends with a call, possibly followed by some
7662 instructions that must stay with the call. Return false,
7663 otherwise. */
7665 static bool
7666 gimple_block_ends_with_call_p (basic_block bb)
7668 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7669 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7673 /* Return true if BB ends with a conditional branch. Return false,
7674 otherwise. */
7676 static bool
7677 gimple_block_ends_with_condjump_p (const_basic_block bb)
7679 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7680 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7684 /* Return true if we need to add fake edge to exit at statement T.
7685 Helper function for gimple_flow_call_edges_add.  */
7687 static bool
7688 need_fake_edge_p (gimple t)
7690 tree fndecl = NULL_TREE;
7691 int call_flags = 0;
7693 /* NORETURN and LONGJMP calls already have an edge to exit.
7694 CONST and PURE calls do not need one.
7695 We don't currently check for CONST and PURE here, although
7696 it would be a good idea, because those attributes are
7697 figured out from the RTL in mark_constant_function, and
7698 the counter incrementation code from -fprofile-arcs
7699 leads to different results from -fbranch-probabilities.  */
7700 if (is_gimple_call (t))
7702 fndecl = gimple_call_fndecl (t);
7703 call_flags = gimple_call_flags (t);
/* A nothrow, non-returns-twice builtin (other than fork, see below)
   cannot leave the block abnormally: no fake edge needed.  */
7706 if (is_gimple_call (t)
7707 && fndecl
7708 && DECL_BUILT_IN (fndecl)
7709 && (call_flags & ECF_NOTHROW)
7710 && !(call_flags & ECF_RETURNS_TWICE)
7711 /* fork() doesn't really return twice, but the effect of
7712 wrapping it in __gcov_fork() which calls __gcov_flush()
7713 and clears the counters before forking has the same
7714 effect as returning twice.  Force a fake edge.  */
7715 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7716 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7717 return false;
/* Any other call needs a fake edge, except a noreturn call whose
   successor edges are already all fake.  */
7719 if (is_gimple_call (t))
7721 edge_iterator ei;
7722 edge e;
7723 basic_block bb;
7725 if (!(call_flags & ECF_NORETURN))
7726 return true;
7728 bb = gimple_bb (t);
7729 FOR_EACH_EDGE (e, ei, bb->succs)
7730 if ((e->flags & EDGE_FAKE) == 0)
7731 return true;
/* Volatile or input-less asm statements may also not fall through.  */
7734 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7735 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7736 return true;
7738 return false;
7742 /* Add fake edges to the function exit for any non constant and non
7743 noreturn calls (or noreturn calls with EH/abnormal edges),
7744 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7745 or to the whole CFG if BLOCKS is zero.  Return the number of blocks
7746 that were split.
7748 The goal is to expose cases in which entering a basic block does
7749 not imply that all subsequent instructions must be executed.  */
7751 static int
7752 gimple_flow_call_edges_add (sbitmap blocks)
7754 int i;
7755 int blocks_split = 0;
7756 int last_bb = last_basic_block_for_fn (cfun);
7757 bool check_last_block = false;
/* Nothing to do in an empty function (only ENTRY/EXIT blocks).  */
7759 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7760 return 0;
7762 if (! blocks)
7763 check_last_block = true;
7764 else
7765 check_last_block = bitmap_bit_p (blocks,
7766 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7768 /* In the last basic block, before epilogue generation, there will be
7769 a fallthru edge to EXIT.  Special care is required if the last insn
7770 of the last basic block is a call because make_edge folds duplicate
7771 edges, which would result in the fallthru edge also being marked
7772 fake, which would result in the fallthru edge being removed by
7773 remove_fake_edges, which would result in an invalid CFG.
7775 Moreover, we can't elide the outgoing fake edge, since the block
7776 profiler needs to take this into account in order to solve the minimal
7777 spanning tree in the case that the call doesn't return.
7779 Handle this by adding a dummy instruction in a new last basic block.  */
7780 if (check_last_block)
7782 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7783 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7784 gimple t = NULL;
7786 if (!gsi_end_p (gsi))
7787 t = gsi_stmt (gsi);
7789 if (t && need_fake_edge_p (t))
7791 edge e;
7793 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7794 if (e)
7796 gsi_insert_on_edge (e, gimple_build_nop ());
7797 gsi_commit_edge_inserts ();
7802 /* Now add fake edges to the function exit for any non constant
7803 calls since there is no way that we can determine if they will
7804 return or not...  */
7805 for (i = 0; i < last_bb; i++)
7807 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7808 gimple_stmt_iterator gsi;
7809 gimple stmt, last_stmt;
7811 if (!bb)
7812 continue;
7814 if (blocks && !bitmap_bit_p (blocks, i))
7815 continue;
7817 gsi = gsi_last_nondebug_bb (bb);
7818 if (!gsi_end_p (gsi))
7820 last_stmt = gsi_stmt (gsi);
/* NOTE(review): the statements below form the body of a backwards
   do/while walk over BB's statements, paired with the
   "while (!gsi_end_p (gsi));" at the bottom; the "do" opener line
   appears to have been lost in this extraction.  */
7823 stmt = gsi_stmt (gsi);
7824 if (need_fake_edge_p (stmt))
7826 edge e;
7828 /* The handling above of the final block before the
7829 epilogue should be enough to verify that there is
7830 no edge to the exit block in CFG already.
7831 Calling make_edge in such case would cause us to
7832 mark that edge as fake and remove it later.  */
7833 #ifdef ENABLE_CHECKING
7834 if (stmt == last_stmt)
7836 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7837 gcc_assert (e == NULL);
7839 #endif
7841 /* Note that the following may create a new basic block
7842 and renumber the existing basic blocks.  */
7843 if (stmt != last_stmt)
7845 e = split_block (bb, stmt);
7846 if (e)
7847 blocks_split++;
7849 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7851 gsi_prev (&gsi);
7853 while (!gsi_end_p (gsi));
/* Splitting blocks invalidated flow info; re-verify when checking.  */
7857 if (blocks_split)
7858 verify_flow_info ();
7860 return blocks_split;
7863 /* Removes edge E and all the blocks dominated by it, and updates dominance
7864 information.  The IL in E->src needs to be updated separately.
7865 If dominance info is not available, only the edge E is removed.*/
7867 void
7868 remove_edge_and_dominated_blocks (edge e)
7870 vec<basic_block> bbs_to_remove = vNULL;
7871 vec<basic_block> bbs_to_fix_dom = vNULL;
7872 bitmap df, df_idom;
7873 edge f;
7874 edge_iterator ei;
7875 bool none_removed = false;
7876 unsigned i;
7877 basic_block bb, dbb;
7878 bitmap_iterator bi;
7880 /* If we are removing a path inside a non-root loop that may change
7881 loop ownership of blocks or remove loops.  Mark loops for fixup.  */
7882 if (current_loops
7883 && loop_outer (e->src->loop_father) != NULL
7884 && e->src->loop_father == e->dest->loop_father)
7885 loops_state_set (LOOPS_NEED_FIXUP);
/* Without dominator info we cannot compute the dominated set; just
   drop the edge.  */
7887 if (!dom_info_available_p (CDI_DOMINATORS))
7889 remove_edge (e);
7890 return;
7893 /* No updating is needed for edges to exit.  */
7894 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7896 if (cfgcleanup_altered_bbs)
7897 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7898 remove_edge (e);
7899 return;
7902 /* First, we find the basic blocks to remove.  If E->dest has a predecessor
7903 that is not dominated by E->dest, then this set is empty.  Otherwise,
7904 all the basic blocks dominated by E->dest are removed.
7906 Also, to DF_IDOM we store the immediate dominators of the blocks in
7907 the dominance frontier of E (i.e., of the successors of the
7908 removed blocks, if there are any, and of E->dest otherwise).  */
7909 FOR_EACH_EDGE (f, ei, e->dest->preds)
7911 if (f == e)
7912 continue;
7914 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7916 none_removed = true;
7917 break;
7921 df = BITMAP_ALLOC (NULL);
7922 df_idom = BITMAP_ALLOC (NULL);
7924 if (none_removed)
7925 bitmap_set_bit (df_idom,
7926 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7927 else
7929 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
/* Collect the dominance frontier of the removed region: successors of
   removed blocks that are not themselves removed.  */
7930 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7932 FOR_EACH_EDGE (f, ei, bb->succs)
7934 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7935 bitmap_set_bit (df, f->dest->index);
7938 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7939 bitmap_clear_bit (df, bb->index);
7941 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7943 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7944 bitmap_set_bit (df_idom,
7945 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7949 if (cfgcleanup_altered_bbs)
7951 /* Record the set of the altered basic blocks.  */
7952 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7953 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7956 /* Remove E and the cancelled blocks.  */
7957 if (none_removed)
7958 remove_edge (e);
7959 else
7961 /* Walk backwards so as to get a chance to substitute all
7962 released DEFs into debug stmts.  See
7963 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7964 details.  */
7965 for (i = bbs_to_remove.length (); i-- > 0; )
7966 delete_basic_block (bbs_to_remove[i]);
7969 /* Update the dominance information.  The immediate dominator may change only
7970 for blocks whose immediate dominator belongs to DF_IDOM:
7972 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7973 removal.  Let Z the arbitrary block such that idom(Z) = Y and
7974 Z dominates X after the removal.  Before removal, there exists a path P
7975 from Y to X that avoids Z.  Let F be the last edge on P that is
7976 removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
7977 dominates W, and because of P, Z does not dominate W), and W belongs to
7978 the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
7979 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7981 bb = BASIC_BLOCK_FOR_FN (cfun, i)
7982 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7983 dbb;
7984 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7985 bbs_to_fix_dom.safe_push (dbb);
7988 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
/* Release the scratch bitmaps and vectors.  */
7990 BITMAP_FREE (df);
7991 BITMAP_FREE (df_idom);
7992 bbs_to_remove.release ();
7993 bbs_to_fix_dom.release ();
7996 /* Purge dead EH edges from basic block BB. */
7998 bool
7999 gimple_purge_dead_eh_edges (basic_block bb)
8001 bool changed = false;
8002 edge e;
8003 edge_iterator ei;
8004 gimple stmt = last_stmt (bb);
8006 if (stmt && stmt_can_throw_internal (stmt))
8007 return false;
8009 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8011 if (e->flags & EDGE_EH)
8013 remove_edge_and_dominated_blocks (e);
8014 changed = true;
8016 else
8017 ei_next (&ei);
8020 return changed;
8023 /* Purge dead EH edges from basic block listed in BLOCKS. */
8025 bool
8026 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8028 bool changed = false;
8029 unsigned i;
8030 bitmap_iterator bi;
8032 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8034 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8036 /* Earlier gimple_purge_dead_eh_edges could have removed
8037 this basic block already. */
8038 gcc_assert (bb || changed);
8039 if (bb != NULL)
8040 changed |= gimple_purge_dead_eh_edges (bb);
8043 return changed;
8046 /* Purge dead abnormal call edges from basic block BB. */
8048 bool
8049 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8051 bool changed = false;
8052 edge e;
8053 edge_iterator ei;
8054 gimple stmt = last_stmt (bb);
8056 if (!cfun->has_nonlocal_label
8057 && !cfun->calls_setjmp)
8058 return false;
8060 if (stmt && stmt_can_make_abnormal_goto (stmt))
8061 return false;
8063 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8065 if (e->flags & EDGE_ABNORMAL)
8067 if (e->flags & EDGE_FALLTHRU)
8068 e->flags &= ~EDGE_ABNORMAL;
8069 else
8070 remove_edge_and_dominated_blocks (e);
8071 changed = true;
8073 else
8074 ei_next (&ei);
8077 return changed;
8080 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8082 bool
8083 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8085 bool changed = false;
8086 unsigned i;
8087 bitmap_iterator bi;
8089 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8091 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8093 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8094 this basic block already. */
8095 gcc_assert (bb || changed);
8096 if (bb != NULL)
8097 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8100 return changed;
8103 /* This function is called whenever a new edge is created or
8104 redirected. */
8106 static void
8107 gimple_execute_on_growing_pred (edge e)
8109 basic_block bb = e->dest;
8111 if (!gimple_seq_empty_p (phi_nodes (bb)))
8112 reserve_phi_args_for_new_edge (bb);
8115 /* This function is called immediately before edge E is removed from
8116 the edge vector E->dest->preds. */
8118 static void
8119 gimple_execute_on_shrinking_pred (edge e)
8121 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8122 remove_phi_args (e);
8125 /*---------------------------------------------------------------------------
8126 Helper functions for Loop versioning
8127 ---------------------------------------------------------------------------*/
8129 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8130 of 'first'. Both of them are dominated by 'new_head' basic block. When
8131 'new_head' was created by 'second's incoming edge it received phi arguments
8132 on the edge by split_edge(). Later, additional edge 'e' was created to
8133 connect 'new_head' and 'first'. Now this routine adds phi args on this
8134 additional edge 'e' that new_head to second edge received as part of edge
8135 splitting. */
8137 static void
8138 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8139 basic_block new_head, edge e)
8141 gphi *phi1, *phi2;
8142 gphi_iterator psi1, psi2;
8143 tree def;
8144 edge e2 = find_edge (new_head, second);
8146 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8147 edge, we should always have an edge from NEW_HEAD to SECOND. */
8148 gcc_assert (e2 != NULL);
8150 /* Browse all 'second' basic block phi nodes and add phi args to
8151 edge 'e' for 'first' head. PHI args are always in correct order. */
8153 for (psi2 = gsi_start_phis (second),
8154 psi1 = gsi_start_phis (first);
8155 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8156 gsi_next (&psi2), gsi_next (&psi1))
8158 phi1 = psi1.phi ();
8159 phi2 = psi2.phi ();
8160 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8161 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8166 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8167 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8168 the destination of the ELSE part. */
8170 static void
8171 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8172 basic_block second_head ATTRIBUTE_UNUSED,
8173 basic_block cond_bb, void *cond_e)
8175 gimple_stmt_iterator gsi;
8176 gimple new_cond_expr;
8177 tree cond_expr = (tree) cond_e;
8178 edge e0;
8180 /* Build new conditional expr */
8181 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8182 NULL_TREE, NULL_TREE);
8184 /* Add new cond in cond_bb. */
8185 gsi = gsi_last_bb (cond_bb);
8186 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8188 /* Adjust edges appropriately to connect new head with first head
8189 as well as second head. */
8190 e0 = single_succ_edge (cond_bb);
8191 e0->flags &= ~EDGE_FALLTHRU;
8192 e0->flags |= EDGE_FALSE_VALUE;
8196 /* Do book-keeping of basic block BB for the profile consistency checker.
8197 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8198 then do post-pass accounting. Store the counting in RECORD. */
8199 static void
8200 gimple_account_profile_record (basic_block bb, int after_pass,
8201 struct profile_record *record)
8203 gimple_stmt_iterator i;
8204 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8206 record->size[after_pass]
8207 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8208 if (profile_status_for_fn (cfun) == PROFILE_READ)
8209 record->time[after_pass]
8210 += estimate_num_insns (gsi_stmt (i),
8211 &eni_time_weights) * bb->count;
8212 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8213 record->time[after_pass]
8214 += estimate_num_insns (gsi_stmt (i),
8215 &eni_time_weights) * bb->frequency;
/* The GIMPLE instantiation of the generic CFG hook interface; installed
   while the IL is in GIMPLE form so generic CFG code dispatches to the
   gimple_* implementations above.  */
8219 struct cfg_hooks gimple_cfg_hooks = {
8220 "gimple",
8221 gimple_verify_flow_info,
8222 gimple_dump_bb, /* dump_bb */
8223 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8224 create_bb, /* create_basic_block */
8225 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8226 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8227 gimple_can_remove_branch_p, /* can_remove_branch_p */
8228 remove_bb, /* delete_basic_block */
8229 gimple_split_block, /* split_block */
8230 gimple_move_block_after, /* move_block_after */
8231 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8232 gimple_merge_blocks, /* merge_blocks */
8233 gimple_predict_edge, /* predict_edge */
8234 gimple_predicted_by_p, /* predicted_by_p */
8235 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8236 gimple_duplicate_bb, /* duplicate_block */
8237 gimple_split_edge, /* split_edge */
8238 gimple_make_forwarder_block, /* make_forward_block */
8239 NULL, /* tidy_fallthru_edge */
8240 NULL, /* force_nonfallthru */
8241 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8242 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8243 gimple_flow_call_edges_add, /* flow_call_edges_add */
8244 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8245 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8246 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8247 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8248 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8249 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8250 flush_pending_stmts, /* flush_pending_stmts */
8251 gimple_empty_block_p, /* block_empty_p */
8252 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8253 gimple_account_profile_record,
8257 /* Split all critical edges.  */
/* Returns 0 (TODO flags for the pass manager).  Abnormal edges are never
   split since they cannot carry inserted code.  */
8259 unsigned int
8260 split_critical_edges (void)
8262 basic_block bb;
8263 edge e;
8264 edge_iterator ei;
8266 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8267 expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8268 mappings around the calls to split_edge.  */
8269 start_recording_case_labels ();
8270 FOR_ALL_BB_FN (bb, cfun)
8272 FOR_EACH_EDGE (e, ei, bb->succs)
8274 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8275 split_edge (e);
8276 /* PRE inserts statements to edges and expects that
8277 since split_critical_edges was done beforehand, committing edge
8278 insertions will not split more edges.  In addition to critical
8279 edges we must split edges that have multiple successors and
8280 end by control flow statements, such as RESX.
8281 Go ahead and split them too.  This matches the logic in
8282 gimple_find_edge_insert_loc.  */
8283 else if ((!single_pred_p (e->dest)
8284 || !gimple_seq_empty_p (phi_nodes (e->dest))
8285 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8286 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8287 && !(e->flags & EDGE_ABNORMAL))
8289 gimple_stmt_iterator gsi;
8291 gsi = gsi_last_bb (e->src);
8292 if (!gsi_end_p (gsi)
8293 && stmt_ends_bb_p (gsi_stmt (gsi))
8294 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8295 && !gimple_call_builtin_p (gsi_stmt (gsi),
8296 BUILT_IN_RETURN)))
8297 split_edge (e);
8301 end_recording_case_labels ();
8302 return 0;
8305 namespace {
8307 const pass_data pass_data_split_crit_edges =
8309 GIMPLE_PASS, /* type */
8310 "crited", /* name */
8311 OPTGROUP_NONE, /* optinfo_flags */
8312 TV_TREE_SPLIT_EDGES, /* tv_id */
8313 PROP_cfg, /* properties_required */
8314 PROP_no_crit_edges, /* properties_provided */
8315 0, /* properties_destroyed */
8316 0, /* todo_flags_start */
8317 0, /* todo_flags_finish */
8320 class pass_split_crit_edges : public gimple_opt_pass
8322 public:
8323 pass_split_crit_edges (gcc::context *ctxt)
8324 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8327 /* opt_pass methods: */
8328 virtual unsigned int execute (function *) { return split_critical_edges (); }
8330 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8331 }; // class pass_split_crit_edges
8333 } // anon namespace
8335 gimple_opt_pass *
8336 make_pass_split_crit_edges (gcc::context *ctxt)
8338 return new pass_split_crit_edges (ctxt);
8342 /* Insert COND expression which is GIMPLE_COND after STMT
8343 in basic block BB with appropriate basic block split
8344 and creation of a new conditionally executed basic block.
8345 Return created basic block. */
8346 basic_block
8347 insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8349 edge fall = split_block (bb, stmt);
8350 gimple_stmt_iterator iter = gsi_last_bb (bb);
8351 basic_block new_bb;
8353 /* Insert cond statement. */
8354 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8355 if (gsi_end_p (iter))
8356 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8357 else
8358 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8360 /* Create conditionally executed block. */
8361 new_bb = create_empty_bb (bb);
8362 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8363 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8365 /* Fix edge for split bb. */
8366 fall->flags = EDGE_FALSE_VALUE;
8368 /* Update dominance info. */
8369 if (dom_info_available_p (CDI_DOMINATORS))
8371 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8372 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8375 /* Update loop info. */
8376 if (current_loops)
8377 add_bb_to_loop (new_bb, bb->loop_father);
8379 return new_bb;
8382 /* Build a ternary operation and gimplify it. Emit code before GSI.
8383 Return the gimple_val holding the result. */
8385 tree
8386 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8387 tree type, tree a, tree b, tree c)
8389 tree ret;
8390 location_t loc = gimple_location (gsi_stmt (*gsi));
8392 ret = fold_build3_loc (loc, code, type, a, b, c);
8393 STRIP_NOPS (ret);
8395 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8396 GSI_SAME_STMT);
8399 /* Build a binary operation and gimplify it. Emit code before GSI.
8400 Return the gimple_val holding the result. */
8402 tree
8403 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8404 tree type, tree a, tree b)
8406 tree ret;
8408 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8409 STRIP_NOPS (ret);
8411 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8412 GSI_SAME_STMT);
8415 /* Build a unary operation and gimplify it. Emit code before GSI.
8416 Return the gimple_val holding the result. */
8418 tree
8419 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8420 tree a)
8422 tree ret;
8424 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8425 STRIP_NOPS (ret);
8427 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8428 GSI_SAME_STMT);
8433 /* Given a basic block B which ends with a conditional and has
8434 precisely two successors, determine which of the edges is taken if
8435 the conditional is true and which is taken if the conditional is
8436 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8438 void
8439 extract_true_false_edges_from_block (basic_block b,
8440 edge *true_edge,
8441 edge *false_edge)
8443 edge e = EDGE_SUCC (b, 0);
8445 if (e->flags & EDGE_TRUE_VALUE)
8447 *true_edge = e;
8448 *false_edge = EDGE_SUCC (b, 1);
8450 else
8452 *false_edge = e;
8453 *true_edge = EDGE_SUCC (b, 1);
8457 /* Emit return warnings. */
8459 namespace {
8461 const pass_data pass_data_warn_function_return =
8463 GIMPLE_PASS, /* type */
8464 "*warn_function_return", /* name */
8465 OPTGROUP_NONE, /* optinfo_flags */
8466 TV_NONE, /* tv_id */
8467 PROP_cfg, /* properties_required */
8468 0, /* properties_provided */
8469 0, /* properties_destroyed */
8470 0, /* todo_flags_start */
8471 0, /* todo_flags_finish */
8474 class pass_warn_function_return : public gimple_opt_pass
8476 public:
8477 pass_warn_function_return (gcc::context *ctxt)
8478 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8481 /* opt_pass methods: */
8482 virtual unsigned int execute (function *);
8484 }; // class pass_warn_function_return
8486 unsigned int
8487 pass_warn_function_return::execute (function *fun)
8489 source_location location;
8490 gimple last;
8491 edge e;
8492 edge_iterator ei;
8494 if (!targetm.warn_func_return (fun->decl))
8495 return 0;
8497 /* If we have a path to EXIT, then we do return. */
8498 if (TREE_THIS_VOLATILE (fun->decl)
8499 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8501 location = UNKNOWN_LOCATION;
8502 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8504 last = last_stmt (e->src);
8505 if ((gimple_code (last) == GIMPLE_RETURN
8506 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8507 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8508 break;
8510 if (location == UNKNOWN_LOCATION)
8511 location = cfun->function_end_locus;
8512 warning_at (location, 0, "%<noreturn%> function does return");
8515 /* If we see "return;" in some basic block, then we do reach the end
8516 without returning a value. */
8517 else if (warn_return_type
8518 && !TREE_NO_WARNING (fun->decl)
8519 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8520 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8522 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8524 gimple last = last_stmt (e->src);
8525 greturn *return_stmt = dyn_cast <greturn *> (last);
8526 if (return_stmt
8527 && gimple_return_retval (return_stmt) == NULL
8528 && !gimple_no_warning_p (last))
8530 location = gimple_location (last);
8531 if (location == UNKNOWN_LOCATION)
8532 location = fun->function_end_locus;
8533 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8534 TREE_NO_WARNING (fun->decl) = 1;
8535 break;
8539 return 0;
8542 } // anon namespace
8544 gimple_opt_pass *
8545 make_pass_warn_function_return (gcc::context *ctxt)
8547 return new pass_warn_function_return (ctxt);
8550 /* Walk a gimplified function and warn for functions whose return value is
8551 ignored and attribute((warn_unused_result)) is set. This is done before
8552 inlining, so we don't have to worry about that. */
8554 static void
8555 do_warn_unused_result (gimple_seq seq)
8557 tree fdecl, ftype;
8558 gimple_stmt_iterator i;
8560 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8562 gimple g = gsi_stmt (i);
8564 switch (gimple_code (g))
8566 case GIMPLE_BIND:
8567 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8568 break;
8569 case GIMPLE_TRY:
8570 do_warn_unused_result (gimple_try_eval (g));
8571 do_warn_unused_result (gimple_try_cleanup (g));
8572 break;
8573 case GIMPLE_CATCH:
8574 do_warn_unused_result (gimple_catch_handler (
8575 as_a <gcatch *> (g)));
8576 break;
8577 case GIMPLE_EH_FILTER:
8578 do_warn_unused_result (gimple_eh_filter_failure (g));
8579 break;
8581 case GIMPLE_CALL:
8582 if (gimple_call_lhs (g))
8583 break;
8584 if (gimple_call_internal_p (g))
8585 break;
8587 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8588 LHS. All calls whose value is ignored should be
8589 represented like this. Look for the attribute. */
8590 fdecl = gimple_call_fndecl (g);
8591 ftype = gimple_call_fntype (g);
8593 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8595 location_t loc = gimple_location (g);
8597 if (fdecl)
8598 warning_at (loc, OPT_Wunused_result,
8599 "ignoring return value of %qD, "
8600 "declared with attribute warn_unused_result",
8601 fdecl);
8602 else
8603 warning_at (loc, OPT_Wunused_result,
8604 "ignoring return value of function "
8605 "declared with attribute warn_unused_result");
8607 break;
8609 default:
8610 /* Not a container, not a call, or a call whose value is used. */
8611 break;
8616 namespace {
8618 const pass_data pass_data_warn_unused_result =
8620 GIMPLE_PASS, /* type */
8621 "*warn_unused_result", /* name */
8622 OPTGROUP_NONE, /* optinfo_flags */
8623 TV_NONE, /* tv_id */
8624 PROP_gimple_any, /* properties_required */
8625 0, /* properties_provided */
8626 0, /* properties_destroyed */
8627 0, /* todo_flags_start */
8628 0, /* todo_flags_finish */
8631 class pass_warn_unused_result : public gimple_opt_pass
8633 public:
8634 pass_warn_unused_result (gcc::context *ctxt)
8635 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8638 /* opt_pass methods: */
8639 virtual bool gate (function *) { return flag_warn_unused_result; }
8640 virtual unsigned int execute (function *)
8642 do_warn_unused_result (gimple_body (current_function_decl));
8643 return 0;
8646 }; // class pass_warn_unused_result
8648 } // anon namespace
8650 gimple_opt_pass *
8651 make_pass_warn_unused_result (gcc::context *ctxt)
8653 return new pass_warn_unused_result (ctxt);
8656 /* IPA passes, compilation of earlier functions or inlining
8657 might have changed some properties, such as marked functions nothrow,
8658 pure, const or noreturn.
8659 Remove redundant edges and basic blocks, and create new ones if necessary.
8661 This pass can't be executed as stand alone pass from pass manager, because
8662 in between inlining and this fixup the verify_flow_info would fail. */
8664 unsigned int
8665 execute_fixup_cfg (void)
8667 basic_block bb;
8668 gimple_stmt_iterator gsi;
8669 int todo = 0;
8670 gcov_type count_scale;
8671 edge e;
8672 edge_iterator ei;
8674 count_scale
8675 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8676 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8678 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8679 cgraph_node::get (current_function_decl)->count;
8680 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8681 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8682 count_scale);
8684 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8685 e->count = apply_scale (e->count, count_scale);
8687 FOR_EACH_BB_FN (bb, cfun)
8689 bb->count = apply_scale (bb->count, count_scale);
8690 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8692 gimple stmt = gsi_stmt (gsi);
8693 tree decl = is_gimple_call (stmt)
8694 ? gimple_call_fndecl (stmt)
8695 : NULL;
8696 if (decl)
8698 int flags = gimple_call_flags (stmt);
8699 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8701 if (gimple_purge_dead_abnormal_call_edges (bb))
8702 todo |= TODO_cleanup_cfg;
8704 if (gimple_in_ssa_p (cfun))
8706 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8707 update_stmt (stmt);
8711 if (flags & ECF_NORETURN
8712 && fixup_noreturn_call (stmt))
8713 todo |= TODO_cleanup_cfg;
8716 /* Remove stores to variables we marked write-only.
8717 Keep access when store has side effect, i.e. in case when source
8718 is volatile. */
8719 if (gimple_store_p (stmt)
8720 && !gimple_has_side_effects (stmt))
8722 tree lhs = get_base_address (gimple_get_lhs (stmt));
8724 if (TREE_CODE (lhs) == VAR_DECL
8725 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8726 && varpool_node::get (lhs)->writeonly)
8728 unlink_stmt_vdef (stmt);
8729 gsi_remove (&gsi, true);
8730 release_defs (stmt);
8731 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8732 continue;
8735 /* For calls we can simply remove LHS when it is known
8736 to be write-only. */
8737 if (is_gimple_call (stmt)
8738 && gimple_get_lhs (stmt))
8740 tree lhs = get_base_address (gimple_get_lhs (stmt));
8742 if (TREE_CODE (lhs) == VAR_DECL
8743 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8744 && varpool_node::get (lhs)->writeonly)
8746 gimple_call_set_lhs (stmt, NULL);
8747 update_stmt (stmt);
8748 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8752 if (maybe_clean_eh_stmt (stmt)
8753 && gimple_purge_dead_eh_edges (bb))
8754 todo |= TODO_cleanup_cfg;
8755 gsi_next (&gsi);
8758 FOR_EACH_EDGE (e, ei, bb->succs)
8759 e->count = apply_scale (e->count, count_scale);
8761 /* If we have a basic block with no successors that does not
8762 end with a control statement or a noreturn call end it with
8763 a call to __builtin_unreachable. This situation can occur
8764 when inlining a noreturn call that does in fact return. */
8765 if (EDGE_COUNT (bb->succs) == 0)
8767 gimple stmt = last_stmt (bb);
8768 if (!stmt
8769 || (!is_ctrl_stmt (stmt)
8770 && (!is_gimple_call (stmt)
8771 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8773 if (stmt && is_gimple_call (stmt))
8774 gimple_call_set_ctrl_altering (stmt, false);
8775 stmt = gimple_build_call
8776 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8777 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8778 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8782 if (count_scale != REG_BR_PROB_BASE)
8783 compute_function_frequency ();
8785 if (current_loops
8786 && (todo & TODO_cleanup_cfg))
8787 loops_state_set (LOOPS_NEED_FIXUP);
8789 return todo;
8792 namespace {
8794 const pass_data pass_data_fixup_cfg =
8796 GIMPLE_PASS, /* type */
8797 "fixup_cfg", /* name */
8798 OPTGROUP_NONE, /* optinfo_flags */
8799 TV_NONE, /* tv_id */
8800 PROP_cfg, /* properties_required */
8801 0, /* properties_provided */
8802 0, /* properties_destroyed */
8803 0, /* todo_flags_start */
8804 0, /* todo_flags_finish */
8807 class pass_fixup_cfg : public gimple_opt_pass
8809 public:
8810 pass_fixup_cfg (gcc::context *ctxt)
8811 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8814 /* opt_pass methods: */
8815 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8816 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8818 }; // class pass_fixup_cfg
8820 } // anon namespace
8822 gimple_opt_pass *
8823 make_pass_fixup_cfg (gcc::context *ctxt)
8825 return new pass_fixup_cfg (ctxt);
8828 /* Garbage collection support for edge_def. */
8830 extern void gt_ggc_mx (tree&);
8831 extern void gt_ggc_mx (gimple&);
8832 extern void gt_ggc_mx (rtx&);
8833 extern void gt_ggc_mx (basic_block&);
8835 static void
8836 gt_ggc_mx (rtx_insn *& x)
8838 if (x)
8839 gt_ggc_mx_rtx_def ((void *) x);
8842 void
8843 gt_ggc_mx (edge_def *e)
8845 tree block = LOCATION_BLOCK (e->goto_locus);
8846 gt_ggc_mx (e->src);
8847 gt_ggc_mx (e->dest);
8848 if (current_ir_type () == IR_GIMPLE)
8849 gt_ggc_mx (e->insns.g);
8850 else
8851 gt_ggc_mx (e->insns.r);
8852 gt_ggc_mx (block);
8855 /* PCH support for edge_def. */
8857 extern void gt_pch_nx (tree&);
8858 extern void gt_pch_nx (gimple&);
8859 extern void gt_pch_nx (rtx&);
8860 extern void gt_pch_nx (basic_block&);
8862 static void
8863 gt_pch_nx (rtx_insn *& x)
8865 if (x)
8866 gt_pch_nx_rtx_def ((void *) x);
8869 void
8870 gt_pch_nx (edge_def *e)
8872 tree block = LOCATION_BLOCK (e->goto_locus);
8873 gt_pch_nx (e->src);
8874 gt_pch_nx (e->dest);
8875 if (current_ir_type () == IR_GIMPLE)
8876 gt_pch_nx (e->insns.g);
8877 else
8878 gt_pch_nx (e->insns.r);
8879 gt_pch_nx (block);
8882 void
8883 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8885 tree block = LOCATION_BLOCK (e->goto_locus);
8886 op (&(e->src), cookie);
8887 op (&(e->dest), cookie);
8888 if (current_ir_type () == IR_GIMPLE)
8889 op (&(e->insns.g), cookie);
8890 else
8891 op (&(e->insns.r), cookie);
8892 op (&(block), cookie);