/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
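
/* An illustrative sketch (not taken from a real dump): given

     switch (x)
       {
       case 1:
       case 2: goto L1;
       default: goto L2;
       }

   both case labels 1 and 2 reach the block holding L1 over the same
   edge, so that edge maps to the chain "case 1 -> case 2" linked
   through CASE_CHAIN.  The chains are built lazily, the first time an
   edge of a given switch is queried (see get_cases_for_edge below).  */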
static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */
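
/* As an illustrative sketch, a loop guarded by "#pragma GCC ivdep"
   arrives here looking roughly like

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
     if (_2 != 0) goto <loop body>; else goto <exit>;

   The switch below then sets loop->safelen = INT_MAX and the call is
   replaced by the plain copy "_2 = _1;".  */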
static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS ends in a call to
   __builtin_unreachable () preceded only by labels, debug stmts and
   clobbers.  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:

     <bb C>:
       ...
       if (something)
	 goto <bb N>;
       else
	 goto <bb M>;
     <bb N>:
       __builtin_unreachable ();
     <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
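
/* As a hedged illustration of the temporary introduced above: for a
   returns-twice call such as

     x = setjmp (buf);

   the store to X is split out as

     tmp = setjmp (buf);
     x = tmp;

   so the old value of X is still available on the abnormal edge if the
   call returns a second time.  */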
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
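
/* For the non-computed-goto case, the dispatcher block ends up holding
   just an internal call (sketch, not a verbatim dump):

     <bb D>:
     .ABNORMAL_DISPATCHER (0);

   with abnormal edges into D from every setjmp-like call and abnormal
   edges out of D to every nonlocal or forced label, so the otherwise
   many-to-many set of abnormal edges is factored through one block.  */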
static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and for abnormal edges from calls that return
     twice or reach forced labels, factor the abnormal edges too: all
     abnormal edges from those calls go to a common artificial basic
     block with an ABNORMAL_DISPATCHER internal call, and abnormal edges
     lead from that basic block to all forced labels and calls returning
     twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
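
/* For example (illustrative only), with everything on one source line:

     if (p) x = 1; else x = 2;

   the blocks for the two assignments share that line's locus, so a
   sample-based profiler could not attribute counts to them by line
   number alone; giving one of the blocks a fresh discriminator (see
   assign_discriminators below) keeps their profiles separate.  */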
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label, so emit a
     label into the very first basic block.  This will hopefully make
     even the dataflow and undefined-variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */
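
/* For example (illustrative only), after lowering we may have

     L1:
     L2:			; artificial, never referenced
       x = 1;

   Step 1 picks L1 as the leading label of the block, step 2 rewrites
   any references to L2 into references to L1, and step 3 deletes the
   now-unused artificial label L2.  */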
/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1978 /* Merge block B into block A. */
1980 static void
1981 gimple_merge_blocks (basic_block a, basic_block b)
1983 gimple_stmt_iterator last, gsi;
1984 gphi_iterator psi;
1986 if (dump_file)
1987 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1989 /* Remove all single-valued PHI nodes from block B of the form
1990 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1991 gsi = gsi_last_bb (a);
1992 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1994 gimple *phi = gsi_stmt (psi);
1995 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1996 gimple *copy;
1997 bool may_replace_uses = (virtual_operand_p (def)
1998 || may_propagate_copy (def, use));
2000 /* In case we maintain loop closed ssa form, do not propagate arguments
2001 of loop exit phi nodes. */
2002 if (current_loops
2003 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2004 && !virtual_operand_p (def)
2005 && TREE_CODE (use) == SSA_NAME
2006 && a->loop_father != b->loop_father)
2007 may_replace_uses = false;
2009 if (!may_replace_uses)
2011 gcc_assert (!virtual_operand_p (def));
2013 /* Note that just emitting the copies is fine -- there is no problem
2014 with ordering of phi nodes. This is because A is the single
2015 predecessor of B, and therefore results of the phi nodes cannot
2016 appear as arguments of the phi nodes. */
2017 copy = gimple_build_assign (def, use);
2018 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2019 remove_phi_node (&psi, false);
2021 else
2023 /* If we deal with a PHI for virtual operands, we can simply
2024 propagate these without fussing with folding or updating
2025 the stmt. */
2026 if (virtual_operand_p (def))
2028 imm_use_iterator iter;
2029 use_operand_p use_p;
2030 gimple *stmt;
2032 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2033 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2034 SET_USE (use_p, use);
2036 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2037 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2039 else
2040 replace_uses_by (def, use);
2042 remove_phi_node (&psi, true);
2046 /* Ensure that B follows A. */
2047 move_block_after (b, a);
2049 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2050 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2052 /* Remove labels from B and set gimple_bb to A for other statements. */
2053 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2055 gimple *stmt = gsi_stmt (gsi);
2056 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2058 tree label = gimple_label_label (label_stmt);
2059 int lp_nr;
2061 gsi_remove (&gsi, false);
2063 /* Now that we can thread computed gotos, we might have
2064 a situation where we have a forced label in block B.
2065 However, the label at the start of block B might still be
2066 used in other ways (think about the runtime checking for
2067 Fortran assigned gotos). So we cannot just delete the
2068 label. Instead we move the label to the start of block A. */
2069 if (FORCED_LABEL (label))
2071 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2072 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2074 /* Other user labels are kept around in the form of a debug stmt. */
2075 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2077 gimple *dbg = gimple_build_debug_bind (label,
2078 integer_zero_node,
2079 stmt);
2080 gimple_debug_bind_reset_value (dbg);
2081 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2084 lp_nr = EH_LANDING_PAD_NR (label);
2085 if (lp_nr)
2087 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2088 lp->post_landing_pad = NULL;
2091 else
2093 gimple_set_bb (stmt, a);
2094 gsi_next (&gsi);
2098 /* When merging two BBs, if their counts are different, the larger count
2099 is selected as the new bb count. This is to handle inconsistent
2100 profiles. */
2101 if (a->loop_father == b->loop_father)
2103 a->count = a->count.merge (b->count);
2106 /* Merge the sequences. */
2107 last = gsi_last_bb (a);
2108 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2109 set_bb_seq (b, NULL);
2111 if (cfgcleanup_altered_bbs)
2112 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
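/* Illustrative sketch (hypothetical GIMPLE): merging

     <bb A>: x_1 = ...;  ->  <bb B>: v_2 = PHI <x_1(A)>; use (v_2);

   propagates x_1 into the uses of v_2 (or emits "v_2 = x_1;" at the
   end of A when propagation is not allowed), appends B's remaining
   statements to A, and removes B's labels, moving FORCED_LABELs to A
   and turning other user labels into debug binds. */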
2116 /* Return the one of the two successors of BB that is not reachable by a
2117 complex edge, if there is one. Else, return BB. We use
2118 this in optimizations that use post-dominators for their heuristics,
2119 to catch the cases in C++ where function calls are involved. */
2121 basic_block
2122 single_noncomplex_succ (basic_block bb)
2124 edge e0, e1;
2125 if (EDGE_COUNT (bb->succs) != 2)
2126 return bb;
2128 e0 = EDGE_SUCC (bb, 0);
2129 e1 = EDGE_SUCC (bb, 1);
2130 if (e0->flags & EDGE_COMPLEX)
2131 return e1->dest;
2132 if (e1->flags & EDGE_COMPLEX)
2133 return e0->dest;
2135 return bb;
2138 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags it implies. */
2140 void
2141 notice_special_calls (gcall *call)
2143 int flags = gimple_call_flags (call);
2145 if (flags & ECF_MAY_BE_ALLOCA)
2146 cfun->calls_alloca = true;
2147 if (flags & ECF_RETURNS_TWICE)
2148 cfun->calls_setjmp = true;
2152 /* Clear flags set by notice_special_calls. Used by dead code removal
2153 to update the flags. */
2155 void
2156 clear_special_calls (void)
2158 cfun->calls_alloca = false;
2159 cfun->calls_setjmp = false;
2162 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2164 static void
2165 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2167 /* Since this block is no longer reachable, we can just delete all
2168 of its PHI nodes. */
2169 remove_phi_nodes (bb);
2171 /* Remove edges to BB's successors. */
2172 while (EDGE_COUNT (bb->succs) > 0)
2173 remove_edge (EDGE_SUCC (bb, 0));
2177 /* Remove statements of basic block BB. */
2179 static void
2180 remove_bb (basic_block bb)
2182 gimple_stmt_iterator i;
2184 if (dump_file)
2186 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2187 if (dump_flags & TDF_DETAILS)
2189 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2190 fprintf (dump_file, "\n");
2194 if (current_loops)
2196 struct loop *loop = bb->loop_father;
2198 /* If a loop gets removed, clean up the information associated
2199 with it. */
2200 if (loop->latch == bb
2201 || loop->header == bb)
2202 free_numbers_of_iterations_estimates (loop);
2205 /* Remove all the instructions in the block. */
2206 if (bb_seq (bb) != NULL)
2208 /* Walk backwards so as to get a chance to substitute all
2209 released DEFs into debug stmts. See
2210 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2211 details. */
2212 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2214 gimple *stmt = gsi_stmt (i);
2215 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2216 if (label_stmt
2217 && (FORCED_LABEL (gimple_label_label (label_stmt))
2218 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2220 basic_block new_bb;
2221 gimple_stmt_iterator new_gsi;
2223 /* A non-reachable non-local label may still be referenced.
2224 But it no longer needs to carry the extra semantics of
2225 non-locality. */
2226 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2228 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2229 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2232 new_bb = bb->prev_bb;
2233 new_gsi = gsi_start_bb (new_bb);
2234 gsi_remove (&i, false);
2235 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2237 else
2239 /* Release SSA definitions. */
2240 release_defs (stmt);
2241 gsi_remove (&i, true);
2244 if (gsi_end_p (i))
2245 i = gsi_last_bb (bb);
2246 else
2247 gsi_prev (&i);
2251 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2252 bb->il.gimple.seq = NULL;
2253 bb->il.gimple.phi_nodes = NULL;
2257 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or
2258 computed goto, and a predicate VAL, return the edge that will be taken
2259 out of the block. If VAL does not match a unique edge, NULL is returned. */
2261 edge
2262 find_taken_edge (basic_block bb, tree val)
2264 gimple *stmt;
2266 stmt = last_stmt (bb);
2268 gcc_assert (is_ctrl_stmt (stmt));
2270 if (gimple_code (stmt) == GIMPLE_COND)
2271 return find_taken_edge_cond_expr (bb, val);
2273 if (gimple_code (stmt) == GIMPLE_SWITCH)
2274 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2276 if (computed_goto_p (stmt))
2278 /* Only optimize if the argument is a label; if the argument is
2279 not a label then we cannot construct a proper CFG.
2281 It may be the case that we only need to allow the LABEL_REF to
2282 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2283 appear inside a LABEL_EXPR just to be safe. */
2284 if (val
2285 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2286 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2287 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2288 return NULL;
2291 gcc_unreachable ();
2294 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2295 statement, determine which of the outgoing edges will be taken out of the
2296 block. Return NULL if any edge may be taken. */
2298 static edge
2299 find_taken_edge_computed_goto (basic_block bb, tree val)
2301 basic_block dest;
2302 edge e = NULL;
2304 dest = label_to_block (val);
2305 if (dest)
2307 e = find_edge (bb, dest);
2308 gcc_assert (e != NULL);
2311 return e;
2314 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2315 statement, determine which of the two edges will be taken out of the
2316 block. Return NULL if either edge may be taken. */
2318 static edge
2319 find_taken_edge_cond_expr (basic_block bb, tree val)
2321 edge true_edge, false_edge;
2323 if (val == NULL
2324 || TREE_CODE (val) != INTEGER_CST)
2325 return NULL;
2327 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2329 return (integer_zerop (val) ? false_edge : true_edge);
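/* For instance (hypothetical): if the block ends in

     if (x_1 > 10) goto <bb 3>; else goto <bb 4>;

   and VAL is an INTEGER_CST of value 0, the edge to <bb 4> is
   returned; a non-constant VAL yields NULL because either edge may
   still be taken. */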
2332 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2333 statement, determine which edge will be taken out of the block. Return
2334 NULL if any edge may be taken. */
2336 static edge
2337 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2338 tree val)
2340 basic_block dest_bb;
2341 edge e;
2342 tree taken_case;
2344 if (gimple_switch_num_labels (switch_stmt) == 1)
2345 taken_case = gimple_switch_default_label (switch_stmt);
2346 else if (! val || TREE_CODE (val) != INTEGER_CST)
2347 return NULL;
2348 else
2349 taken_case = find_case_label_for_value (switch_stmt, val);
2350 dest_bb = label_to_block (CASE_LABEL (taken_case));
2352 e = find_edge (bb, dest_bb);
2353 gcc_assert (e);
2354 return e;
2358 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2359 We can make optimal use here of the fact that the case labels are
2360 sorted: We can do a binary search for a case matching VAL. */
2362 static tree
2363 find_case_label_for_value (gswitch *switch_stmt, tree val)
2365 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2366 tree default_case = gimple_switch_default_label (switch_stmt);
2368 for (low = 0, high = n; high - low > 1; )
2370 size_t i = (high + low) / 2;
2371 tree t = gimple_switch_label (switch_stmt, i);
2372 int cmp;
2374 /* Cache the result of comparing CASE_LOW and VAL. */
2375 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2377 if (cmp > 0)
2378 high = i;
2379 else
2380 low = i;
2382 if (CASE_HIGH (t) == NULL)
2384 /* A single-valued case label. */
2385 if (cmp == 0)
2386 return t;
2388 else
2390 /* A case range. We can only handle integer ranges. */
2391 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2392 return t;
2396 return default_case;
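/* Illustrative example (hypothetical): for

     switch (i_1) { case 1: ...; case 5 ... 9: ...; default: ...; }

   a VAL of 7 narrows the binary search to the range label, the
   CASE_HIGH test 9 >= 7 succeeds, and that CASE_LABEL_EXPR is
   returned; a VAL of 3 matches no label and falls back to
   DEFAULT_CASE. */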
2400 /* Dump a basic block on stderr. */
2402 void
2403 gimple_debug_bb (basic_block bb)
2405 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2409 /* Dump basic block with index N on stderr. */
2411 basic_block
2412 gimple_debug_bb_n (int n)
2414 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2415 return BASIC_BLOCK_FOR_FN (cfun, n);
2419 /* Dump the CFG on stderr.
2421 FLAGS are the same as those used by the tree dumping functions
2422 (see TDF_* in dumpfile.h). */
2424 void
2425 gimple_debug_cfg (dump_flags_t flags)
2427 gimple_dump_cfg (stderr, flags);
2431 /* Dump the program showing basic block boundaries on the given FILE.
2433 FLAGS are the same as those used by the tree dumping functions (see
2434 TDF_* in dumpfile.h). */
2436 void
2437 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2439 if (flags & TDF_DETAILS)
2441 dump_function_header (file, current_function_decl, flags);
2442 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2443 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2444 last_basic_block_for_fn (cfun));
2446 brief_dump_cfg (file, flags);
2447 fprintf (file, "\n");
2450 if (flags & TDF_STATS)
2451 dump_cfg_stats (file);
2453 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2457 /* Dump CFG statistics on FILE. */
2459 void
2460 dump_cfg_stats (FILE *file)
2462 static long max_num_merged_labels = 0;
2463 unsigned long size, total = 0;
2464 long num_edges;
2465 basic_block bb;
2466 const char * const fmt_str = "%-30s%-13s%12s\n";
2467 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2468 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2469 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2470 const char *funcname = current_function_name ();
2472 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2474 fprintf (file, "---------------------------------------------------------\n");
2475 fprintf (file, fmt_str, "", " Number of ", "Memory");
2476 fprintf (file, fmt_str, "", " instances ", "used ");
2477 fprintf (file, "---------------------------------------------------------\n");
2479 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2480 total += size;
2481 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2482 SCALE (size), LABEL (size));
2484 num_edges = 0;
2485 FOR_EACH_BB_FN (bb, cfun)
2486 num_edges += EDGE_COUNT (bb->succs);
2487 size = num_edges * sizeof (struct edge_def);
2488 total += size;
2489 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2491 fprintf (file, "---------------------------------------------------------\n");
2492 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2493 LABEL (total));
2494 fprintf (file, "---------------------------------------------------------\n");
2495 fprintf (file, "\n");
2497 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2498 max_num_merged_labels = cfg_stats.num_merged_labels;
2500 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2501 cfg_stats.num_merged_labels, max_num_merged_labels);
2503 fprintf (file, "\n");
2507 /* Dump CFG statistics on stderr. Keep extern so that it's always
2508 linked in the final executable. */
2510 DEBUG_FUNCTION void
2511 debug_cfg_stats (void)
2513 dump_cfg_stats (stderr);
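/* Typical interactive use (an assumption, not mandated by anything
   here) is from the debugger:

     (gdb) call debug_cfg_stats ()

   which prints the table produced by dump_cfg_stats to stderr. */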
2516 /*---------------------------------------------------------------------------
2517 Miscellaneous helpers
2518 ---------------------------------------------------------------------------*/
2520 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2521 flow. Transfers of control flow associated with EH are excluded. */
2523 static bool
2524 call_can_make_abnormal_goto (gimple *t)
2526 /* If the function has no non-local labels, then a call cannot make an
2527 abnormal transfer of control. */
2528 if (!cfun->has_nonlocal_label
2529 && !cfun->calls_setjmp)
2530 return false;
2532 /* Likewise if the call has no side effects. */
2533 if (!gimple_has_side_effects (t))
2534 return false;
2536 /* Likewise if the called function is leaf. */
2537 if (gimple_call_flags (t) & ECF_LEAF)
2538 return false;
2540 return true;
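/* Illustrative example (hypothetical C): in

     jmp_buf buf;
     void f (void) { if (setjmp (buf)) return; g (); }

   cfun->calls_setjmp is set, so the call to g () -- which might
   longjmp back to the setjmp receiver -- is treated as able to
   transfer control abnormally, unless g is ECF_LEAF or free of
   side effects. */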
2544 /* Return true if T can make an abnormal transfer of control flow.
2545 Transfers of control flow associated with EH are excluded. */
2547 bool
2548 stmt_can_make_abnormal_goto (gimple *t)
2550 if (computed_goto_p (t))
2551 return true;
2552 if (is_gimple_call (t))
2553 return call_can_make_abnormal_goto (t);
2554 return false;
2558 /* Return true if T represents a stmt that always transfers control. */
2560 bool
2561 is_ctrl_stmt (gimple *t)
2563 switch (gimple_code (t))
2565 case GIMPLE_COND:
2566 case GIMPLE_SWITCH:
2567 case GIMPLE_GOTO:
2568 case GIMPLE_RETURN:
2569 case GIMPLE_RESX:
2570 return true;
2571 default:
2572 return false;
2577 /* Return true if T is a statement that may alter the flow of control
2578 (e.g., a call to a non-returning function). */
2580 bool
2581 is_ctrl_altering_stmt (gimple *t)
2583 gcc_assert (t);
2585 switch (gimple_code (t))
2587 case GIMPLE_CALL:
2588 /* Per stmt call flag indicates whether the call could alter
2589 control flow. */
2590 if (gimple_call_ctrl_altering_p (t))
2591 return true;
2592 break;
2594 case GIMPLE_EH_DISPATCH:
2595 /* EH_DISPATCH branches to the individual catch handlers at
2596 this level of a try or allowed-exceptions region. It can
2597 fallthru to the next statement as well. */
2598 return true;
2600 case GIMPLE_ASM:
2601 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2602 return true;
2603 break;
2605 CASE_GIMPLE_OMP:
2606 /* OpenMP directives alter control flow. */
2607 return true;
2609 case GIMPLE_TRANSACTION:
2610 /* A transaction start alters control flow. */
2611 return true;
2613 default:
2614 break;
2617 /* If a statement can throw, it alters control flow. */
2618 return stmt_can_throw_internal (t);
2622 /* Return true if T is a simple local goto. */
2624 bool
2625 simple_goto_p (gimple *t)
2627 return (gimple_code (t) == GIMPLE_GOTO
2628 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2632 /* Return true if STMT should start a new basic block. PREV_STMT is
2633 the statement preceding STMT. It is used when STMT is a label or a
2634 case label. Labels should only start a new basic block if their
2635 previous statement wasn't a label. Otherwise, a sequence of labels
2636 would generate unnecessary basic blocks that only contain a single
2637 label. */
2639 static inline bool
2640 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2642 if (stmt == NULL)
2643 return false;
2645 /* Labels start a new basic block only if the preceding statement
2646 wasn't a label of the same type. This prevents the creation of
2647 consecutive blocks that have nothing but a single label. */
2648 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2650 /* Nonlocal and computed GOTO targets always start a new block. */
2651 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2652 || FORCED_LABEL (gimple_label_label (label_stmt)))
2653 return true;
2655 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2657 if (DECL_NONLOCAL (gimple_label_label (
2658 as_a <glabel *> (prev_stmt))))
2659 return true;
2661 cfg_stats.num_merged_labels++;
2662 return false;
2664 else
2665 return true;
2667 else if (gimple_code (stmt) == GIMPLE_CALL)
2669 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2670 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2671 start a new block. */
2672 return true;
2673 if (gimple_call_internal_p (stmt, IFN_PHI)
2674 && prev_stmt
2675 && gimple_code (prev_stmt) != GIMPLE_LABEL
2676 && (gimple_code (prev_stmt) != GIMPLE_CALL
2677 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2678 /* PHI nodes start a new block unless preceded by a label
2679 or another PHI. */
2680 return true;
2683 return false;
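/* Illustrative example (hypothetical GIMPLE): in

     L1:
     L2:
       x_1 = 1;

   L2 does not start a new block; it is counted in num_merged_labels
   instead. A FORCED_LABEL or DECL_NONLOCAL label in L2's position
   would start a fresh block. */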
2687 /* Return true if T should end a basic block. */
2689 bool
2690 stmt_ends_bb_p (gimple *t)
2692 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2695 /* Remove block annotations and other data structures. */
2697 void
2698 delete_tree_cfg_annotations (struct function *fn)
2700 vec_free (label_to_block_map_for_fn (fn));
2703 /* Return the virtual PHI node in BB, or NULL if there is none. */
2705 gphi *
2706 get_virtual_phi (basic_block bb)
2708 for (gphi_iterator gsi = gsi_start_phis (bb);
2709 !gsi_end_p (gsi);
2710 gsi_next (&gsi))
2712 gphi *phi = gsi.phi ();
2714 if (virtual_operand_p (PHI_RESULT (phi)))
2715 return phi;
2718 return NULL;
2721 /* Return the first statement in basic block BB. */
2723 gimple *
2724 first_stmt (basic_block bb)
2726 gimple_stmt_iterator i = gsi_start_bb (bb);
2727 gimple *stmt = NULL;
2729 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2731 gsi_next (&i);
2732 stmt = NULL;
2734 return stmt;
2737 /* Return the first non-label statement in basic block BB. */
2739 static gimple *
2740 first_non_label_stmt (basic_block bb)
2742 gimple_stmt_iterator i = gsi_start_bb (bb);
2743 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2744 gsi_next (&i);
2745 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2748 /* Return the last statement in basic block BB. */
2750 gimple *
2751 last_stmt (basic_block bb)
2753 gimple_stmt_iterator i = gsi_last_bb (bb);
2754 gimple *stmt = NULL;
2756 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2758 gsi_prev (&i);
2759 stmt = NULL;
2761 return stmt;
2764 /* Return the last statement of an otherwise empty block. Return NULL
2765 if the block is totally empty, or if it contains more than one
2766 statement. */
2768 gimple *
2769 last_and_only_stmt (basic_block bb)
2771 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2772 gimple *last, *prev;
2774 if (gsi_end_p (i))
2775 return NULL;
2777 last = gsi_stmt (i);
2778 gsi_prev_nondebug (&i);
2779 if (gsi_end_p (i))
2780 return last;
2782 /* Empty statements should no longer appear in the instruction stream.
2783 Everything that might have appeared before should be deleted by
2784 remove_useless_stmts, and the optimizers should just gsi_remove
2785 instead of smashing with build_empty_stmt.
2787 Thus the only thing that should appear here in a block containing
2788 one executable statement is a label. */
2789 prev = gsi_stmt (i);
2790 if (gimple_code (prev) == GIMPLE_LABEL)
2791 return last;
2792 else
2793 return NULL;
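/* For instance (hypothetical): for a block containing only

     L5:
       return x_1;

   the GIMPLE_RETURN is returned; if any second executable statement
   were present the result would be NULL. */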
2796 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2798 static void
2799 reinstall_phi_args (edge new_edge, edge old_edge)
2801 edge_var_map *vm;
2802 int i;
2803 gphi_iterator phis;
2805 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2806 if (!v)
2807 return;
2809 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2810 v->iterate (i, &vm) && !gsi_end_p (phis);
2811 i++, gsi_next (&phis))
2813 gphi *phi = phis.phi ();
2814 tree result = redirect_edge_var_map_result (vm);
2815 tree arg = redirect_edge_var_map_def (vm);
2817 gcc_assert (result == gimple_phi_result (phi));
2819 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2822 redirect_edge_var_map_clear (old_edge);
2825 /* Returns the basic block after which the new basic block created
2826 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2827 near its "logical" location. This is of most help to humans looking
2828 at debugging dumps. */
2830 basic_block
2831 split_edge_bb_loc (edge edge_in)
2833 basic_block dest = edge_in->dest;
2834 basic_block dest_prev = dest->prev_bb;
2836 if (dest_prev)
2838 edge e = find_edge (dest_prev, dest);
2839 if (e && !(e->flags & EDGE_COMPLEX))
2840 return edge_in->src;
2842 return dest_prev;
2845 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2846 Abort on abnormal edges. */
2848 static basic_block
2849 gimple_split_edge (edge edge_in)
2851 basic_block new_bb, after_bb, dest;
2852 edge new_edge, e;
2854 /* Abnormal edges cannot be split. */
2855 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2857 dest = edge_in->dest;
2859 after_bb = split_edge_bb_loc (edge_in);
2861 new_bb = create_empty_bb (after_bb);
2862 new_bb->count = edge_in->count ();
2864 e = redirect_edge_and_branch (edge_in, new_bb);
2865 gcc_assert (e == edge_in);
2867 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2868 reinstall_phi_args (new_edge, e);
2870 return new_bb;
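/* Illustrative sketch: splitting the edge A->B,

     A --> B    becomes    A --> NEW --> B

   where NEW is an empty block with the edge's count, and the PHI
   arguments that B expected from A are reinstalled on the fallthru
   edge NEW->B. */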
2874 /* Verify properties of the address expression T with base object BASE. */
2876 static tree
2877 verify_address (tree t, tree base)
2879 bool old_constant;
2880 bool old_side_effects;
2881 bool new_constant;
2882 bool new_side_effects;
2884 old_constant = TREE_CONSTANT (t);
2885 old_side_effects = TREE_SIDE_EFFECTS (t);
2887 recompute_tree_invariant_for_addr_expr (t);
2888 new_side_effects = TREE_SIDE_EFFECTS (t);
2889 new_constant = TREE_CONSTANT (t);
2891 if (old_constant != new_constant)
2893 error ("constant not recomputed when ADDR_EXPR changed");
2894 return t;
2896 if (old_side_effects != new_side_effects)
2898 error ("side effects not recomputed when ADDR_EXPR changed");
2899 return t;
2902 if (!(VAR_P (base)
2903 || TREE_CODE (base) == PARM_DECL
2904 || TREE_CODE (base) == RESULT_DECL))
2905 return NULL_TREE;
2907 if (DECL_GIMPLE_REG_P (base))
2909 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2910 return base;
2913 return NULL_TREE;
2916 /* Callback for walk_tree, check that all elements with address taken are
2917 properly noticed as such. DATA is unused by this function itself
2918 but is passed through to recursive walk_tree invocations. */
2920 static tree
2921 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2923 tree t = *tp, x;
2925 if (TYPE_P (t))
2926 *walk_subtrees = 0;
2928 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2929 #define CHECK_OP(N, MSG) \
2930 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2931 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2933 switch (TREE_CODE (t))
2935 case SSA_NAME:
2936 if (SSA_NAME_IN_FREE_LIST (t))
2938 error ("SSA name in freelist but still referenced");
2939 return *tp;
2941 break;
2943 case PARM_DECL:
2944 case VAR_DECL:
2945 case RESULT_DECL:
2947 tree context = decl_function_context (t);
2948 if (context != cfun->decl
2949 && !SCOPE_FILE_SCOPE_P (context)
2950 && !TREE_STATIC (t)
2951 && !DECL_EXTERNAL (t))
2953 error ("Local declaration from a different function");
2954 return t;
2957 break;
2959 case INDIRECT_REF:
2960 error ("INDIRECT_REF in gimple IL");
2961 return t;
2963 case MEM_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!POINTER_TYPE_P (TREE_TYPE (x))
2966 || !is_gimple_mem_ref_addr (x))
2968 error ("invalid first operand of MEM_REF");
2969 return x;
2971 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2972 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2974 error ("invalid offset operand of MEM_REF");
2975 return TREE_OPERAND (t, 1);
2977 if (TREE_CODE (x) == ADDR_EXPR)
2979 tree va = verify_address (x, TREE_OPERAND (x, 0));
2980 if (va)
2981 return va;
2982 x = TREE_OPERAND (x, 0);
2984 walk_tree (&x, verify_expr, data, NULL);
2985 *walk_subtrees = 0;
2986 break;
2988 case ASSERT_EXPR:
2989 x = fold (ASSERT_EXPR_COND (t));
2990 if (x == boolean_false_node)
2992 error ("ASSERT_EXPR with an always-false condition");
2993 return *tp;
2995 break;
2997 case MODIFY_EXPR:
2998 error ("MODIFY_EXPR not expected while having tuples");
2999 return *tp;
3001 case ADDR_EXPR:
3003 tree tem;
3005 gcc_assert (is_gimple_address (t));
3007 /* Skip any references (they will be checked when we recurse down the
3008 tree) and ensure that any variable used as a prefix is marked
3009 addressable. */
3010 for (x = TREE_OPERAND (t, 0);
3011 handled_component_p (x);
3012 x = TREE_OPERAND (x, 0))
3015 if ((tem = verify_address (t, x)))
3016 return tem;
3018 if (!(VAR_P (x)
3019 || TREE_CODE (x) == PARM_DECL
3020 || TREE_CODE (x) == RESULT_DECL))
3021 return NULL;
3023 if (!TREE_ADDRESSABLE (x))
3025 error ("address taken, but ADDRESSABLE bit not set");
3026 return x;
3029 break;
3032 case COND_EXPR:
3033 x = COND_EXPR_COND (t);
3034 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3036 error ("non-integral used in condition");
3037 return x;
3039 if (!is_gimple_condexpr (x))
3041 error ("invalid conditional operand");
3042 return x;
3044 break;
3046 case NON_LVALUE_EXPR:
3047 case TRUTH_NOT_EXPR:
3048 gcc_unreachable ();
3050 CASE_CONVERT:
3051 case FIX_TRUNC_EXPR:
3052 case FLOAT_EXPR:
3053 case NEGATE_EXPR:
3054 case ABS_EXPR:
3055 case BIT_NOT_EXPR:
3056 CHECK_OP (0, "invalid operand to unary operator");
3057 break;
3059 case REALPART_EXPR:
3060 case IMAGPART_EXPR:
3061 case BIT_FIELD_REF:
3062 if (!is_gimple_reg_type (TREE_TYPE (t)))
3064 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3065 return t;
3068 if (TREE_CODE (t) == BIT_FIELD_REF)
3070 tree t0 = TREE_OPERAND (t, 0);
3071 tree t1 = TREE_OPERAND (t, 1);
3072 tree t2 = TREE_OPERAND (t, 2);
3073 if (!tree_fits_uhwi_p (t1)
3074 || !tree_fits_uhwi_p (t2)
3075 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3076 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3078 error ("invalid position or size operand to BIT_FIELD_REF");
3079 return t;
3081 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3082 && (TYPE_PRECISION (TREE_TYPE (t))
3083 != tree_to_uhwi (t1)))
3085 error ("integral result type precision does not match "
3086 "field size of BIT_FIELD_REF");
3087 return t;
3089 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3090 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3091 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3092 != tree_to_uhwi (t1)))
3094 error ("mode size of non-integral result does not "
3095 "match field size of BIT_FIELD_REF");
3096 return t;
3098 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3099 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3100 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3102 error ("position plus size exceeds size of referenced object in "
3103 "BIT_FIELD_REF");
3104 return t;
3107 t = TREE_OPERAND (t, 0);
3109 /* Fall-through. */
3110 case COMPONENT_REF:
3111 case ARRAY_REF:
3112 case ARRAY_RANGE_REF:
3113 case VIEW_CONVERT_EXPR:
3114 /* We have a nest of references. Verify that each of the operands
3115 that determine where to reference is either a constant or a variable,
3116 verify that the base is valid, and then show we've already checked
3117 the subtrees. */
3118 while (handled_component_p (t))
3120 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3121 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3122 else if (TREE_CODE (t) == ARRAY_REF
3123 || TREE_CODE (t) == ARRAY_RANGE_REF)
3125 CHECK_OP (1, "invalid array index");
3126 if (TREE_OPERAND (t, 2))
3127 CHECK_OP (2, "invalid array lower bound");
3128 if (TREE_OPERAND (t, 3))
3129 CHECK_OP (3, "invalid array stride");
3131 else if (TREE_CODE (t) == BIT_FIELD_REF
3132 || TREE_CODE (t) == REALPART_EXPR
3133 || TREE_CODE (t) == IMAGPART_EXPR)
3135 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3136 "REALPART_EXPR");
3137 return t;
3140 t = TREE_OPERAND (t, 0);
3143 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3145 error ("invalid reference prefix");
3146 return t;
3148 walk_tree (&t, verify_expr, data, NULL);
3149 *walk_subtrees = 0;
3150 break;
3151 case PLUS_EXPR:
3152 case MINUS_EXPR:
3153 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
3154 POINTER_PLUS_EXPR. */
3155 if (POINTER_TYPE_P (TREE_TYPE (t)))
3157 error ("invalid operand to plus/minus, type is a pointer");
3158 return t;
3160 CHECK_OP (0, "invalid operand to binary operator");
3161 CHECK_OP (1, "invalid operand to binary operator");
3162 break;
3164 case POINTER_DIFF_EXPR:
3165 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3166 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3168 error ("invalid operand to pointer diff, operand is not a pointer");
3169 return t;
3171 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3172 || TYPE_UNSIGNED (TREE_TYPE (t))
3173 || (TYPE_PRECISION (TREE_TYPE (t))
3174 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3176 error ("invalid type for pointer diff");
3177 return t;
3179 CHECK_OP (0, "invalid operand to pointer diff");
3180 CHECK_OP (1, "invalid operand to pointer diff");
3181 break;
3183 case POINTER_PLUS_EXPR:
3184 /* Check to make sure the first operand is a pointer or reference type. */
3185 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3187 error ("invalid operand to pointer plus, first operand is not a pointer");
3188 return t;
3190 /* Check to make sure the second operand is a ptrofftype. */
3191 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3193 error ("invalid operand to pointer plus, second operand is not an "
3194 "integer type of appropriate width");
3195 return t;
3197 /* FALLTHROUGH */
3198 case LT_EXPR:
3199 case LE_EXPR:
3200 case GT_EXPR:
3201 case GE_EXPR:
3202 case EQ_EXPR:
3203 case NE_EXPR:
3204 case UNORDERED_EXPR:
3205 case ORDERED_EXPR:
3206 case UNLT_EXPR:
3207 case UNLE_EXPR:
3208 case UNGT_EXPR:
3209 case UNGE_EXPR:
3210 case UNEQ_EXPR:
3211 case LTGT_EXPR:
3212 case MULT_EXPR:
3213 case TRUNC_DIV_EXPR:
3214 case CEIL_DIV_EXPR:
3215 case FLOOR_DIV_EXPR:
3216 case ROUND_DIV_EXPR:
3217 case TRUNC_MOD_EXPR:
3218 case CEIL_MOD_EXPR:
3219 case FLOOR_MOD_EXPR:
3220 case ROUND_MOD_EXPR:
3221 case RDIV_EXPR:
3222 case EXACT_DIV_EXPR:
3223 case MIN_EXPR:
3224 case MAX_EXPR:
3225 case LSHIFT_EXPR:
3226 case RSHIFT_EXPR:
3227 case LROTATE_EXPR:
3228 case RROTATE_EXPR:
3229 case BIT_IOR_EXPR:
3230 case BIT_XOR_EXPR:
3231 case BIT_AND_EXPR:
3232 CHECK_OP (0, "invalid operand to binary operator");
3233 CHECK_OP (1, "invalid operand to binary operator");
3234 break;
3236 case CONSTRUCTOR:
3237 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3238 *walk_subtrees = 0;
3239 break;
3241 case CASE_LABEL_EXPR:
3242 if (CASE_CHAIN (t))
3244 error ("invalid CASE_CHAIN");
3245 return t;
3247 break;
3249 default:
3250 break;
3252 return NULL;
3254 #undef CHECK_OP
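/* Illustrative example (hypothetical): given "char *p; size_t n;",
   the address computation p + n must appear on GIMPLE as a
   POINTER_PLUS_EXPR whose second operand has ptrofftype; a plain
   PLUS_EXPR or MINUS_EXPR with a pointer-typed result is rejected
   by the check above. */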
3258 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3259 Returns true if there is an error, otherwise false. */
3261 static bool
3262 verify_types_in_gimple_min_lval (tree expr)
3264 tree op;
3266 if (is_gimple_id (expr))
3267 return false;
3269 if (TREE_CODE (expr) != TARGET_MEM_REF
3270 && TREE_CODE (expr) != MEM_REF)
3272 error ("invalid expression for min lvalue");
3273 return true;
3276 /* TARGET_MEM_REFs are strange beasts. */
3277 if (TREE_CODE (expr) == TARGET_MEM_REF)
3278 return false;
3280 op = TREE_OPERAND (expr, 0);
3281 if (!is_gimple_val (op))
3283 error ("invalid operand in indirect reference");
3284 debug_generic_stmt (op);
3285 return true;
3287 /* Memory references now generally can involve a value conversion. */
3289 return false;
3292 /* Verify if EXPR is a valid GIMPLE reference expression. If
3293 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3294 if there is an error, otherwise false. */
3296 static bool
3297 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3299 while (handled_component_p (expr))
3301 tree op = TREE_OPERAND (expr, 0);
3303 if (TREE_CODE (expr) == ARRAY_REF
3304 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3306 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3307 || (TREE_OPERAND (expr, 2)
3308 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3309 || (TREE_OPERAND (expr, 3)
3310 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3312 error ("invalid operands to array reference");
3313 debug_generic_stmt (expr);
3314 return true;
3318 /* Verify if the reference array element types are compatible. */
3319 if (TREE_CODE (expr) == ARRAY_REF
3320 && !useless_type_conversion_p (TREE_TYPE (expr),
3321 TREE_TYPE (TREE_TYPE (op))))
3323 error ("type mismatch in array reference");
3324 debug_generic_stmt (TREE_TYPE (expr));
3325 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3326 return true;
3328 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3329 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3330 TREE_TYPE (TREE_TYPE (op))))
3332 error ("type mismatch in array range reference");
3333 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3334 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3335 return true;
3338 if ((TREE_CODE (expr) == REALPART_EXPR
3339 || TREE_CODE (expr) == IMAGPART_EXPR)
3340 && !useless_type_conversion_p (TREE_TYPE (expr),
3341 TREE_TYPE (TREE_TYPE (op))))
3343 error ("type mismatch in real/imagpart reference");
3344 debug_generic_stmt (TREE_TYPE (expr));
3345 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3346 return true;
3349 if (TREE_CODE (expr) == COMPONENT_REF
3350 && !useless_type_conversion_p (TREE_TYPE (expr),
3351 TREE_TYPE (TREE_OPERAND (expr, 1))))
3353 error ("type mismatch in component reference");
3354 debug_generic_stmt (TREE_TYPE (expr));
3355 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3356 return true;
3359 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3361 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3362 that their operand is not an SSA name or an invariant when
3363 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3364 bug). Otherwise there is nothing to verify, gross mismatches at
3365 most invoke undefined behavior. */
3366 if (require_lvalue
3367 && (TREE_CODE (op) == SSA_NAME
3368 || is_gimple_min_invariant (op)))
3370 error ("conversion of an SSA_NAME on the left hand side");
3371 debug_generic_stmt (expr);
3372 return true;
3374 else if (TREE_CODE (op) == SSA_NAME
3375 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3377 error ("conversion of register to a different size");
3378 debug_generic_stmt (expr);
3379 return true;
3381 else if (!handled_component_p (op))
3382 return false;
3385 expr = op;
3388 if (TREE_CODE (expr) == MEM_REF)
3390 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3392 error ("invalid address operand in MEM_REF");
3393 debug_generic_stmt (expr);
3394 return true;
3396 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3397 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3399 error ("invalid offset operand in MEM_REF");
3400 debug_generic_stmt (expr);
3401 return true;
3404 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3406 if (!TMR_BASE (expr)
3407 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3409 error ("invalid address operand in TARGET_MEM_REF");
3410 return true;
3412 if (!TMR_OFFSET (expr)
3413 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3414 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3416 error ("invalid offset operand in TARGET_MEM_REF");
3417 debug_generic_stmt (expr);
3418 return true;
3422 return ((require_lvalue || !is_gimple_min_invariant (expr))
3423 && verify_types_in_gimple_min_lval (expr));
3426 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3427 list of pointer-to types that is trivially convertible to DEST. */
3429 static bool
3430 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3432 tree src;
3434 if (!TYPE_POINTER_TO (src_obj))
3435 return true;
3437 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3438 if (useless_type_conversion_p (dest, src))
3439 return true;
3441 return false;
3444 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3445 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3447 static bool
3448 valid_fixed_convert_types_p (tree type1, tree type2)
3450 return (FIXED_POINT_TYPE_P (type1)
3451 && (INTEGRAL_TYPE_P (type2)
3452 || SCALAR_FLOAT_TYPE_P (type2)
3453 || FIXED_POINT_TYPE_P (type2)));
3456 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3457 is a problem, otherwise false. */
3459 static bool
3460 verify_gimple_call (gcall *stmt)
3462 tree fn = gimple_call_fn (stmt);
3463 tree fntype, fndecl;
3464 unsigned i;
3466 if (gimple_call_internal_p (stmt))
3468 if (fn)
3470 error ("gimple call has two targets");
3471 debug_generic_stmt (fn);
3472 return true;
3474 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3475 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3477 return false;
3480 else
3482 if (!fn)
3484 error ("gimple call has no target");
3485 return true;
3489 if (fn && !is_gimple_call_addr (fn))
3491 error ("invalid function in gimple call");
3492 debug_generic_stmt (fn);
3493 return true;
3496 if (fn
3497 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3498 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3499 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3501 error ("non-function in gimple call");
3502 return true;
3505 fndecl = gimple_call_fndecl (stmt);
3506 if (fndecl
3507 && TREE_CODE (fndecl) == FUNCTION_DECL
3508 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3509 && !DECL_PURE_P (fndecl)
3510 && !TREE_READONLY (fndecl))
3512 error ("invalid pure const state for function");
3513 return true;
3516 tree lhs = gimple_call_lhs (stmt);
3517 if (lhs
3518 && (!is_gimple_lvalue (lhs)
3519 || verify_types_in_gimple_reference (lhs, true)))
3521 error ("invalid LHS in gimple call");
3522 return true;
3525 if (gimple_call_ctrl_altering_p (stmt)
3526 && gimple_call_noreturn_p (stmt)
3527 && should_remove_lhs_p (lhs))
3529 error ("LHS in noreturn call");
3530 return true;
3533 fntype = gimple_call_fntype (stmt);
3534 if (fntype
3535 && lhs
3536 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3537 /* ??? At least C++ misses conversions at assignments from
3538 void * call results.
3539 For now simply allow arbitrary pointer type conversions. */
3540 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3541 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3543 error ("invalid conversion in gimple call");
3544 debug_generic_stmt (TREE_TYPE (lhs));
3545 debug_generic_stmt (TREE_TYPE (fntype));
3546 return true;
3549 if (gimple_call_chain (stmt)
3550 && !is_gimple_val (gimple_call_chain (stmt)))
3552 error ("invalid static chain in gimple call");
3553 debug_generic_stmt (gimple_call_chain (stmt));
3554 return true;
3557 /* If there is a static chain argument, the call should either be
3558 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3559 if (gimple_call_chain (stmt)
3560 && fndecl
3561 && !DECL_STATIC_CHAIN (fndecl))
3563 error ("static chain with function that doesn%'t use one");
3564 return true;
3567 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3569 switch (DECL_FUNCTION_CODE (fndecl))
3571 case BUILT_IN_UNREACHABLE:
3572 case BUILT_IN_TRAP:
3573 if (gimple_call_num_args (stmt) > 0)
3575 /* Built-in unreachable with parameters might not be caught by
3576 undefined behavior sanitizer. Front-ends do check that users do
3577 not call them that way, but we also produce calls to
3578 __builtin_unreachable internally, for example when IPA figures
3579 out a call cannot happen in a legal program. In such cases,
3580 we must make sure arguments are stripped off. */
3581 error ("__builtin_unreachable or __builtin_trap call with "
3582 "arguments");
3583 return true;
3585 break;
3586 default:
3587 break;
3591 /* ??? The C frontend passes unpromoted arguments in case it
3592 didn't see a function declaration before the call. So for now
3593 leave the call arguments mostly unverified. Once we gimplify
3594 unit-at-a-time we have a chance to fix this. */
3596 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3598 tree arg = gimple_call_arg (stmt, i);
3599 if ((is_gimple_reg_type (TREE_TYPE (arg))
3600 && !is_gimple_val (arg))
3601 || (!is_gimple_reg_type (TREE_TYPE (arg))
3602 && !is_gimple_lvalue (arg)))
3604 error ("invalid argument to gimple call");
3605 debug_generic_expr (arg);
3606 return true;
3610 return false;
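/* For instance (hypothetical): with "int noret (void)" declared
   noreturn, leaving an LHS on the call as in "x_1 = noret ();" is
   diagnosed as "LHS in noreturn call" once the call is marked
   control-altering, since the stored value can never be used. */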
3613 /* Verifies the gimple comparison with the result type TYPE and
3614 the operands OP0 and OP1; the comparison code is CODE. */
3616 static bool
3617 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3619 tree op0_type = TREE_TYPE (op0);
3620 tree op1_type = TREE_TYPE (op1);
3622 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3624 error ("invalid operands in gimple comparison");
3625 return true;
3628 /* For comparisons we do not have the operations type as the
3629 effective type the comparison is carried out in. Instead
3630 we require that either the first operand is trivially
3631 convertible into the second, or the other way around.
3632 Because we special-case pointers to void we allow
3633 comparisons of pointers with the same mode as well. */
3634 if (!useless_type_conversion_p (op0_type, op1_type)
3635 && !useless_type_conversion_p (op1_type, op0_type)
3636 && (!POINTER_TYPE_P (op0_type)
3637 || !POINTER_TYPE_P (op1_type)
3638 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3640 error ("mismatching comparison operand types");
3641 debug_generic_expr (op0_type);
3642 debug_generic_expr (op1_type);
3643 return true;
3646 /* The resulting type of a comparison may be an effective boolean type. */
3647 if (INTEGRAL_TYPE_P (type)
3648 && (TREE_CODE (type) == BOOLEAN_TYPE
3649 || TYPE_PRECISION (type) == 1))
3651 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3652 || TREE_CODE (op1_type) == VECTOR_TYPE)
3653 && code != EQ_EXPR && code != NE_EXPR
3654 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3655 && !VECTOR_INTEGER_TYPE_P (op0_type))
3657 error ("unsupported operation or type for vector comparison"
3658 " returning a boolean");
3659 debug_generic_expr (op0_type);
3660 debug_generic_expr (op1_type);
3661 return true;
3664 /* Or a boolean vector type with the same element count
3665 as the comparison operand types. */
3666 else if (TREE_CODE (type) == VECTOR_TYPE
3667 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3669 if (TREE_CODE (op0_type) != VECTOR_TYPE
3670 || TREE_CODE (op1_type) != VECTOR_TYPE)
3672 error ("non-vector operands in vector comparison");
3673 debug_generic_expr (op0_type);
3674 debug_generic_expr (op1_type);
3675 return true;
3678 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3680 error ("invalid vector comparison resulting type");
3681 debug_generic_expr (type);
3682 return true;
3685 else
3687 error ("bogus comparison result type");
3688 debug_generic_expr (type);
3689 return true;
3692 return false;
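/* For example (hypothetical): "_1 = x_2 == y_3" requires the types of
   x_2 and y_3 to be trivially convertible to one another (or to be
   pointers of the same mode), and the type of _1 to be an effective
   boolean; comparing an int with a float, or giving the comparison a
   plain 32-bit int result type, is diagnosed. */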
3695 /* Verify a gimple assignment statement STMT with a unary rhs.
3696 Returns true if anything is wrong. */
3698 static bool
3699 verify_gimple_assign_unary (gassign *stmt)
3701 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3702 tree lhs = gimple_assign_lhs (stmt);
3703 tree lhs_type = TREE_TYPE (lhs);
3704 tree rhs1 = gimple_assign_rhs1 (stmt);
3705 tree rhs1_type = TREE_TYPE (rhs1);
3707 if (!is_gimple_reg (lhs))
3709 error ("non-register as LHS of unary operation");
3710 return true;
3713 if (!is_gimple_val (rhs1))
3715 error ("invalid operand in unary operation");
3716 return true;
3719 /* First handle conversions. */
3720 switch (rhs_code)
3722 CASE_CONVERT:
3724 /* Allow conversions from pointer type to integral type only if
3725 there is no sign or zero extension involved.
3726 For targets where the precision of ptrofftype doesn't match that
3727 of pointers we need to allow arbitrary conversions to ptrofftype. */
3728 if ((POINTER_TYPE_P (lhs_type)
3729 && INTEGRAL_TYPE_P (rhs1_type))
3730 || (POINTER_TYPE_P (rhs1_type)
3731 && INTEGRAL_TYPE_P (lhs_type)
3732 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3733 || ptrofftype_p (sizetype))))
3734 return false;
3736 /* Allow conversion from integral to offset type and vice versa. */
3737 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3738 && INTEGRAL_TYPE_P (rhs1_type))
3739 || (INTEGRAL_TYPE_P (lhs_type)
3740 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3741 return false;
3743 /* Otherwise assert we are converting between types of the
3744 same kind. */
3745 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3747 error ("invalid types in nop conversion");
3748 debug_generic_expr (lhs_type);
3749 debug_generic_expr (rhs1_type);
3750 return true;
3753 return false;
3756 case ADDR_SPACE_CONVERT_EXPR:
3758 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3759 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3760 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3762 error ("invalid types in address space conversion");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 return true;
3768 return false;
3771 case FIXED_CONVERT_EXPR:
3773 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3774 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3776 error ("invalid types in fixed-point conversion");
3777 debug_generic_expr (lhs_type);
3778 debug_generic_expr (rhs1_type);
3779 return true;
3782 return false;
3785 case FLOAT_EXPR:
3787 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3788 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3789 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3791 error ("invalid types in conversion to floating point");
3792 debug_generic_expr (lhs_type);
3793 debug_generic_expr (rhs1_type);
3794 return true;
3797 return false;
3800 case FIX_TRUNC_EXPR:
3802 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3803 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3804 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3806 error ("invalid types in conversion to integer");
3807 debug_generic_expr (lhs_type);
3808 debug_generic_expr (rhs1_type);
3809 return true;
3812 return false;
3815 case VEC_UNPACK_HI_EXPR:
3816 case VEC_UNPACK_LO_EXPR:
3817 case VEC_UNPACK_FLOAT_HI_EXPR:
3818 case VEC_UNPACK_FLOAT_LO_EXPR:
3819 /* FIXME. */
3820 return false;
3822 case NEGATE_EXPR:
3823 case ABS_EXPR:
3824 case BIT_NOT_EXPR:
3825 case PAREN_EXPR:
3826 case CONJ_EXPR:
3827 break;
3829 default:
3830 gcc_unreachable ();
3833 /* For the remaining codes assert there is no conversion involved. */
3834 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3836 error ("non-trivial conversion in unary operation");
3837 debug_generic_expr (lhs_type);
3838 debug_generic_expr (rhs1_type);
3839 return true;
3842 return false;
3845 /* Verify a gimple assignment statement STMT with a binary rhs.
3846 Returns true if anything is wrong. */
3848 static bool
3849 verify_gimple_assign_binary (gassign *stmt)
3851 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3852 tree lhs = gimple_assign_lhs (stmt);
3853 tree lhs_type = TREE_TYPE (lhs);
3854 tree rhs1 = gimple_assign_rhs1 (stmt);
3855 tree rhs1_type = TREE_TYPE (rhs1);
3856 tree rhs2 = gimple_assign_rhs2 (stmt);
3857 tree rhs2_type = TREE_TYPE (rhs2);
3859 if (!is_gimple_reg (lhs))
3861 error ("non-register as LHS of binary operation");
3862 return true;
3865 if (!is_gimple_val (rhs1)
3866 || !is_gimple_val (rhs2))
3868 error ("invalid operands in binary operation");
3869 return true;
3872 /* First handle operations that involve different types. */
3873 switch (rhs_code)
3875 case COMPLEX_EXPR:
3877 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3878 || !(INTEGRAL_TYPE_P (rhs1_type)
3879 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3880 || !(INTEGRAL_TYPE_P (rhs2_type)
3881 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3883 error ("type mismatch in complex expression");
3884 debug_generic_expr (lhs_type);
3885 debug_generic_expr (rhs1_type);
3886 debug_generic_expr (rhs2_type);
3887 return true;
3890 return false;
3893 case LSHIFT_EXPR:
3894 case RSHIFT_EXPR:
3895 case LROTATE_EXPR:
3896 case RROTATE_EXPR:
3898 /* Shifts and rotates are ok on integral types, fixed point
3899 types and integer vector types. */
3900 if ((!INTEGRAL_TYPE_P (rhs1_type)
3901 && !FIXED_POINT_TYPE_P (rhs1_type)
3902 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3903 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3904 || (!INTEGRAL_TYPE_P (rhs2_type)
3905 /* Vector shifts of vectors are also ok. */
3906 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3907 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3908 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3909 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3910 || !useless_type_conversion_p (lhs_type, rhs1_type))
3912 error ("type mismatch in shift expression");
3913 debug_generic_expr (lhs_type);
3914 debug_generic_expr (rhs1_type);
3915 debug_generic_expr (rhs2_type);
3916 return true;
3919 return false;
3922 case WIDEN_LSHIFT_EXPR:
3924 if (!INTEGRAL_TYPE_P (lhs_type)
3925 || !INTEGRAL_TYPE_P (rhs1_type)
3926 || TREE_CODE (rhs2) != INTEGER_CST
3927 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3929 error ("type mismatch in widening vector shift expression");
3930 debug_generic_expr (lhs_type);
3931 debug_generic_expr (rhs1_type);
3932 debug_generic_expr (rhs2_type);
3933 return true;
3936 return false;
3939 case VEC_WIDEN_LSHIFT_HI_EXPR:
3940 case VEC_WIDEN_LSHIFT_LO_EXPR:
3942 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3943 || TREE_CODE (lhs_type) != VECTOR_TYPE
3944 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3945 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3946 || TREE_CODE (rhs2) != INTEGER_CST
3947 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3948 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3950 error ("type mismatch in widening vector shift expression");
3951 debug_generic_expr (lhs_type);
3952 debug_generic_expr (rhs1_type);
3953 debug_generic_expr (rhs2_type);
3954 return true;
3957 return false;
3960 case PLUS_EXPR:
3961 case MINUS_EXPR:
3963 tree lhs_etype = lhs_type;
3964 tree rhs1_etype = rhs1_type;
3965 tree rhs2_etype = rhs2_type;
3966 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3968 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3969 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3971 error ("invalid non-vector operands to vector valued plus");
3972 return true;
3974 lhs_etype = TREE_TYPE (lhs_type);
3975 rhs1_etype = TREE_TYPE (rhs1_type);
3976 rhs2_etype = TREE_TYPE (rhs2_type);
3978 if (POINTER_TYPE_P (lhs_etype)
3979 || POINTER_TYPE_P (rhs1_etype)
3980 || POINTER_TYPE_P (rhs2_etype))
3982 error ("invalid (pointer) operands to plus/minus");
3983 return true;
3986 /* Continue with generic binary expression handling. */
3987 break;
3990 case POINTER_PLUS_EXPR:
3992 if (!POINTER_TYPE_P (rhs1_type)
3993 || !useless_type_conversion_p (lhs_type, rhs1_type)
3994 || !ptrofftype_p (rhs2_type))
3996 error ("type mismatch in pointer plus expression");
3997 debug_generic_stmt (lhs_type);
3998 debug_generic_stmt (rhs1_type);
3999 debug_generic_stmt (rhs2_type);
4000 return true;
4003 return false;
4006 case POINTER_DIFF_EXPR:
4008 if (!POINTER_TYPE_P (rhs1_type)
4009 || !POINTER_TYPE_P (rhs2_type)
4010 /* Because we special-case pointers to void we allow difference
4011 of arbitrary pointers with the same mode. */
4012 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4013 || TREE_CODE (lhs_type) != INTEGER_TYPE
4014 || TYPE_UNSIGNED (lhs_type)
4015 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4017 error ("type mismatch in pointer diff expression");
4018 debug_generic_stmt (lhs_type);
4019 debug_generic_stmt (rhs1_type);
4020 debug_generic_stmt (rhs2_type);
4021 return true;
4024 return false;
4027 case TRUTH_ANDIF_EXPR:
4028 case TRUTH_ORIF_EXPR:
4029 case TRUTH_AND_EXPR:
4030 case TRUTH_OR_EXPR:
4031 case TRUTH_XOR_EXPR:
4033 gcc_unreachable ();
4035 case LT_EXPR:
4036 case LE_EXPR:
4037 case GT_EXPR:
4038 case GE_EXPR:
4039 case EQ_EXPR:
4040 case NE_EXPR:
4041 case UNORDERED_EXPR:
4042 case ORDERED_EXPR:
4043 case UNLT_EXPR:
4044 case UNLE_EXPR:
4045 case UNGT_EXPR:
4046 case UNGE_EXPR:
4047 case UNEQ_EXPR:
4048 case LTGT_EXPR:
4049 /* Comparisons are also binary, but the result type is not
4050 connected to the operand types. */
4051 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4053 case WIDEN_MULT_EXPR:
4054 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4055 return true;
4056 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4057 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4059 case WIDEN_SUM_EXPR:
4061 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4062 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4063 && ((!INTEGRAL_TYPE_P (rhs1_type)
4064 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4065 || (!INTEGRAL_TYPE_P (lhs_type)
4066 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4067 || !useless_type_conversion_p (lhs_type, rhs2_type)
4068 || (GET_MODE_SIZE (element_mode (rhs2_type))
4069 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4071 error ("type mismatch in widening sum reduction");
4072 debug_generic_expr (lhs_type);
4073 debug_generic_expr (rhs1_type);
4074 debug_generic_expr (rhs2_type);
4075 return true;
4077 return false;
4080 case VEC_WIDEN_MULT_HI_EXPR:
4081 case VEC_WIDEN_MULT_LO_EXPR:
4082 case VEC_WIDEN_MULT_EVEN_EXPR:
4083 case VEC_WIDEN_MULT_ODD_EXPR:
4085 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4086 || TREE_CODE (lhs_type) != VECTOR_TYPE
4087 || !types_compatible_p (rhs1_type, rhs2_type)
4088 || (GET_MODE_SIZE (element_mode (lhs_type))
4089 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4091 error ("type mismatch in vector widening multiplication");
4092 debug_generic_expr (lhs_type);
4093 debug_generic_expr (rhs1_type);
4094 debug_generic_expr (rhs2_type);
4095 return true;
4097 return false;
4100 case VEC_PACK_TRUNC_EXPR:
4101 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4102 vector boolean types. */
4103 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4104 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4105 && types_compatible_p (rhs1_type, rhs2_type)
4106 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4107 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4108 return false;
4110 /* Fallthru. */
4111 case VEC_PACK_SAT_EXPR:
4112 case VEC_PACK_FIX_TRUNC_EXPR:
4114 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4115 || TREE_CODE (lhs_type) != VECTOR_TYPE
4116 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4117 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4118 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4119 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4120 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4121 || !types_compatible_p (rhs1_type, rhs2_type)
4122 || (GET_MODE_SIZE (element_mode (rhs1_type))
4123 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4125 error ("type mismatch in vector pack expression");
4126 debug_generic_expr (lhs_type);
4127 debug_generic_expr (rhs1_type);
4128 debug_generic_expr (rhs2_type);
4129 return true;
4132 return false;
4135 case MULT_EXPR:
4136 case MULT_HIGHPART_EXPR:
4137 case TRUNC_DIV_EXPR:
4138 case CEIL_DIV_EXPR:
4139 case FLOOR_DIV_EXPR:
4140 case ROUND_DIV_EXPR:
4141 case TRUNC_MOD_EXPR:
4142 case CEIL_MOD_EXPR:
4143 case FLOOR_MOD_EXPR:
4144 case ROUND_MOD_EXPR:
4145 case RDIV_EXPR:
4146 case EXACT_DIV_EXPR:
4147 case MIN_EXPR:
4148 case MAX_EXPR:
4149 case BIT_IOR_EXPR:
4150 case BIT_XOR_EXPR:
4151 case BIT_AND_EXPR:
4152 /* Continue with generic binary expression handling. */
4153 break;
4155 default:
4156 gcc_unreachable ();
4159 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4160 || !useless_type_conversion_p (lhs_type, rhs2_type))
4162 error ("type mismatch in binary expression");
4163 debug_generic_stmt (lhs_type);
4164 debug_generic_stmt (rhs1_type);
4165 debug_generic_stmt (rhs2_type);
4166 return true;
4169 return false;
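/* Illustrative sketch, not part of the verifier: for the generic
   binary case above all three types must agree, so with hypothetical
   SSA names a stmt like

     x_1 = a_2 + b_3;

   is accepted only when the types of a_2 and b_3 are trivially
   convertible to the type of x_1; otherwise the checks above report
   "type mismatch in binary expression".  */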
4172 /* Verify a gimple assignment statement STMT with a ternary rhs.
4173 Returns true if anything is wrong. */
4175 static bool
4176 verify_gimple_assign_ternary (gassign *stmt)
4178 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4179 tree lhs = gimple_assign_lhs (stmt);
4180 tree lhs_type = TREE_TYPE (lhs);
4181 tree rhs1 = gimple_assign_rhs1 (stmt);
4182 tree rhs1_type = TREE_TYPE (rhs1);
4183 tree rhs2 = gimple_assign_rhs2 (stmt);
4184 tree rhs2_type = TREE_TYPE (rhs2);
4185 tree rhs3 = gimple_assign_rhs3 (stmt);
4186 tree rhs3_type = TREE_TYPE (rhs3);
4188 if (!is_gimple_reg (lhs))
4190 error ("non-register as LHS of ternary operation");
4191 return true;
4194 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4195 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4196 || !is_gimple_val (rhs2)
4197 || !is_gimple_val (rhs3))
4199 error ("invalid operands in ternary operation");
4200 return true;
4203 /* First handle operations that involve different types. */
4204 switch (rhs_code)
4206 case WIDEN_MULT_PLUS_EXPR:
4207 case WIDEN_MULT_MINUS_EXPR:
4208 if ((!INTEGRAL_TYPE_P (rhs1_type)
4209 && !FIXED_POINT_TYPE_P (rhs1_type))
4210 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4211 || !useless_type_conversion_p (lhs_type, rhs3_type)
4212 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4213 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4215 error ("type mismatch in widening multiply-accumulate expression");
4216 debug_generic_expr (lhs_type);
4217 debug_generic_expr (rhs1_type);
4218 debug_generic_expr (rhs2_type);
4219 debug_generic_expr (rhs3_type);
4220 return true;
4222 break;
4224 case FMA_EXPR:
4225 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4226 || !useless_type_conversion_p (lhs_type, rhs2_type)
4227 || !useless_type_conversion_p (lhs_type, rhs3_type))
4229 error ("type mismatch in fused multiply-add expression");
4230 debug_generic_expr (lhs_type);
4231 debug_generic_expr (rhs1_type);
4232 debug_generic_expr (rhs2_type);
4233 debug_generic_expr (rhs3_type);
4234 return true;
4236 break;
4238 case VEC_COND_EXPR:
4239 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4240 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4241 != TYPE_VECTOR_SUBPARTS (lhs_type))
4243 error ("the first argument of a VEC_COND_EXPR must be of a "
4244 "boolean vector type of the same number of elements "
4245 "as the result");
4246 debug_generic_expr (lhs_type);
4247 debug_generic_expr (rhs1_type);
4248 return true;
4250 /* Fallthrough. */
4251 case COND_EXPR:
4252 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4253 || !useless_type_conversion_p (lhs_type, rhs3_type))
4255 error ("type mismatch in conditional expression");
4256 debug_generic_expr (lhs_type);
4257 debug_generic_expr (rhs2_type);
4258 debug_generic_expr (rhs3_type);
4259 return true;
4261 break;
4263 case VEC_PERM_EXPR:
4264 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4265 || !useless_type_conversion_p (lhs_type, rhs2_type))
4267 error ("type mismatch in vector permute expression");
4268 debug_generic_expr (lhs_type);
4269 debug_generic_expr (rhs1_type);
4270 debug_generic_expr (rhs2_type);
4271 debug_generic_expr (rhs3_type);
4272 return true;
4275 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4276 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4277 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4279 error ("vector types expected in vector permute expression");
4280 debug_generic_expr (lhs_type);
4281 debug_generic_expr (rhs1_type);
4282 debug_generic_expr (rhs2_type);
4283 debug_generic_expr (rhs3_type);
4284 return true;
4287 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4288 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4289 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4290 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4291 != TYPE_VECTOR_SUBPARTS (lhs_type))
4293 error ("vectors with different element number found "
4294 "in vector permute expression");
4295 debug_generic_expr (lhs_type);
4296 debug_generic_expr (rhs1_type);
4297 debug_generic_expr (rhs2_type);
4298 debug_generic_expr (rhs3_type);
4299 return true;
4302 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4303 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4304 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4306 error ("invalid mask type in vector permute expression");
4307 debug_generic_expr (lhs_type);
4308 debug_generic_expr (rhs1_type);
4309 debug_generic_expr (rhs2_type);
4310 debug_generic_expr (rhs3_type);
4311 return true;
4314 return false;
4316 case SAD_EXPR:
4317 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4318 || !useless_type_conversion_p (lhs_type, rhs3_type)
4319 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4320 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4322 error ("type mismatch in sad expression");
4323 debug_generic_expr (lhs_type);
4324 debug_generic_expr (rhs1_type);
4325 debug_generic_expr (rhs2_type);
4326 debug_generic_expr (rhs3_type);
4327 return true;
4330 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4331 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4332 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4334 error ("vector types expected in sad expression");
4335 debug_generic_expr (lhs_type);
4336 debug_generic_expr (rhs1_type);
4337 debug_generic_expr (rhs2_type);
4338 debug_generic_expr (rhs3_type);
4339 return true;
4342 return false;
4344 case BIT_INSERT_EXPR:
4345 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4347 error ("type mismatch in BIT_INSERT_EXPR");
4348 debug_generic_expr (lhs_type);
4349 debug_generic_expr (rhs1_type);
4350 return true;
4352 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4353 && INTEGRAL_TYPE_P (rhs2_type))
4354 || (VECTOR_TYPE_P (rhs1_type)
4355 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4357 error ("not allowed type combination in BIT_INSERT_EXPR");
4358 debug_generic_expr (rhs1_type);
4359 debug_generic_expr (rhs2_type);
4360 return true;
4362 if (! tree_fits_uhwi_p (rhs3)
4363 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4364 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4366 error ("invalid position or size in BIT_INSERT_EXPR");
4367 return true;
4369 if (INTEGRAL_TYPE_P (rhs1_type))
4371 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4372 if (bitpos >= TYPE_PRECISION (rhs1_type)
4373 || (bitpos + TYPE_PRECISION (rhs2_type)
4374 > TYPE_PRECISION (rhs1_type)))
4376 error ("insertion out of range in BIT_INSERT_EXPR");
4377 return true;
4380 else if (VECTOR_TYPE_P (rhs1_type))
4382 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4383 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4384 if (bitpos % bitsize != 0)
4386 error ("vector insertion not at element boundary");
4387 return true;
4390 return false;
4392 case DOT_PROD_EXPR:
4394 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4395 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4396 && ((!INTEGRAL_TYPE_P (rhs1_type)
4397 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4398 || (!INTEGRAL_TYPE_P (lhs_type)
4399 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4400 || !types_compatible_p (rhs1_type, rhs2_type)
4401 || !useless_type_conversion_p (lhs_type, rhs3_type)
4402 || (GET_MODE_SIZE (element_mode (rhs3_type))
4403 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4405 error ("type mismatch in dot product reduction");
4406 debug_generic_expr (lhs_type);
4407 debug_generic_expr (rhs1_type);
4408 debug_generic_expr (rhs2_type);
4409 return true;
4411 return false;
4414 case REALIGN_LOAD_EXPR:
4415 /* FIXME. */
4416 return false;
4418 default:
4419 gcc_unreachable ();
4421 return false;
4424 /* Verify a gimple assignment statement STMT with a single rhs.
4425 Returns true if anything is wrong. */
4427 static bool
4428 verify_gimple_assign_single (gassign *stmt)
4430 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4431 tree lhs = gimple_assign_lhs (stmt);
4432 tree lhs_type = TREE_TYPE (lhs);
4433 tree rhs1 = gimple_assign_rhs1 (stmt);
4434 tree rhs1_type = TREE_TYPE (rhs1);
4435 bool res = false;
4437 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4439 error ("non-trivial conversion at assignment");
4440 debug_generic_expr (lhs_type);
4441 debug_generic_expr (rhs1_type);
4442 return true;
4445 if (gimple_clobber_p (stmt)
4446 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4448 error ("non-decl/MEM_REF LHS in clobber statement");
4449 debug_generic_expr (lhs);
4450 return true;
4453 if (handled_component_p (lhs)
4454 || TREE_CODE (lhs) == MEM_REF
4455 || TREE_CODE (lhs) == TARGET_MEM_REF)
4456 res |= verify_types_in_gimple_reference (lhs, true);
4458 /* Special codes we cannot handle via their class. */
4459 switch (rhs_code)
4461 case ADDR_EXPR:
4463 tree op = TREE_OPERAND (rhs1, 0);
4464 if (!is_gimple_addressable (op))
4466 error ("invalid operand in unary expression");
4467 return true;
4470 /* Technically there is no longer a need for matching types, but
4471 gimple hygiene asks for this check. In LTO we can end up
4472 combining incompatible units and thus end up with addresses
4473 of globals that change their type to a common one. */
4474 if (!in_lto_p
4475 && !types_compatible_p (TREE_TYPE (op),
4476 TREE_TYPE (TREE_TYPE (rhs1)))
4477 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4478 TREE_TYPE (op)))
4480 error ("type mismatch in address expression");
4481 debug_generic_stmt (TREE_TYPE (rhs1));
4482 debug_generic_stmt (TREE_TYPE (op));
4483 return true;
4486 return verify_types_in_gimple_reference (op, true);
4489 /* tcc_reference */
4490 case INDIRECT_REF:
4491 error ("INDIRECT_REF in gimple IL");
4492 return true;
4494 case COMPONENT_REF:
4495 case BIT_FIELD_REF:
4496 case ARRAY_REF:
4497 case ARRAY_RANGE_REF:
4498 case VIEW_CONVERT_EXPR:
4499 case REALPART_EXPR:
4500 case IMAGPART_EXPR:
4501 case TARGET_MEM_REF:
4502 case MEM_REF:
4503 if (!is_gimple_reg (lhs)
4504 && is_gimple_reg_type (TREE_TYPE (lhs)))
4506 error ("invalid rhs for gimple memory store");
4507 debug_generic_stmt (lhs);
4508 debug_generic_stmt (rhs1);
4509 return true;
4511 return res || verify_types_in_gimple_reference (rhs1, false);
4513 /* tcc_constant */
4514 case SSA_NAME:
4515 case INTEGER_CST:
4516 case REAL_CST:
4517 case FIXED_CST:
4518 case COMPLEX_CST:
4519 case VECTOR_CST:
4520 case STRING_CST:
4521 return res;
4523 /* tcc_declaration */
4524 case CONST_DECL:
4525 return res;
4526 case VAR_DECL:
4527 case PARM_DECL:
4528 if (!is_gimple_reg (lhs)
4529 && !is_gimple_reg (rhs1)
4530 && is_gimple_reg_type (TREE_TYPE (lhs)))
4532 error ("invalid rhs for gimple memory store");
4533 debug_generic_stmt (lhs);
4534 debug_generic_stmt (rhs1);
4535 return true;
4537 return res;
4539 case CONSTRUCTOR:
4540 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4542 unsigned int i;
4543 tree elt_i, elt_v, elt_t = NULL_TREE;
4545 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4546 return res;
4547 /* For vector CONSTRUCTORs we require that either it is an empty
4548 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4549 (then the element count must be correct to cover the whole
4550 outer vector and the index must be NULL on all elements), or it is
4551 a CONSTRUCTOR of scalar elements, where as an exception we allow
4552 a smaller number of elements (assuming zero filling) and
4553 consecutive indexes as compared to NULL indexes (such
4554 CONSTRUCTORs can appear in the IL from FEs). */
4555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4557 if (elt_t == NULL_TREE)
4559 elt_t = TREE_TYPE (elt_v);
4560 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4562 tree elt_t = TREE_TYPE (elt_v);
4563 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4564 TREE_TYPE (elt_t)))
4566 error ("incorrect type of vector CONSTRUCTOR"
4567 " elements");
4568 debug_generic_stmt (rhs1);
4569 return true;
4571 else if (CONSTRUCTOR_NELTS (rhs1)
4572 * TYPE_VECTOR_SUBPARTS (elt_t)
4573 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4575 error ("incorrect number of vector CONSTRUCTOR"
4576 " elements");
4577 debug_generic_stmt (rhs1);
4578 return true;
4581 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4582 elt_t))
4584 error ("incorrect type of vector CONSTRUCTOR elements");
4585 debug_generic_stmt (rhs1);
4586 return true;
4588 else if (CONSTRUCTOR_NELTS (rhs1)
4589 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4591 error ("incorrect number of vector CONSTRUCTOR elements");
4592 debug_generic_stmt (rhs1);
4593 return true;
4596 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4598 error ("incorrect type of vector CONSTRUCTOR elements");
4599 debug_generic_stmt (rhs1);
4600 return true;
4602 if (elt_i != NULL_TREE
4603 && (TREE_CODE (elt_t) == VECTOR_TYPE
4604 || TREE_CODE (elt_i) != INTEGER_CST
4605 || compare_tree_int (elt_i, i) != 0))
4607 error ("vector CONSTRUCTOR with non-NULL element index");
4608 debug_generic_stmt (rhs1);
4609 return true;
4611 if (!is_gimple_val (elt_v))
4613 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4614 debug_generic_stmt (rhs1);
4615 return true;
4619 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4621 error ("non-vector CONSTRUCTOR with elements");
4622 debug_generic_stmt (rhs1);
4623 return true;
4625 return res;
4626 case OBJ_TYPE_REF:
4627 case ASSERT_EXPR:
4628 case WITH_SIZE_EXPR:
4629 /* FIXME. */
4630 return res;
4632 default:;
4635 return res;
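/* Illustrative sketch, not part of the verifier: a single-rhs assign
   is a copy, a load or a store.  With hypothetical names, the store

     MEM[(int *)p_1] = x_2;

   is valid, while the memory-to-memory form

     MEM[(int *)p_1] = MEM[(int *)q_3];

   is rejected above as "invalid rhs for gimple memory store", since
   for register types GIMPLE permits at most one memory operand per
   assignment.  */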
4638 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4639 is a problem, otherwise false. */
4641 static bool
4642 verify_gimple_assign (gassign *stmt)
4644 switch (gimple_assign_rhs_class (stmt))
4646 case GIMPLE_SINGLE_RHS:
4647 return verify_gimple_assign_single (stmt);
4649 case GIMPLE_UNARY_RHS:
4650 return verify_gimple_assign_unary (stmt);
4652 case GIMPLE_BINARY_RHS:
4653 return verify_gimple_assign_binary (stmt);
4655 case GIMPLE_TERNARY_RHS:
4656 return verify_gimple_assign_ternary (stmt);
4658 default:
4659 gcc_unreachable ();
4663 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4664 is a problem, otherwise false. */
4666 static bool
4667 verify_gimple_return (greturn *stmt)
4669 tree op = gimple_return_retval (stmt);
4670 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4672 /* We cannot test for present return values as we do not fix up missing
4673 return values from the original source. */
4674 if (op == NULL)
4675 return false;
4677 if (!is_gimple_val (op)
4678 && TREE_CODE (op) != RESULT_DECL)
4680 error ("invalid operand in return statement");
4681 debug_generic_stmt (op);
4682 return true;
4685 if ((TREE_CODE (op) == RESULT_DECL
4686 && DECL_BY_REFERENCE (op))
4687 || (TREE_CODE (op) == SSA_NAME
4688 && SSA_NAME_VAR (op)
4689 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4690 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4691 op = TREE_TYPE (op);
4693 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4695 error ("invalid conversion in return statement");
4696 debug_generic_stmt (restype);
4697 debug_generic_stmt (TREE_TYPE (op));
4698 return true;
4701 return false;
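/* Illustrative sketch, assuming a hypothetical function: for
   "int f (int i) { return i; }" the GIMPLE return stmt is

     return i_1;

   and the type of i_1 must be trivially convertible to the declared
   result type.  For a DECL_BY_REFERENCE result the RESULT_DECL is
   really a pointer, so the conversion check above is performed
   against its pointed-to type instead.  */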
4705 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4706 is a problem, otherwise false. */
4708 static bool
4709 verify_gimple_goto (ggoto *stmt)
4711 tree dest = gimple_goto_dest (stmt);
4713 /* ??? We have two canonical forms of direct goto destinations, a
4714 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4715 if (TREE_CODE (dest) != LABEL_DECL
4716 && (!is_gimple_val (dest)
4717 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4719 error ("goto destination is neither a label nor a pointer");
4720 return true;
4723 return false;
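/* Illustrative sketch: a direct goto carries a bare LABEL_DECL,
   whereas a computed goto, e.g. the GNU C

     void *p_1 = &&lab;
     goto *p_1;

   carries a pointer-valued GIMPLE value.  Both forms pass the check
   above; any other destination is diagnosed.  */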
4726 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4727 is a problem, otherwise false. */
4729 static bool
4730 verify_gimple_switch (gswitch *stmt)
4732 unsigned int i, n;
4733 tree elt, prev_upper_bound = NULL_TREE;
4734 tree index_type, elt_type = NULL_TREE;
4736 if (!is_gimple_val (gimple_switch_index (stmt)))
4738 error ("invalid operand to switch statement");
4739 debug_generic_stmt (gimple_switch_index (stmt));
4740 return true;
4743 index_type = TREE_TYPE (gimple_switch_index (stmt));
4744 if (! INTEGRAL_TYPE_P (index_type))
4746 error ("non-integral type switch statement");
4747 debug_generic_expr (index_type);
4748 return true;
4751 elt = gimple_switch_label (stmt, 0);
4752 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4754 error ("invalid default case label in switch statement");
4755 debug_generic_expr (elt);
4756 return true;
4759 n = gimple_switch_num_labels (stmt);
4760 for (i = 1; i < n; i++)
4762 elt = gimple_switch_label (stmt, i);
4764 if (! CASE_LOW (elt))
4766 error ("invalid case label in switch statement");
4767 debug_generic_expr (elt);
4768 return true;
4770 if (CASE_HIGH (elt)
4771 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4773 error ("invalid case range in switch statement");
4774 debug_generic_expr (elt);
4775 return true;
4778 if (elt_type)
4780 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4781 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4783 error ("type mismatch for case label in switch statement");
4784 debug_generic_expr (elt);
4785 return true;
4788 else
4790 elt_type = TREE_TYPE (CASE_LOW (elt));
4791 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4793 error ("type precision mismatch in switch statement");
4794 return true;
4798 if (prev_upper_bound)
4800 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4802 error ("case labels not sorted in switch statement");
4803 return true;
4807 prev_upper_bound = CASE_HIGH (elt);
4808 if (! prev_upper_bound)
4809 prev_upper_bound = CASE_LOW (elt);
4812 return false;
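/* Illustrative sketch with hypothetical labels: a switch such as

     switch (i_1) <default: <D.1>, case 2: <D.2>, case 4 ... 7: <D.3>>

   satisfies the checks above: the default label comes first with no
   CASE_LOW/CASE_HIGH, each other label has a CASE_LOW (and
   CASE_LOW < CASE_HIGH for ranges), all labels share one type whose
   precision does not exceed that of the index, and the labels are
   sorted in increasing order.  */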
4815 /* Verify a gimple debug statement STMT.
4816 Returns true if anything is wrong. */
4818 static bool
4819 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4821 /* There isn't much that could be wrong in a gimple debug stmt. A
4822 gimple debug bind stmt, for example, maps a tree (usually a
4823 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4824 or member of an aggregate type) to another tree that can be an
4825 arbitrary expression. These stmts expand into debug insns, and
4826 are converted to debug notes by var-tracking.c. */
4827 return false;
4830 /* Verify a gimple label statement STMT.
4831 Returns true if anything is wrong. */
4833 static bool
4834 verify_gimple_label (glabel *stmt)
4836 tree decl = gimple_label_label (stmt);
4837 int uid;
4838 bool err = false;
4840 if (TREE_CODE (decl) != LABEL_DECL)
4841 return true;
4842 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4843 && DECL_CONTEXT (decl) != current_function_decl)
4845 error ("label's context is not the current function decl");
4846 err |= true;
4849 uid = LABEL_DECL_UID (decl);
4850 if (cfun->cfg
4851 && (uid == -1
4852 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4854 error ("incorrect entry in label_to_block_map");
4855 err |= true;
4858 uid = EH_LANDING_PAD_NR (decl);
4859 if (uid)
4861 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4862 if (decl != lp->post_landing_pad)
4864 error ("incorrect setting of landing pad number");
4865 err |= true;
4869 return err;
4872 /* Verify a gimple cond statement STMT.
4873 Returns true if anything is wrong. */
4875 static bool
4876 verify_gimple_cond (gcond *stmt)
4878 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4880 error ("invalid comparison code in gimple cond");
4881 return true;
4883 if (!(!gimple_cond_true_label (stmt)
4884 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4885 || !(!gimple_cond_false_label (stmt)
4886 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4888 error ("invalid labels in gimple cond");
4889 return true;
4892 return verify_gimple_comparison (boolean_type_node,
4893 gimple_cond_lhs (stmt),
4894 gimple_cond_rhs (stmt),
4895 gimple_cond_code (stmt));
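/* Illustrative sketch: a valid GIMPLE cond has the shape

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   i.e. a tcc_comparison code applied to two GIMPLE values; it is
   checked against boolean_type_node because the comparison feeds a
   two-way branch instead of producing a stored value.  */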
4898 /* Verify the GIMPLE statement STMT. Returns true if there is an
4899 error, otherwise false. */
4901 static bool
4902 verify_gimple_stmt (gimple *stmt)
4904 switch (gimple_code (stmt))
4906 case GIMPLE_ASSIGN:
4907 return verify_gimple_assign (as_a <gassign *> (stmt));
4909 case GIMPLE_LABEL:
4910 return verify_gimple_label (as_a <glabel *> (stmt));
4912 case GIMPLE_CALL:
4913 return verify_gimple_call (as_a <gcall *> (stmt));
4915 case GIMPLE_COND:
4916 return verify_gimple_cond (as_a <gcond *> (stmt));
4918 case GIMPLE_GOTO:
4919 return verify_gimple_goto (as_a <ggoto *> (stmt));
4921 case GIMPLE_SWITCH:
4922 return verify_gimple_switch (as_a <gswitch *> (stmt));
4924 case GIMPLE_RETURN:
4925 return verify_gimple_return (as_a <greturn *> (stmt));
4927 case GIMPLE_ASM:
4928 return false;
4930 case GIMPLE_TRANSACTION:
4931 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4933 /* Tuples that do not have tree operands. */
4934 case GIMPLE_NOP:
4935 case GIMPLE_PREDICT:
4936 case GIMPLE_RESX:
4937 case GIMPLE_EH_DISPATCH:
4938 case GIMPLE_EH_MUST_NOT_THROW:
4939 return false;
4941 CASE_GIMPLE_OMP:
4942 /* OpenMP directives are validated by the FE and never operated
4943 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4944 non-gimple expressions when the main index variable has had
4945 its address taken. This does not affect the loop itself
4946 because the header of a GIMPLE_OMP_FOR is merely used to determine
4947 how to set up the parallel iteration. */
4948 return false;
4950 case GIMPLE_DEBUG:
4951 return verify_gimple_debug (stmt);
4953 default:
4954 gcc_unreachable ();
4958 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4959 and false otherwise. */
4961 static bool
4962 verify_gimple_phi (gimple *phi)
4964 bool err = false;
4965 unsigned i;
4966 tree phi_result = gimple_phi_result (phi);
4967 bool virtual_p;
4969 if (!phi_result)
4971 error ("invalid PHI result");
4972 return true;
4975 virtual_p = virtual_operand_p (phi_result);
4976 if (TREE_CODE (phi_result) != SSA_NAME
4977 || (virtual_p
4978 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4980 error ("invalid PHI result");
4981 err = true;
4984 for (i = 0; i < gimple_phi_num_args (phi); i++)
4986 tree t = gimple_phi_arg_def (phi, i);
4988 if (!t)
4990 error ("missing PHI def");
4991 err |= true;
4992 continue;
4994 /* Addressable variables do have SSA_NAMEs but they
4995 are not considered gimple values. */
4996 else if ((TREE_CODE (t) == SSA_NAME
4997 && virtual_p != virtual_operand_p (t))
4998 || (virtual_p
4999 && (TREE_CODE (t) != SSA_NAME
5000 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5001 || (!virtual_p
5002 && !is_gimple_val (t)))
5004 error ("invalid PHI argument");
5005 debug_generic_expr (t);
5006 err |= true;
5008 #ifdef ENABLE_TYPES_CHECKING
5009 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5011 error ("incompatible types in PHI argument %u", i);
5012 debug_generic_stmt (TREE_TYPE (phi_result));
5013 debug_generic_stmt (TREE_TYPE (t));
5014 err |= true;
5016 #endif
5019 return err;
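/* Illustrative sketch with hypothetical names: a well-formed PHI

     x_4 = PHI <x_2(3), x_3(4)>

   has an SSA_NAME result and one GIMPLE value per incoming edge, and
   its arguments match the result in virtualness, so a virtual PHI

     .MEM_5 = PHI <.MEM_1(3), .MEM_2(4)>

   may only combine names of the single virtual operand.  */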
5022 /* Verify the GIMPLE statements inside the sequence STMTS. */
5024 static bool
5025 verify_gimple_in_seq_2 (gimple_seq stmts)
5027 gimple_stmt_iterator ittr;
5028 bool err = false;
5030 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5032 gimple *stmt = gsi_stmt (ittr);
5034 switch (gimple_code (stmt))
5036 case GIMPLE_BIND:
5037 err |= verify_gimple_in_seq_2 (
5038 gimple_bind_body (as_a <gbind *> (stmt)));
5039 break;
5041 case GIMPLE_TRY:
5042 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5043 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5044 break;
5046 case GIMPLE_EH_FILTER:
5047 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5048 break;
5050 case GIMPLE_EH_ELSE:
5052 geh_else *eh_else = as_a <geh_else *> (stmt);
5053 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5054 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5056 break;
5058 case GIMPLE_CATCH:
5059 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5060 as_a <gcatch *> (stmt)));
5061 break;
5063 case GIMPLE_TRANSACTION:
5064 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5065 break;
5067 default:
5069 bool err2 = verify_gimple_stmt (stmt);
5070 if (err2)
5071 debug_gimple_stmt (stmt);
5072 err |= err2;
5077 return err;
5080 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5081 is a problem, otherwise false. */
5083 static bool
5084 verify_gimple_transaction (gtransaction *stmt)
5086 tree lab;
5088 lab = gimple_transaction_label_norm (stmt);
5089 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5090 return true;
5091 lab = gimple_transaction_label_uninst (stmt);
5092 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5093 return true;
5094 lab = gimple_transaction_label_over (stmt);
5095 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5096 return true;
5098 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5102 /* Verify the GIMPLE statements inside the statement list STMTS. */
5104 DEBUG_FUNCTION void
5105 verify_gimple_in_seq (gimple_seq stmts)
5107 timevar_push (TV_TREE_STMT_VERIFY);
5108 if (verify_gimple_in_seq_2 (stmts))
5109 internal_error ("verify_gimple failed");
5110 timevar_pop (TV_TREE_STMT_VERIFY);
5113 /* Return true when T can be shared. */
5115 static bool
5116 tree_node_can_be_shared (tree t)
5118 if (IS_TYPE_OR_DECL_P (t)
5119 || is_gimple_min_invariant (t)
5120 || TREE_CODE (t) == SSA_NAME
5121 || t == error_mark_node
5122 || TREE_CODE (t) == IDENTIFIER_NODE)
5123 return true;
5125 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5126 return true;
5128 if (DECL_P (t))
5129 return true;
5131 return false;
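/* Illustrative sketch: types, decls, SSA names and constants are
   freely shared, so two stmts may refer to the very same node for
   x_1.  An expression node such as an ARRAY_REF is not shareable;
   if one appeared in two stmts, the sharing walk below would report
   "incorrect sharing of tree nodes".  */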
5134 /* Called via walk_tree. Verify tree sharing. */
5136 static tree
5137 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5139 hash_set<void *> *visited = (hash_set<void *> *) data;
5141 if (tree_node_can_be_shared (*tp))
5143 *walk_subtrees = false;
5144 return NULL;
5147 if (visited->add (*tp))
5148 return *tp;
5150 return NULL;
5153 /* Called via walk_gimple_stmt. Verify tree sharing. */
5155 static tree
5156 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5158 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5159 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5162 static bool eh_error_found;
5163 bool
5164 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5165 hash_set<gimple *> *visited)
5167 if (!visited->contains (stmt))
5169 error ("dead STMT in EH table");
5170 debug_gimple_stmt (stmt);
5171 eh_error_found = true;
5173 return true;
5176 /* Verify that the block of location LOC is in BLOCKS. */
5178 static bool
5179 verify_location (hash_set<tree> *blocks, location_t loc)
5181 tree block = LOCATION_BLOCK (loc);
5182 if (block != NULL_TREE
5183 && !blocks->contains (block))
5185 error ("location references block not in block tree");
5186 return true;
5188 if (block != NULL_TREE)
5189 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5190 return false;
5193 /* Called via walk_tree. Verify that expressions have no blocks. */
5195 static tree
5196 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5198 if (!EXPR_P (*tp))
5200 *walk_subtrees = false;
5201 return NULL;
5204 location_t loc = EXPR_LOCATION (*tp);
5205 if (LOCATION_BLOCK (loc) != NULL)
5206 return *tp;
5208 return NULL;
5211 /* Called via walk_tree. Verify locations of expressions. */
5213 static tree
5214 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5216 hash_set<tree> *blocks = (hash_set<tree> *) data;
5218 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5220 tree t = DECL_DEBUG_EXPR (*tp);
5221 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5222 if (addr)
5223 return addr;
5225 if ((VAR_P (*tp)
5226 || TREE_CODE (*tp) == PARM_DECL
5227 || TREE_CODE (*tp) == RESULT_DECL)
5228 && DECL_HAS_VALUE_EXPR_P (*tp))
5230 tree t = DECL_VALUE_EXPR (*tp);
5231 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5232 if (addr)
5233 return addr;
5236 if (!EXPR_P (*tp))
5238 *walk_subtrees = false;
5239 return NULL;
5242 location_t loc = EXPR_LOCATION (*tp);
5243 if (verify_location (blocks, loc))
5244 return *tp;
5246 return NULL;
5249 /* Called via walk_gimple_op. Verify locations of expressions. */
5251 static tree
5252 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5254 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5255 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5258 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5260 static void
5261 collect_subblocks (hash_set<tree> *blocks, tree block)
5263 tree t;
5264 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5266 blocks->add (t);
5267 collect_subblocks (blocks, t);
5271 /* Verify the GIMPLE statements in the CFG of FN. */
5273 DEBUG_FUNCTION void
5274 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5276 basic_block bb;
5277 bool err = false;
5279 timevar_push (TV_TREE_STMT_VERIFY);
5280 hash_set<void *> visited;
5281 hash_set<gimple *> visited_stmts;
5283 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5284 hash_set<tree> blocks;
5285 if (DECL_INITIAL (fn->decl))
5287 blocks.add (DECL_INITIAL (fn->decl));
5288 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5291 FOR_EACH_BB_FN (bb, fn)
5293 gimple_stmt_iterator gsi;
5295 for (gphi_iterator gpi = gsi_start_phis (bb);
5296 !gsi_end_p (gpi);
5297 gsi_next (&gpi))
5299 gphi *phi = gpi.phi ();
5300 bool err2 = false;
5301 unsigned i;
5303 visited_stmts.add (phi);
5305 if (gimple_bb (phi) != bb)
5307 error ("gimple_bb (phi) is set to a wrong basic block");
5308 err2 = true;
5311 err2 |= verify_gimple_phi (phi);
5313 /* Only PHI arguments have locations. */
5314 if (gimple_location (phi) != UNKNOWN_LOCATION)
5316 error ("PHI node with location");
5317 err2 = true;
5320 for (i = 0; i < gimple_phi_num_args (phi); i++)
5322 tree arg = gimple_phi_arg_def (phi, i);
5323 tree addr = walk_tree (&arg, verify_node_sharing_1,
5324 &visited, NULL);
5325 if (addr)
5327 error ("incorrect sharing of tree nodes");
5328 debug_generic_expr (addr);
5329 err2 |= true;
5331 location_t loc = gimple_phi_arg_location (phi, i);
5332 if (virtual_operand_p (gimple_phi_result (phi))
5333 && loc != UNKNOWN_LOCATION)
5335 error ("virtual PHI with argument locations");
5336 err2 = true;
5338 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5339 if (addr)
5341 debug_generic_expr (addr);
5342 err2 = true;
5344 err2 |= verify_location (&blocks, loc);
5347 if (err2)
5348 debug_gimple_stmt (phi);
5349 err |= err2;
5352 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5354 gimple *stmt = gsi_stmt (gsi);
5355 bool err2 = false;
5356 struct walk_stmt_info wi;
5357 tree addr;
5358 int lp_nr;
5360 visited_stmts.add (stmt);
5362 if (gimple_bb (stmt) != bb)
5364 error ("gimple_bb (stmt) is set to a wrong basic block");
5365 err2 = true;
5368 err2 |= verify_gimple_stmt (stmt);
5369 err2 |= verify_location (&blocks, gimple_location (stmt));
5371 memset (&wi, 0, sizeof (wi));
5372 wi.info = (void *) &visited;
5373 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5374 if (addr)
5376 error ("incorrect sharing of tree nodes");
5377 debug_generic_expr (addr);
5378 err2 |= true;
5381 memset (&wi, 0, sizeof (wi));
5382 wi.info = (void *) &blocks;
5383 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5384 if (addr)
5386 debug_generic_expr (addr);
5387 err2 |= true;
5390 /* ??? Instead of not checking these stmts at all the walker
5391 should know its context via wi. */
5392 if (!is_gimple_debug (stmt)
5393 && !is_gimple_omp (stmt))
5395 memset (&wi, 0, sizeof (wi));
5396 addr = walk_gimple_op (stmt, verify_expr, &wi);
5397 if (addr)
5399 debug_generic_expr (addr);
5400 inform (gimple_location (stmt), "in statement");
5401 err2 |= true;
5405 /* If the statement is marked as part of an EH region, then it is
5406 expected that the statement could throw. Verify that when
5407 optimizations simplify statements so that we can prove they
5408 cannot throw, the other data structures are updated to
5409 match. */
5410 lp_nr = lookup_stmt_eh_lp (stmt);
5411 if (lp_nr > 0)
5413 if (!stmt_could_throw_p (stmt))
5415 if (verify_nothrow)
5417 error ("statement marked for throw, but doesn%'t");
5418 err2 |= true;
5421 else if (!gsi_one_before_end_p (gsi))
5423 error ("statement marked for throw in middle of block");
5424 err2 |= true;
5428 if (err2)
5429 debug_gimple_stmt (stmt);
5430 err |= err2;
5434 eh_error_found = false;
5435 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5436 if (eh_table)
5437 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5438 (&visited_stmts);
5440 if (err || eh_error_found)
5441 internal_error ("verify_gimple failed");
5443 verify_histograms ();
5444 timevar_pop (TV_TREE_STMT_VERIFY);
5448 /* Verifies that the flow information is OK. */
5450 static int
5451 gimple_verify_flow_info (void)
5453 int err = 0;
5454 basic_block bb;
5455 gimple_stmt_iterator gsi;
5456 gimple *stmt;
5457 edge e;
5458 edge_iterator ei;
5460 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5461 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5463 error ("ENTRY_BLOCK has IL associated with it");
5464 err = 1;
5467 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5468 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5470 error ("EXIT_BLOCK has IL associated with it");
5471 err = 1;
5474 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5475 if (e->flags & EDGE_FALLTHRU)
5477 error ("fallthru to exit from bb %d", e->src->index);
5478 err = 1;
5481 FOR_EACH_BB_FN (bb, cfun)
5483 bool found_ctrl_stmt = false;
5485 stmt = NULL;
5487 /* Skip labels at the start of the basic block. */
5488 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5490 tree label;
5491 gimple *prev_stmt = stmt;
5493 stmt = gsi_stmt (gsi);
5495 if (gimple_code (stmt) != GIMPLE_LABEL)
5496 break;
5498 label = gimple_label_label (as_a <glabel *> (stmt));
5499 if (prev_stmt && DECL_NONLOCAL (label))
5501 error ("nonlocal label ");
5502 print_generic_expr (stderr, label);
5503 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5504 bb->index);
5505 err = 1;
5508 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5510 error ("EH landing pad label ");
5511 print_generic_expr (stderr, label);
5512 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5513 bb->index);
5514 err = 1;
5517 if (label_to_block (label) != bb)
5519 error ("label ");
5520 print_generic_expr (stderr, label);
5521 fprintf (stderr, " to block does not match in bb %d",
5522 bb->index);
5523 err = 1;
5526 if (decl_function_context (label) != current_function_decl)
5528 error ("label ");
5529 print_generic_expr (stderr, label);
5530 fprintf (stderr, " has incorrect context in bb %d",
5531 bb->index);
5532 err = 1;
5536 /* Verify that the body of basic block BB is free of control flow. */
5537 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5539 gimple *stmt = gsi_stmt (gsi);
5541 if (found_ctrl_stmt)
5543 error ("control flow in the middle of basic block %d",
5544 bb->index);
5545 err = 1;
5548 if (stmt_ends_bb_p (stmt))
5549 found_ctrl_stmt = true;
5551 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5553 error ("label ");
5554 print_generic_expr (stderr, gimple_label_label (label_stmt));
5555 fprintf (stderr, " in the middle of basic block %d", bb->index);
5556 err = 1;
5560 gsi = gsi_last_bb (bb);
5561 if (gsi_end_p (gsi))
5562 continue;
5564 stmt = gsi_stmt (gsi);
5566 if (gimple_code (stmt) == GIMPLE_LABEL)
5567 continue;
5569 err |= verify_eh_edges (stmt);
5571 if (is_ctrl_stmt (stmt))
5573 FOR_EACH_EDGE (e, ei, bb->succs)
5574 if (e->flags & EDGE_FALLTHRU)
5576 error ("fallthru edge after a control statement in bb %d",
5577 bb->index);
5578 err = 1;
5582 if (gimple_code (stmt) != GIMPLE_COND)
5584 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5585 after anything other than a GIMPLE_COND statement. */
5586 FOR_EACH_EDGE (e, ei, bb->succs)
5587 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5589 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5590 bb->index);
5591 err = 1;
5595 switch (gimple_code (stmt))
5597 case GIMPLE_COND:
5599 edge true_edge;
5600 edge false_edge;
5602 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5604 if (!true_edge
5605 || !false_edge
5606 || !(true_edge->flags & EDGE_TRUE_VALUE)
5607 || !(false_edge->flags & EDGE_FALSE_VALUE)
5608 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5609 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5610 || EDGE_COUNT (bb->succs) >= 3)
5612 error ("wrong outgoing edge flags at end of bb %d",
5613 bb->index);
5614 err = 1;
5617 break;
5619 case GIMPLE_GOTO:
5620 if (simple_goto_p (stmt))
5622 error ("explicit goto at end of bb %d", bb->index);
5623 err = 1;
5625 else
5627 /* FIXME. We should double check that the labels in the
5628 destination blocks have their address taken. */
5629 FOR_EACH_EDGE (e, ei, bb->succs)
5630 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5631 | EDGE_FALSE_VALUE))
5632 || !(e->flags & EDGE_ABNORMAL))
5634 error ("wrong outgoing edge flags at end of bb %d",
5635 bb->index);
5636 err = 1;
5639 break;
5641 case GIMPLE_CALL:
5642 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5643 break;
5644 /* fallthru */
5645 case GIMPLE_RETURN:
5646 if (!single_succ_p (bb)
5647 || (single_succ_edge (bb)->flags
5648 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5649 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5651 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5652 err = 1;
5654 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5656 error ("return edge does not point to exit in bb %d",
5657 bb->index);
5658 err = 1;
5660 break;
5662 case GIMPLE_SWITCH:
5664 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5665 tree prev;
5666 edge e;
5667 size_t i, n;
5669 n = gimple_switch_num_labels (switch_stmt);
5671 /* Mark all the destination basic blocks. */
5672 for (i = 0; i < n; ++i)
5674 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5675 basic_block label_bb = label_to_block (lab);
5676 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5677 label_bb->aux = (void *)1;
5680 /* Verify that the case labels are sorted. */
5681 prev = gimple_switch_label (switch_stmt, 0);
5682 for (i = 1; i < n; ++i)
5684 tree c = gimple_switch_label (switch_stmt, i);
5685 if (!CASE_LOW (c))
5687 error ("found default case not at the start of "
5688 "case vector");
5689 err = 1;
5690 continue;
5692 if (CASE_LOW (prev)
5693 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5695 error ("case labels not sorted: ");
5696 print_generic_expr (stderr, prev);
5697 fprintf (stderr, " is greater than ");
5698 print_generic_expr (stderr, c);
5699 fprintf (stderr, " but comes before it.\n");
5700 err = 1;
5702 prev = c;
5704 /* VRP will remove the default case if it can prove it will
5705 never be executed. So do not verify there always exists
5706 a default case here. */
5708 FOR_EACH_EDGE (e, ei, bb->succs)
5710 if (!e->dest->aux)
5712 error ("extra outgoing edge %d->%d",
5713 bb->index, e->dest->index);
5714 err = 1;
5717 e->dest->aux = (void *)2;
5718 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5719 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5721 error ("wrong outgoing edge flags at end of bb %d",
5722 bb->index);
5723 err = 1;
5727 /* Check that we have all of them. */
5728 for (i = 0; i < n; ++i)
5730 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5731 basic_block label_bb = label_to_block (lab);
5733 if (label_bb->aux != (void *)2)
5735 error ("missing edge %i->%i", bb->index, label_bb->index);
5736 err = 1;
5740 FOR_EACH_EDGE (e, ei, bb->succs)
5741 e->dest->aux = (void *)0;
5743 break;
5745 case GIMPLE_EH_DISPATCH:
5746 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5747 break;
5749 default:
5750 break;
5754 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5755 verify_dominators (CDI_DOMINATORS);
5757 return err;
5761 /* Updates phi nodes after creating a forwarder block joined
5762 by edge FALLTHRU. */
5764 static void
5765 gimple_make_forwarder_block (edge fallthru)
5767 edge e;
5768 edge_iterator ei;
5769 basic_block dummy, bb;
5770 tree var;
5771 gphi_iterator gsi;
5773 dummy = fallthru->src;
5774 bb = fallthru->dest;
5776 if (single_pred_p (bb))
5777 return;
5779 /* If we redirected a branch we must create new PHI nodes at the
5780 start of BB. */
5781 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5783 gphi *phi, *new_phi;
5785 phi = gsi.phi ();
5786 var = gimple_phi_result (phi);
5787 new_phi = create_phi_node (var, bb);
5788 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5789 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5790 UNKNOWN_LOCATION);
5793 /* Add the arguments we have stored on edges. */
5794 FOR_EACH_EDGE (e, ei, bb->preds)
5796 if (e == fallthru)
5797 continue;
5799 flush_pending_stmts (e);
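/* Illustrative sketch: if the forwarder DUMMY was split off BB,
   which held

     x_3 = PHI <x_1(e1), x_2(e2)>

   the code above gives the PHI kept in DUMMY a fresh result and
   seeds a new PHI in BB with it over FALLTHRU, roughly

     # in DUMMY:  x_4 = PHI <x_1(e1), x_2(e2)>
     # in BB:     x_3 = PHI <x_4(fallthru), ...>

   with the remaining arguments filled in from the pending stmts on
   the other redirected edges.  */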
5804 /* Return a non-special label at the head of basic block BB.
5805 Create one if it doesn't exist. */
5807 tree
5808 gimple_block_label (basic_block bb)
5810 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5811 bool first = true;
5812 tree label;
5813 glabel *stmt;
5815 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5817 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5818 if (!stmt)
5819 break;
5820 label = gimple_label_label (stmt);
5821 if (!DECL_NONLOCAL (label))
5823 if (!first)
5824 gsi_move_before (&i, &s);
5825 return label;
5829 label = create_artificial_label (UNKNOWN_LOCATION);
5830 stmt = gimple_build_label (label);
5831 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5832 return label;
5836 /* Attempt to perform edge redirection by replacing a possibly complex
5837 jump instruction by a goto or by removing the jump completely.
5838 This can apply only if all edges now point to the same block. The
5839 parameters and return values are equivalent to
5840 redirect_edge_and_branch. */
5842 static edge
5843 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5845 basic_block src = e->src;
5846 gimple_stmt_iterator i;
5847 gimple *stmt;
5849 /* We can replace or remove a complex jump only when we have exactly
5850 two edges. */
5851 if (EDGE_COUNT (src->succs) != 2
5852 /* Verify that all targets will be TARGET. Specifically, the
5853 edge that is not E must also go to TARGET. */
5854 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5855 return NULL;
5857 i = gsi_last_bb (src);
5858 if (gsi_end_p (i))
5859 return NULL;
5861 stmt = gsi_stmt (i);
5863 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5865 gsi_remove (&i, true);
5866 e = ssa_redirect_edge (e, target);
5867 e->flags = EDGE_FALLTHRU;
5868 return e;
5871 return NULL;
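/* Illustrative sketch: if BB ends in

     if (x_1 != 0) goto A; else goto B;

   and both A and B are (or are being redirected to) TARGET, the
   conditional is deleted above and the surviving edge becomes a
   plain fallthru to TARGET.  */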
5875 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5876 edge representing the redirected branch. */
5878 static edge
5879 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5881 basic_block bb = e->src;
5882 gimple_stmt_iterator gsi;
5883 edge ret;
5884 gimple *stmt;
5886 if (e->flags & EDGE_ABNORMAL)
5887 return NULL;
5889 if (e->dest == dest)
5890 return NULL;
5892 if (e->flags & EDGE_EH)
5893 return redirect_eh_edge (e, dest);
5895 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5897 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5898 if (ret)
5899 return ret;
5902 gsi = gsi_last_bb (bb);
5903 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5905 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5907 case GIMPLE_COND:
5908 /* For COND_EXPR, we only need to redirect the edge. */
5909 break;
5911 case GIMPLE_GOTO:
5912 /* No non-abnormal edges should lead from a non-simple goto, and
5913 simple ones should be represented implicitly. */
5914 gcc_unreachable ();
5916 case GIMPLE_SWITCH:
5918 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5919 tree label = gimple_block_label (dest);
5920 tree cases = get_cases_for_edge (e, switch_stmt);
5922 /* If we have a list of cases associated with E, then use it
5923 as it's a lot faster than walking the entire case vector. */
5924 if (cases)
5926 edge e2 = find_edge (e->src, dest);
5927 tree last, first;
5929 first = cases;
5930 while (cases)
5932 last = cases;
5933 CASE_LABEL (cases) = label;
5934 cases = CASE_CHAIN (cases);
5937 /* If there was already an edge in the CFG, then we need
5938 to move all the cases associated with E to E2. */
5939 if (e2)
5941 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5943 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5944 CASE_CHAIN (cases2) = first;
5946 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5948 else
5950 size_t i, n = gimple_switch_num_labels (switch_stmt);
5952 for (i = 0; i < n; i++)
5954 tree elt = gimple_switch_label (switch_stmt, i);
5955 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5956 CASE_LABEL (elt) = label;
5960 break;
5962 case GIMPLE_ASM:
5964 gasm *asm_stmt = as_a <gasm *> (stmt);
5965 int i, n = gimple_asm_nlabels (asm_stmt);
5966 tree label = NULL;
5968 for (i = 0; i < n; ++i)
5970 tree cons = gimple_asm_label_op (asm_stmt, i);
5971 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5973 if (!label)
5974 label = gimple_block_label (dest);
5975 TREE_VALUE (cons) = label;
5979 /* If we didn't find any label matching the former edge in the
5980 asm labels, we must be redirecting the fallthrough
5981 edge. */
5982 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5984 break;
5986 case GIMPLE_RETURN:
5987 gsi_remove (&gsi, true);
5988 e->flags |= EDGE_FALLTHRU;
5989 break;
5991 case GIMPLE_OMP_RETURN:
5992 case GIMPLE_OMP_CONTINUE:
5993 case GIMPLE_OMP_SECTIONS_SWITCH:
5994 case GIMPLE_OMP_FOR:
5995 /* The edges from OMP constructs can be simply redirected. */
5996 break;
5998 case GIMPLE_EH_DISPATCH:
5999 if (!(e->flags & EDGE_FALLTHRU))
6000 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6001 break;
6003 case GIMPLE_TRANSACTION:
6004 if (e->flags & EDGE_TM_ABORT)
6005 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6006 gimple_block_label (dest));
6007 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6008 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6009 gimple_block_label (dest));
6010 else
6011 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6012 gimple_block_label (dest));
6013 break;
6015 default:
6016 /* Otherwise it must be a fallthru edge, and we don't need to
6017 do anything besides redirecting it. */
6018 gcc_assert (e->flags & EDGE_FALLTHRU);
6019 break;
6022 /* Update/insert PHI nodes as necessary. */
6024 /* Now update the edges in the CFG. */
6025 e = ssa_redirect_edge (e, dest);
6027 return e;
6030 /* Returns true if it is possible to remove edge E by redirecting
6031 it to the destination of the other edge from E->src. */
6033 static bool
6034 gimple_can_remove_branch_p (const_edge e)
6036 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6037 return false;
6039 return true;
6042 /* Simple wrapper, as we can always redirect fallthru edges. */
6044 static basic_block
6045 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6047 e = gimple_redirect_edge_and_branch (e, dest);
6048 gcc_assert (e);
6050 return NULL;
6054 /* Splits basic block BB after statement STMT (but at least after the
6055 labels). If STMT is NULL, BB is split just after the labels. */
6057 static basic_block
6058 gimple_split_block (basic_block bb, void *stmt)
6060 gimple_stmt_iterator gsi;
6061 gimple_stmt_iterator gsi_tgt;
6062 gimple_seq list;
6063 basic_block new_bb;
6064 edge e;
6065 edge_iterator ei;
6067 new_bb = create_empty_bb (bb);
6069 /* Redirect the outgoing edges. */
6070 new_bb->succs = bb->succs;
6071 bb->succs = NULL;
6072 FOR_EACH_EDGE (e, ei, new_bb->succs)
6073 e->src = new_bb;
6075 /* Get a stmt iterator pointing to the first stmt to move. */
6076 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6077 gsi = gsi_after_labels (bb);
6078 else
6080 gsi = gsi_for_stmt ((gimple *) stmt);
6081 gsi_next (&gsi);
6084 /* Move everything from GSI to the new basic block. */
6085 if (gsi_end_p (gsi))
6086 return new_bb;
6088 /* Split the statement list - avoid re-creating new containers as this
6089 brings ugly quadratic memory consumption in the inliner.
6090 (We are still quadratic since we need to update stmt BB pointers,
6091 sadly.) */
6092 gsi_split_seq_before (&gsi, &list);
6093 set_bb_seq (new_bb, list);
6094 for (gsi_tgt = gsi_start (list);
6095 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6096 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6098 return new_bb;
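/* Illustrative sketch: splitting after STMT turns

     <bb 2>: s1; s2; STMT; s3; s4;

   into

     <bb 2>: s1; s2; STMT;
     <bb 5>: s3; s4;

   where the new block takes over all of BB's successor edges and the
   statement sequence is split in place rather than copied.  */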
6102 /* Moves basic block BB after block AFTER. */
6104 static bool
6105 gimple_move_block_after (basic_block bb, basic_block after)
6107 if (bb->prev_bb == after)
6108 return true;
6110 unlink_block (bb);
6111 link_block (bb, after);
6113 return true;
6117 /* Return TRUE if block BB has no executable statements, otherwise return
6118 FALSE. */
6120 static bool
6121 gimple_empty_block_p (basic_block bb)
6123 /* BB must have no executable statements. */
6124 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6125 if (phi_nodes (bb))
6126 return false;
6127 if (gsi_end_p (gsi))
6128 return true;
6129 if (is_gimple_debug (gsi_stmt (gsi)))
6130 gsi_next_nondebug (&gsi);
6131 return gsi_end_p (gsi);
6135 /* Split a basic block if it ends with a conditional branch and if the
6136 other part of the block is not empty. */
6138 static basic_block
6139 gimple_split_block_before_cond_jump (basic_block bb)
6141 gimple *last, *split_point;
6142 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6143 if (gsi_end_p (gsi))
6144 return NULL;
6145 last = gsi_stmt (gsi);
6146 if (gimple_code (last) != GIMPLE_COND
6147 && gimple_code (last) != GIMPLE_SWITCH)
6148 return NULL;
6149 gsi_prev (&gsi);
6150 split_point = gsi_stmt (gsi);
6151 return split_block (bb, split_point)->dest;
6155 /* Return true if the basic block BB can be duplicated. */
6157 static bool
6158 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6160 return true;
6163 /* Create a duplicate of the basic block BB. NOTE: This does not
6164 preserve SSA form. */
6166 static basic_block
6167 gimple_duplicate_bb (basic_block bb)
6169 basic_block new_bb;
6170 gimple_stmt_iterator gsi_tgt;
6172 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6174 /* Copy the PHI nodes. We ignore PHI node arguments here because
6175 the incoming edges have not been set up yet. */
6176 for (gphi_iterator gpi = gsi_start_phis (bb);
6177 !gsi_end_p (gpi);
6178 gsi_next (&gpi))
6180 gphi *phi, *copy;
6181 phi = gpi.phi ();
6182 copy = create_phi_node (NULL_TREE, new_bb);
6183 create_new_def_for (gimple_phi_result (phi), copy,
6184 gimple_phi_result_ptr (copy));
6185 gimple_set_uid (copy, gimple_uid (phi));
6188 gsi_tgt = gsi_start_bb (new_bb);
6189 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6190 !gsi_end_p (gsi);
6191 gsi_next (&gsi))
6193 def_operand_p def_p;
6194 ssa_op_iter op_iter;
6195 tree lhs;
6196 gimple *stmt, *copy;
6198 stmt = gsi_stmt (gsi);
6199 if (gimple_code (stmt) == GIMPLE_LABEL)
6200 continue;
6202 /* Don't duplicate label debug stmts. */
6203 if (gimple_debug_bind_p (stmt)
6204 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6205 == LABEL_DECL)
6206 continue;
6208 /* Create a new copy of STMT and duplicate STMT's virtual
6209 operands. */
6210 copy = gimple_copy (stmt);
6211 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6213 maybe_duplicate_eh_stmt (copy, stmt);
6214 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6216 /* When copying around a stmt writing into a local non-user
6217 aggregate, make sure it won't share a stack slot with other
6218 vars. */
6219 lhs = gimple_get_lhs (stmt);
6220 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6222 tree base = get_base_address (lhs);
6223 if (base
6224 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6225 && DECL_IGNORED_P (base)
6226 && !TREE_STATIC (base)
6227 && !DECL_EXTERNAL (base)
6228 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6229 DECL_NONSHAREABLE (base) = 1;
6232 /* Create new names for all the definitions created by COPY and
6233 add replacement mappings for each new name. */
6234 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6235 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6238 return new_bb;
6241 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6243 static void
6244 add_phi_args_after_copy_edge (edge e_copy)
6246 basic_block bb, bb_copy = e_copy->src, dest;
6247 edge e;
6248 edge_iterator ei;
6249 gphi *phi, *phi_copy;
6250 tree def;
6251 gphi_iterator psi, psi_copy;
6253 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6254 return;
6256 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6258 if (e_copy->dest->flags & BB_DUPLICATED)
6259 dest = get_bb_original (e_copy->dest);
6260 else
6261 dest = e_copy->dest;
6263 e = find_edge (bb, dest);
6264 if (!e)
6266 /* During loop unrolling the target of the latch edge is copied.
6267 In this case we are not looking for the edge to DEST, but for
6268 the edge to the duplicated block whose original was DEST. */
6269 FOR_EACH_EDGE (e, ei, bb->succs)
6271 if ((e->dest->flags & BB_DUPLICATED)
6272 && get_bb_original (e->dest) == dest)
6273 break;
6276 gcc_assert (e != NULL);
6279 for (psi = gsi_start_phis (e->dest),
6280 psi_copy = gsi_start_phis (e_copy->dest);
6281 !gsi_end_p (psi);
6282 gsi_next (&psi), gsi_next (&psi_copy))
6284 phi = psi.phi ();
6285 phi_copy = psi_copy.phi ();
6286 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6287 add_phi_arg (phi_copy, def, e_copy,
6288 gimple_phi_arg_location_from_edge (phi, e));
6293 /* Basic block BB_COPY was created by code duplication. Add phi node
6294 arguments for edges going out of BB_COPY. The blocks that were
6295 duplicated have BB_DUPLICATED set. */
6297 void
6298 add_phi_args_after_copy_bb (basic_block bb_copy)
6300 edge e_copy;
6301 edge_iterator ei;
6303 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6305 add_phi_args_after_copy_edge (e_copy);
6309 /* Blocks in REGION_COPY array of length N_REGION were created by
6310 duplication of basic blocks. Add phi node arguments for edges
6311 going from these blocks. If E_COPY is not NULL, also add
6312 phi node arguments for its destination. */
6314 void
6315 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6316 edge e_copy)
6318 unsigned i;
6320 for (i = 0; i < n_region; i++)
6321 region_copy[i]->flags |= BB_DUPLICATED;
6323 for (i = 0; i < n_region; i++)
6324 add_phi_args_after_copy_bb (region_copy[i]);
6325 if (e_copy)
6326 add_phi_args_after_copy_edge (e_copy);
6328 for (i = 0; i < n_region; i++)
6329 region_copy[i]->flags &= ~BB_DUPLICATED;
6332 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6333 important exit edge EXIT. By important we mean that no SSA name defined
6334 inside the region is live over the other exit edges of the region. All entry
6335 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6336 to the duplicate of the region. Dominance and loop information is
6337 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6338 UPDATE_DOMINANCE is false then we assume that the caller will update the
6339 dominance information after calling this function. The new basic
6340 blocks are stored to REGION_COPY in the same order as they appear in REGION,
6341 provided that REGION_COPY is not NULL.
6342 The function returns false if it is unable to copy the region,
6343 true otherwise. */
6345 bool
6346 gimple_duplicate_sese_region (edge entry, edge exit,
6347 basic_block *region, unsigned n_region,
6348 basic_block *region_copy,
6349 bool update_dominance)
6351 unsigned i;
6352 bool free_region_copy = false, copying_header = false;
6353 struct loop *loop = entry->dest->loop_father;
6354 edge exit_copy;
6355 vec<basic_block> doms = vNULL;
6356 edge redirected;
6357 profile_count total_count = profile_count::uninitialized ();
6358 profile_count entry_count = profile_count::uninitialized ();
6360 if (!can_copy_bbs_p (region, n_region))
6361 return false;
6363 /* Some sanity checking. Note that we do not check for all possible
6364 misuses of the function.  That is, if you ask to copy something weird,
6365 it will work, but the state of structures probably will not be
6366 correct. */
6367 for (i = 0; i < n_region; i++)
6369 /* We do not handle subloops, i.e. all the blocks must belong to the
6370 same loop. */
6371 if (region[i]->loop_father != loop)
6372 return false;
6374 if (region[i] != entry->dest
6375 && region[i] == loop->header)
6376 return false;
6379 /* In case the function is used for loop header copying (which is the primary
6380 use), ensure that EXIT and its copy will become the new latch and entry edges.  */
6381 if (loop->header == entry->dest)
6383 copying_header = true;
6385 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6386 return false;
6388 for (i = 0; i < n_region; i++)
6389 if (region[i] != exit->src
6390 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6391 return false;
6394 initialize_original_copy_tables ();
6396 if (copying_header)
6397 set_loop_copy (loop, loop_outer (loop));
6398 else
6399 set_loop_copy (loop, loop);
6401 if (!region_copy)
6403 region_copy = XNEWVEC (basic_block, n_region);
6404 free_region_copy = true;
6407 /* Record blocks outside the region that are dominated by something
6408 inside. */
6409 if (update_dominance)
6411 doms.create (0);
6412 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6415 if (entry->dest->count.initialized_p ())
6417 total_count = entry->dest->count;
6418 entry_count = entry->count ();
6419 /* Fix up corner cases, to avoid division by zero or creation of negative
6420 frequencies. */
6421 if (entry_count > total_count)
6422 entry_count = total_count;
6425 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6426 split_edge_bb_loc (entry), update_dominance);
6427 if (total_count.initialized_p () && entry_count.initialized_p ())
6429 scale_bbs_frequencies_profile_count (region, n_region,
6430 total_count - entry_count,
6431 total_count);
6432 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6433 total_count);
6436 if (copying_header)
6438 loop->header = exit->dest;
6439 loop->latch = exit->src;
6442 /* Redirect the entry and add the phi node arguments. */
6443 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6444 gcc_assert (redirected != NULL);
6445 flush_pending_stmts (entry);
6447 /* Concerning updating of dominators: We must recount dominators
6448 for entry block and its copy. Anything that is outside of the
6449 region, but was dominated by something inside needs recounting as
6450 well. */
6451 if (update_dominance)
6453 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6454 doms.safe_push (get_bb_original (entry->dest));
6455 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6456 doms.release ();
6459 /* Add the other PHI node arguments. */
6460 add_phi_args_after_copy (region_copy, n_region, NULL);
6462 if (free_region_copy)
6463 free (region_copy);
6465 free_original_copy_tables ();
6466 return true;
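/* Editor's sketch of a typical call, modeled on the loop-header-copying
   use noted above (variable names are hypothetical):

     basic_block *copied = XNEWVEC (basic_block, n_bbs);
     if (gimple_duplicate_sese_region (loop_preheader_edge (loop), exit,
				       bbs, n_bbs, copied, true))
       update_ssa (TODO_update_ssa);
     free (copied);  */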
6469 /* Checks if BB is part of the region defined by N_REGION BBS. */
6470 static bool
6471 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6473 unsigned int n;
6475 for (n = 0; n < n_region; n++)
6477 if (bb == bbs[n])
6478 return true;
6480 return false;
6483 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6484 are stored to REGION_COPY in the same order in which they appear
6485 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6486 the region, EXIT an exit from it. The condition guarding EXIT
6487 is moved to ENTRY. Returns true if duplication succeeds, false
6488 otherwise.
6490    For example,
6492    some_code;
6493    if (cond)
6494      part1;
6495    else
6496      part2;
6498    is transformed to
6500    if (cond)
6501      {
6502        some_code;
6503        part1;
6504      }
6505    else
6506      {
6507        some_code;
6508        part2;
6509      }
6510 */
6512 bool
6513 gimple_duplicate_sese_tail (edge entry, edge exit,
6514 basic_block *region, unsigned n_region,
6515 basic_block *region_copy)
6517 unsigned i;
6518 bool free_region_copy = false;
6519 struct loop *loop = exit->dest->loop_father;
6520 struct loop *orig_loop = entry->dest->loop_father;
6521 basic_block switch_bb, entry_bb, nentry_bb;
6522 vec<basic_block> doms;
6523 profile_count total_count = profile_count::uninitialized (),
6524 exit_count = profile_count::uninitialized ();
6525 edge exits[2], nexits[2], e;
6526 gimple_stmt_iterator gsi;
6527 gimple *cond_stmt;
6528 edge sorig, snew;
6529 basic_block exit_bb;
6530 gphi_iterator psi;
6531 gphi *phi;
6532 tree def;
6533 struct loop *target, *aloop, *cloop;
6535 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6536 exits[0] = exit;
6537 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6539 if (!can_copy_bbs_p (region, n_region))
6540 return false;
6542 initialize_original_copy_tables ();
6543 set_loop_copy (orig_loop, loop);
6545 target = loop;
6546 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6548 if (bb_part_of_region_p (aloop->header, region, n_region))
6550 cloop = duplicate_loop (aloop, target);
6551 duplicate_subloops (aloop, cloop);
6555 if (!region_copy)
6557 region_copy = XNEWVEC (basic_block, n_region);
6558 free_region_copy = true;
6561 gcc_assert (!need_ssa_update_p (cfun));
6563 /* Record blocks outside the region that are dominated by something
6564 inside. */
6565 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6567 total_count = exit->src->count;
6568 exit_count = exit->count ();
6569 /* Fix up corner cases, to avoid division by zero or creation of negative
6570 frequencies. */
6571 if (exit_count > total_count)
6572 exit_count = total_count;
6574 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6575 split_edge_bb_loc (exit), true);
6576 if (total_count.initialized_p () && exit_count.initialized_p ())
6578 scale_bbs_frequencies_profile_count (region, n_region,
6579 total_count - exit_count,
6580 total_count);
6581 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6582 total_count);
6585 /* Create the switch block, and put the exit condition to it. */
6586 entry_bb = entry->dest;
6587 nentry_bb = get_bb_copy (entry_bb);
6588 if (!last_stmt (entry->src)
6589 || !stmt_ends_bb_p (last_stmt (entry->src)))
6590 switch_bb = entry->src;
6591 else
6592 switch_bb = split_edge (entry);
6593 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6595 gsi = gsi_last_bb (switch_bb);
6596 cond_stmt = last_stmt (exit->src);
6597 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6598 cond_stmt = gimple_copy (cond_stmt);
6600 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6602 sorig = single_succ_edge (switch_bb);
6603 sorig->flags = exits[1]->flags;
6604 sorig->probability = exits[1]->probability;
6605 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6606 snew->probability = exits[0]->probability;
6609 /* Register the new edge from SWITCH_BB in loop exit lists. */
6610 rescan_loop_exit (snew, true, false);
6612 /* Add the PHI node arguments. */
6613 add_phi_args_after_copy (region_copy, n_region, snew);
6615 /* Get rid of now superfluous conditions and associated edges (and phi node
6616 arguments). */
6617 exit_bb = exit->dest;
6619 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6620 PENDING_STMT (e) = NULL;
6622 /* The latch of ORIG_LOOP was copied, and so was the backedge
6623 to the original header. We redirect this backedge to EXIT_BB. */
6624 for (i = 0; i < n_region; i++)
6625 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6627 gcc_assert (single_succ_edge (region_copy[i]));
6628 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6629 PENDING_STMT (e) = NULL;
6630 for (psi = gsi_start_phis (exit_bb);
6631 !gsi_end_p (psi);
6632 gsi_next (&psi))
6634 phi = psi.phi ();
6635 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6636 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6639 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6640 PENDING_STMT (e) = NULL;
6642 /* Anything that is outside of the region, but was dominated by something
6643 inside needs to update dominance info. */
6644 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6645 doms.release ();
6646 /* Update the SSA web. */
6647 update_ssa (TODO_update_ssa);
6649 if (free_region_copy)
6650 free (region_copy);
6652 free_original_copy_tables ();
6653 return true;
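/* Editor's illustration (not from the original source): SWITCH_BB ends
   up holding a copy of the exit condition; the edge carrying the flags
   of the original exit leads to the duplicated tail, whose back edge is
   redirected to EXIT_BB, while the other edge re-enters the original
   region:

       switch_bb --(exit taken)--> region_copy --> exit_bb
		 \--(otherwise)--> region (original)  */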
6656 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6657 adding blocks when the dominator traversal reaches EXIT. This
6658 function silently assumes that ENTRY strictly dominates EXIT. */
6660 void
6661 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6662 vec<basic_block> *bbs_p)
6664 basic_block son;
6666 for (son = first_dom_son (CDI_DOMINATORS, entry);
6667 son;
6668 son = next_dom_son (CDI_DOMINATORS, son))
6670 bbs_p->safe_push (son);
6671 if (son != exit)
6672 gather_blocks_in_sese_region (son, exit, bbs_p);
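/* Sketch of the typical use, mirroring move_sese_region_to_fn below;
   ENTRY is pushed manually because the recursion only collects its
   dominated sons:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */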
6676 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6677 The duplicates are recorded in VARS_MAP. */
6679 static void
6680 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6681 tree to_context)
6683 tree t = *tp, new_t;
6684 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6686 if (DECL_CONTEXT (t) == to_context)
6687 return;
6689 bool existed;
6690 tree &loc = vars_map->get_or_insert (t, &existed);
6692 if (!existed)
6694 if (SSA_VAR_P (t))
6696 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6697 add_local_decl (f, new_t);
6699 else
6701 gcc_assert (TREE_CODE (t) == CONST_DECL);
6702 new_t = copy_node (t);
6704 DECL_CONTEXT (new_t) = to_context;
6706 loc = new_t;
6708 else
6709 new_t = loc;
6711 *tp = new_t;
6715 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6716 VARS_MAP maps old ssa names and var_decls to the new ones. */
6718 static tree
6719 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6720 tree to_context)
6722 tree new_name;
6724 gcc_assert (!virtual_operand_p (name));
6726 tree *loc = vars_map->get (name);
6728 if (!loc)
6730 tree decl = SSA_NAME_VAR (name);
6731 if (decl)
6733 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6734 replace_by_duplicate_decl (&decl, vars_map, to_context);
6735 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6736 decl, SSA_NAME_DEF_STMT (name));
6738 else
6739 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6740 name, SSA_NAME_DEF_STMT (name));
6742 /* Now that we've used the def stmt to define new_name, make sure it
6743 doesn't define name anymore. */
6744 SSA_NAME_DEF_STMT (name) = NULL;
6746 vars_map->put (name, new_name);
6748 else
6749 new_name = *loc;
6751 return new_name;
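/* For example (editor's illustration): given a name "i_4" of VAR_DECL
   "i" from the source function, the code above first duplicates "i"
   into TO_CONTEXT via replace_by_duplicate_decl, then creates a fresh
   SSA name of the duplicate in the destination function; the version
   number is whatever the destination function assigns next.  */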
6754 struct move_stmt_d
6756 tree orig_block;
6757 tree new_block;
6758 tree from_context;
6759 tree to_context;
6760 hash_map<tree, tree> *vars_map;
6761 htab_t new_label_map;
6762 hash_map<void *, void *> *eh_map;
6763 bool remap_decls_p;
6766 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6767 contained in *TP if it was previously ORIG_BLOCK, and change the
6768 DECL_CONTEXT of every local variable referenced in *TP. */
6770 static tree
6771 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6773 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6774 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6775 tree t = *tp;
6777 if (EXPR_P (t))
6779 tree block = TREE_BLOCK (t);
6780 if (block == NULL_TREE)
6782 else if (block == p->orig_block
6783 || p->orig_block == NULL_TREE)
6784 TREE_SET_BLOCK (t, p->new_block);
6785 else if (flag_checking)
6787 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6788 block = BLOCK_SUPERCONTEXT (block);
6789 gcc_assert (block == p->orig_block);
6792 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6794 if (TREE_CODE (t) == SSA_NAME)
6795 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6796 else if (TREE_CODE (t) == PARM_DECL
6797 && gimple_in_ssa_p (cfun))
6798 *tp = *(p->vars_map->get (t));
6799 else if (TREE_CODE (t) == LABEL_DECL)
6801 if (p->new_label_map)
6803 struct tree_map in, *out;
6804 in.base.from = t;
6805 out = (struct tree_map *)
6806 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6807 if (out)
6808 *tp = t = out->to;
6811 /* For FORCED_LABELs we can end up with references from other
6812 functions if some SESE regions are outlined.  It is undefined
6813 behavior to jump between them, but they could be used just for printing
6814 addresses etc. In that case, DECL_CONTEXT on the label should
6815 be the function containing the glabel stmt with that LABEL_DECL,
6816 rather than whatever function a reference to the label was seen
6817 last time. */
6818 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6819 DECL_CONTEXT (t) = p->to_context;
6821 else if (p->remap_decls_p)
6823 /* Replace T with its duplicate. T should no longer appear in the
6824 parent function, so this looks wasteful; however, it may appear
6825 in referenced_vars, and more importantly, as virtual operands of
6826 statements, and in alias lists of other variables. It would be
6827 quite difficult to expunge it from all those places. ??? It might
6828 suffice to do this for addressable variables. */
6829 if ((VAR_P (t) && !is_global_var (t))
6830 || TREE_CODE (t) == CONST_DECL)
6831 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6833 *walk_subtrees = 0;
6835 else if (TYPE_P (t))
6836 *walk_subtrees = 0;
6838 return NULL_TREE;
6841 /* Helper for move_stmt_r. Given an EH region number for the source
6842 function, map that to the duplicate EH region number in the dest.  */
6844 static int
6845 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6847 eh_region old_r, new_r;
6849 old_r = get_eh_region_from_number (old_nr);
6850 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6852 return new_r->index;
6855 /* Similar, but operate on INTEGER_CSTs. */
6857 static tree
6858 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6860 int old_nr, new_nr;
6862 old_nr = tree_to_shwi (old_t_nr);
6863 new_nr = move_stmt_eh_region_nr (old_nr, p);
6865 return build_int_cst (integer_type_node, new_nr);
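/* E.g. (editor's note) a call __builtin_eh_pointer (2) in the source
   function has its operand rewritten to the index of the corresponding
   duplicated region, say __builtin_eh_pointer (1); the actual numbers
   depend on the mapping built by duplicate_eh_regions.  */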
6868 /* Like move_stmt_op, but for gimple statements.
6870 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6871 contained in the current statement in *GSI_P and change the
6872 DECL_CONTEXT of every local variable referenced in the current
6873 statement. */
6875 static tree
6876 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6877 struct walk_stmt_info *wi)
6879 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6880 gimple *stmt = gsi_stmt (*gsi_p);
6881 tree block = gimple_block (stmt);
6883 if (block == p->orig_block
6884 || (p->orig_block == NULL_TREE
6885 && block != NULL_TREE))
6886 gimple_set_block (stmt, p->new_block);
6888 switch (gimple_code (stmt))
6890 case GIMPLE_CALL:
6891 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6893 tree r, fndecl = gimple_call_fndecl (stmt);
6894 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6895 switch (DECL_FUNCTION_CODE (fndecl))
6897 case BUILT_IN_EH_COPY_VALUES:
6898 r = gimple_call_arg (stmt, 1);
6899 r = move_stmt_eh_region_tree_nr (r, p);
6900 gimple_call_set_arg (stmt, 1, r);
6901 /* FALLTHRU */
6903 case BUILT_IN_EH_POINTER:
6904 case BUILT_IN_EH_FILTER:
6905 r = gimple_call_arg (stmt, 0);
6906 r = move_stmt_eh_region_tree_nr (r, p);
6907 gimple_call_set_arg (stmt, 0, r);
6908 break;
6910 default:
6911 break;
6914 break;
6916 case GIMPLE_RESX:
6918 gresx *resx_stmt = as_a <gresx *> (stmt);
6919 int r = gimple_resx_region (resx_stmt);
6920 r = move_stmt_eh_region_nr (r, p);
6921 gimple_resx_set_region (resx_stmt, r);
6923 break;
6925 case GIMPLE_EH_DISPATCH:
6927 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6928 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6929 r = move_stmt_eh_region_nr (r, p);
6930 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6932 break;
6934 case GIMPLE_OMP_RETURN:
6935 case GIMPLE_OMP_CONTINUE:
6936 break;
6938 case GIMPLE_LABEL:
6940 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6941 so that such labels can be referenced from other regions.
6942 Make sure to update it when seeing a GIMPLE_LABEL though,
6943 that is the owner of the label. */
6944 walk_gimple_op (stmt, move_stmt_op, wi);
6945 *handled_ops_p = true;
6946 tree label = gimple_label_label (as_a <glabel *> (stmt));
6947 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6948 DECL_CONTEXT (label) = p->to_context;
6950 break;
6952 default:
6953 if (is_gimple_omp (stmt))
6955 /* Do not remap variables inside OMP directives. Variables
6956 referenced in clauses and directive header belong to the
6957 parent function and should not be moved into the child
6958 function. */
6959 bool save_remap_decls_p = p->remap_decls_p;
6960 p->remap_decls_p = false;
6961 *handled_ops_p = true;
6963 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6964 move_stmt_op, wi);
6966 p->remap_decls_p = save_remap_decls_p;
6968 break;
6971 return NULL_TREE;
6974 /* Move basic block BB from function CFUN to function DEST_FN. The
6975 block is moved out of the original linked list and placed after
6976 block AFTER in the new list. Also, the block is removed from the
6977 original array of blocks and placed in DEST_FN's array of blocks.
6978 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6979 updated to reflect the moved edges.
6981 The local variables are remapped to new instances, VARS_MAP is used
6982 to record the mapping. */
6984 static void
6985 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6986 basic_block after, bool update_edge_count_p,
6987 struct move_stmt_d *d)
6989 struct control_flow_graph *cfg;
6990 edge_iterator ei;
6991 edge e;
6992 gimple_stmt_iterator si;
6993 unsigned old_len, new_len;
6995 /* Remove BB from dominance structures. */
6996 delete_from_dominance_info (CDI_DOMINATORS, bb);
6998 /* Move BB from its current loop to the copy in the new function. */
6999 if (current_loops)
7001 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7002 if (new_loop)
7003 bb->loop_father = new_loop;
7006 /* Link BB to the new linked list. */
7007 move_block_after (bb, after);
7009 /* Update the edge count in the corresponding flowgraphs. */
7010 if (update_edge_count_p)
7011 FOR_EACH_EDGE (e, ei, bb->succs)
7013 cfun->cfg->x_n_edges--;
7014 dest_cfun->cfg->x_n_edges++;
7017 /* Remove BB from the original basic block array. */
7018 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7019 cfun->cfg->x_n_basic_blocks--;
7021 /* Grow DEST_CFUN's basic block array if needed. */
7022 cfg = dest_cfun->cfg;
7023 cfg->x_n_basic_blocks++;
7024 if (bb->index >= cfg->x_last_basic_block)
7025 cfg->x_last_basic_block = bb->index + 1;
7027 old_len = vec_safe_length (cfg->x_basic_block_info);
7028 if ((unsigned) cfg->x_last_basic_block >= old_len)
7030 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7031 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7034 (*cfg->x_basic_block_info)[bb->index] = bb;
7036 /* Remap the variables in phi nodes. */
7037 for (gphi_iterator psi = gsi_start_phis (bb);
7038 !gsi_end_p (psi); )
7040 gphi *phi = psi.phi ();
7041 use_operand_p use;
7042 tree op = PHI_RESULT (phi);
7043 ssa_op_iter oi;
7044 unsigned i;
7046 if (virtual_operand_p (op))
7048 /* Remove the phi nodes for virtual operands (alias analysis will be
7049 run for the new function, anyway). */
7050 remove_phi_node (&psi, true);
7051 continue;
7054 SET_PHI_RESULT (phi,
7055 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7056 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7058 op = USE_FROM_PTR (use);
7059 if (TREE_CODE (op) == SSA_NAME)
7060 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7063 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7065 location_t locus = gimple_phi_arg_location (phi, i);
7066 tree block = LOCATION_BLOCK (locus);
7068 if (locus == UNKNOWN_LOCATION)
7069 continue;
7070 if (d->orig_block == NULL_TREE || block == d->orig_block)
7072 locus = set_block (locus, d->new_block);
7073 gimple_phi_arg_set_location (phi, i, locus);
7077 gsi_next (&psi);
7080 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7082 gimple *stmt = gsi_stmt (si);
7083 struct walk_stmt_info wi;
7085 memset (&wi, 0, sizeof (wi));
7086 wi.info = d;
7087 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7089 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7091 tree label = gimple_label_label (label_stmt);
7092 int uid = LABEL_DECL_UID (label);
7094 gcc_assert (uid > -1);
7096 old_len = vec_safe_length (cfg->x_label_to_block_map);
7097 if (old_len <= (unsigned) uid)
7099 new_len = 3 * uid / 2 + 1;
7100 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7103 (*cfg->x_label_to_block_map)[uid] = bb;
7104 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7106 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7108 if (uid >= dest_cfun->cfg->last_label_uid)
7109 dest_cfun->cfg->last_label_uid = uid + 1;
7112 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7113 remove_stmt_from_eh_lp_fn (cfun, stmt);
7115 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7116 gimple_remove_stmt_histograms (cfun, stmt);
7118 /* We cannot leave any operands allocated from the operand caches of
7119 the current function. */
7120 free_stmt_operands (cfun, stmt);
7121 push_cfun (dest_cfun);
7122 update_stmt (stmt);
7123 pop_cfun ();
7126 FOR_EACH_EDGE (e, ei, bb->succs)
7127 if (e->goto_locus != UNKNOWN_LOCATION)
7129 tree block = LOCATION_BLOCK (e->goto_locus);
7130 if (d->orig_block == NULL_TREE
7131 || block == d->orig_block)
7132 e->goto_locus = set_block (e->goto_locus, d->new_block);
7136 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7137 the outermost EH region. Use REGION as the incoming base EH region. */
7139 static eh_region
7140 find_outermost_region_in_block (struct function *src_cfun,
7141 basic_block bb, eh_region region)
7143 gimple_stmt_iterator si;
7145 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7147 gimple *stmt = gsi_stmt (si);
7148 eh_region stmt_region;
7149 int lp_nr;
7151 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7152 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7153 if (stmt_region)
7155 if (region == NULL)
7156 region = stmt_region;
7157 else if (stmt_region != region)
7159 region = eh_region_outermost (src_cfun, stmt_region, region);
7160 gcc_assert (region != NULL);
7165 return region;
7168 static tree
7169 new_label_mapper (tree decl, void *data)
7171 htab_t hash = (htab_t) data;
7172 struct tree_map *m;
7173 void **slot;
7175 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7177 m = XNEW (struct tree_map);
7178 m->hash = DECL_UID (decl);
7179 m->base.from = decl;
7180 m->to = create_artificial_label (UNKNOWN_LOCATION);
7181 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7182 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7183 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7185 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7186 gcc_assert (*slot == NULL);
7188 *slot = m;
7190 return m->to;
7193 /* Tree walker to replace the decls used inside value expressions by
7194 duplicates. */
7196 static tree
7197 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7199 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7201 switch (TREE_CODE (*tp))
7203 case VAR_DECL:
7204 case PARM_DECL:
7205 case RESULT_DECL:
7206 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7207 break;
7208 default:
7209 break;
7212 if (IS_TYPE_OR_DECL_P (*tp))
7213 *walk_subtrees = false;
7215 return NULL;
7218 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7219 subblocks. */
7221 static void
7222 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7223 tree to_context)
7225 tree *tp, t;
7227 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7229 t = *tp;
7230 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7231 continue;
7232 replace_by_duplicate_decl (&t, vars_map, to_context);
7233 if (t != *tp)
7235 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7237 tree x = DECL_VALUE_EXPR (*tp);
7238 struct replace_decls_d rd = { vars_map, to_context };
7239 unshare_expr (x);
7240 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7241 SET_DECL_VALUE_EXPR (t, x);
7242 DECL_HAS_VALUE_EXPR_P (t) = 1;
7244 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7245 *tp = t;
7249 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7250 replace_block_vars_by_duplicates (block, vars_map, to_context);
7253 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7254 from FN1 to FN2. */
7256 static void
7257 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7258 struct loop *loop)
7260 /* Discard it from the old loop array. */
7261 (*get_loops (fn1))[loop->num] = NULL;
7263 /* Place it in the new loop array, assigning it a new number. */
7264 loop->num = number_of_loops (fn2);
7265 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7267 /* Recurse to children. */
7268 for (loop = loop->inner; loop; loop = loop->next)
7269 fixup_loop_arrays_after_move (fn1, fn2, loop);
7272 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7273 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7275 DEBUG_FUNCTION void
7276 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7278 basic_block bb;
7279 edge_iterator ei;
7280 edge e;
7281 bitmap bbs = BITMAP_ALLOC (NULL);
7282 int i;
7284 gcc_assert (entry != NULL);
7285 gcc_assert (entry != exit);
7286 gcc_assert (bbs_p != NULL);
7288 gcc_assert (bbs_p->length () > 0);
7290 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7291 bitmap_set_bit (bbs, bb->index);
7293 gcc_assert (bitmap_bit_p (bbs, entry->index));
7294 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7296 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7298 if (bb == entry)
7300 gcc_assert (single_pred_p (entry));
7301 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7303 else
7304 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7306 e = ei_edge (ei);
7307 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7310 if (bb == exit)
7312 gcc_assert (single_succ_p (exit));
7313 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7315 else
7316 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7318 e = ei_edge (ei);
7319 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7323 BITMAP_FREE (bbs);
7326 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7328 bool
7329 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7331 bitmap release_names = (bitmap)data;
7333 if (TREE_CODE (from) != SSA_NAME)
7334 return true;
7336 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7337 return true;
7340 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7341 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7342 single basic block in the original CFG and the new basic block is
7343 returned. DEST_CFUN must not have a CFG yet.
7345 Note that the region need not be a pure SESE region. Blocks inside
7346 the region may contain calls to abort/exit. The only restriction
7347 is that ENTRY_BB should be the only entry point and it must
7348 dominate EXIT_BB.
7350 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7351 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7352 to the new function.
7354 All local variables referenced in the region are assumed to be in
7355 the corresponding BLOCK_VARS and unexpanded variable lists
7356 associated with DEST_CFUN.
7358 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7359 reimplement move_sese_region_to_fn by duplicating the region rather than
7360 moving it. */
7362 basic_block
7363 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7364 basic_block exit_bb, tree orig_block)
7366 vec<basic_block> bbs, dom_bbs;
7367 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7368 basic_block after, bb, *entry_pred, *exit_succ, abb;
7369 struct function *saved_cfun = cfun;
7370 int *entry_flag, *exit_flag;
7371 profile_probability *entry_prob, *exit_prob;
7372 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7373 edge e;
7374 edge_iterator ei;
7375 htab_t new_label_map;
7376 hash_map<void *, void *> *eh_map;
7377 struct loop *loop = entry_bb->loop_father;
7378 struct loop *loop0 = get_loop (saved_cfun, 0);
7379 struct move_stmt_d d;
7381 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7382 region. */
7383 gcc_assert (entry_bb != exit_bb
7384 && (!exit_bb
7385 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7387 /* Collect all the blocks in the region. Manually add ENTRY_BB
7388 because it won't be added by dfs_enumerate_from. */
7389 bbs.create (0);
7390 bbs.safe_push (entry_bb);
7391 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7393 if (flag_checking)
7394 verify_sese (entry_bb, exit_bb, &bbs);
7396 /* The blocks that used to be dominated by something in BBS will now be
7397 dominated by the new block. */
7398 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7399 bbs.address (),
7400 bbs.length ());
7402 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7403 the predecessor edges to ENTRY_BB and the successor edges to
7404 EXIT_BB so that we can re-attach them to the new basic block that
7405 will replace the region. */
7406 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7407 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7408 entry_flag = XNEWVEC (int, num_entry_edges);
7409 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7410 i = 0;
7411 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7413 entry_prob[i] = e->probability;
7414 entry_flag[i] = e->flags;
7415 entry_pred[i++] = e->src;
7416 remove_edge (e);
7419 if (exit_bb)
7421 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7422 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7423 exit_flag = XNEWVEC (int, num_exit_edges);
7424 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7425 i = 0;
7426 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7428 exit_prob[i] = e->probability;
7429 exit_flag[i] = e->flags;
7430 exit_succ[i++] = e->dest;
7431 remove_edge (e);
7434 else
7436 num_exit_edges = 0;
7437 exit_succ = NULL;
7438 exit_flag = NULL;
7439 exit_prob = NULL;
7442 /* Switch context to the child function to initialize DEST_FN's CFG. */
7443 gcc_assert (dest_cfun->cfg == NULL);
7444 push_cfun (dest_cfun);
7446 init_empty_tree_cfg ();
7448 /* Initialize EH information for the new function. */
7449 eh_map = NULL;
7450 new_label_map = NULL;
7451 if (saved_cfun->eh)
7453 eh_region region = NULL;
7455 FOR_EACH_VEC_ELT (bbs, i, bb)
7456 region = find_outermost_region_in_block (saved_cfun, bb, region);
7458 init_eh_for_function ();
7459 if (region != NULL)
7461 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7462 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7463 new_label_mapper, new_label_map);
7467 /* Initialize an empty loop tree. */
7468 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7469 init_loops_structure (dest_cfun, loops, 1);
7470 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7471 set_loops_for_fn (dest_cfun, loops);
7473 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7475 /* Move the outlined loop tree part. */
7476 num_nodes = bbs.length ();
7477 FOR_EACH_VEC_ELT (bbs, i, bb)
7479 if (bb->loop_father->header == bb)
7481 struct loop *this_loop = bb->loop_father;
7482 struct loop *outer = loop_outer (this_loop);
7483 if (outer == loop
7484 /* If the SESE region contains some bbs ending with
7485 a noreturn call, those are considered to belong
7486 to the outermost loop in saved_cfun, rather than
7487 the entry_bb's loop_father. */
7488 || outer == loop0)
7490 if (outer != loop)
7491 num_nodes -= this_loop->num_nodes;
7492 flow_loop_tree_node_remove (bb->loop_father);
7493 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7494 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7497 else if (bb->loop_father == loop0 && loop0 != loop)
7498 num_nodes--;
7500 /* Remove loop exits from the outlined region. */
7501 if (loops_for_fn (saved_cfun)->exits)
7502 FOR_EACH_EDGE (e, ei, bb->succs)
7504 struct loops *l = loops_for_fn (saved_cfun);
7505 loop_exit **slot
7506 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7507 NO_INSERT);
7508 if (slot)
7509 l->exits->clear_slot (slot);
7514 /* Adjust the number of blocks in the tree root of the outlined part. */
7515 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7517 /* Setup a mapping to be used by move_block_to_fn. */
7518 loop->aux = current_loops->tree_root;
7519 loop0->aux = current_loops->tree_root;
7521 /* Fix up orig_loop_num. If the block referenced in it has been moved
7522 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7523 struct loop *dloop;
7524 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7525 if (dloop->orig_loop_num)
7527 if ((*larray)[dloop->orig_loop_num] != NULL
7528 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7529 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7530 else
7531 dloop->orig_loop_num = 0;
7533 ggc_free (larray);
7535 pop_cfun ();
7537 /* Move blocks from BBS into DEST_CFUN. */
7538 gcc_assert (bbs.length () >= 2);
7539 after = dest_cfun->cfg->x_entry_block_ptr;
7540 hash_map<tree, tree> vars_map;
7542 memset (&d, 0, sizeof (d));
7543 d.orig_block = orig_block;
7544 d.new_block = DECL_INITIAL (dest_cfun->decl);
7545 d.from_context = cfun->decl;
7546 d.to_context = dest_cfun->decl;
7547 d.vars_map = &vars_map;
7548 d.new_label_map = new_label_map;
7549 d.eh_map = eh_map;
7550 d.remap_decls_p = true;
7552 if (gimple_in_ssa_p (cfun))
7553 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7555 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7556 set_ssa_default_def (dest_cfun, arg, narg);
7557 vars_map.put (arg, narg);
7560 FOR_EACH_VEC_ELT (bbs, i, bb)
7562 /* No need to update edge counts on the last block.  They have
7563 already been updated earlier when we detached the region from
7564 the original CFG. */
7565 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7566 after = bb;
7569 loop->aux = NULL;
7570 loop0->aux = NULL;
7571 /* Loop sizes are no longer correct, fix them up. */
7572 loop->num_nodes -= num_nodes;
7573 for (struct loop *outer = loop_outer (loop);
7574 outer; outer = loop_outer (outer))
7575 outer->num_nodes -= num_nodes;
7576 loop0->num_nodes -= bbs.length () - num_nodes;
7578 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7580 struct loop *aloop;
7581 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7582 if (aloop != NULL)
7584 if (aloop->simduid)
7586 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7587 d.to_context);
7588 dest_cfun->has_simduid_loops = true;
7590 if (aloop->force_vectorize)
7591 dest_cfun->has_force_vectorize_loops = true;
7595 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7596 if (orig_block)
7598 tree block;
7599 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7600 == NULL_TREE);
7601 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7602 = BLOCK_SUBBLOCKS (orig_block);
7603 for (block = BLOCK_SUBBLOCKS (orig_block);
7604 block; block = BLOCK_CHAIN (block))
7605 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7606 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7609 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7610 &vars_map, dest_cfun->decl);
7612 if (new_label_map)
7613 htab_delete (new_label_map);
7614 if (eh_map)
7615 delete eh_map;
7617 if (gimple_in_ssa_p (cfun))
7619 /* We need to release ssa-names in a defined order, so first find them,
7620 and then iterate in ascending version order. */
7621 bitmap release_names = BITMAP_ALLOC (NULL);
7622 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7623 bitmap_iterator bi;
7624 unsigned i;
7625 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7626 release_ssa_name (ssa_name (i));
7627 BITMAP_FREE (release_names);
7630 /* Rewire the entry and exit blocks. The successor to the entry
7631 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7632 the child function. Similarly, the predecessor of DEST_FN's
7633 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7634 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7635 various CFG manipulation functions get to the right CFG.
7637 FIXME, this is silly. The CFG ought to become a parameter to
7638 these helpers. */
7639 push_cfun (dest_cfun);
7640 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7641 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7642 if (exit_bb)
7644 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7645 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7647 else
7648 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7649 pop_cfun ();
7651 /* Back in the original function, the SESE region has disappeared,
7652 create a new basic block in its place. */
7653 bb = create_empty_bb (entry_pred[0]);
7654 if (current_loops)
7655 add_bb_to_loop (bb, loop);
7656 for (i = 0; i < num_entry_edges; i++)
7658 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7659 e->probability = entry_prob[i];
7662 for (i = 0; i < num_exit_edges; i++)
7664 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7665 e->probability = exit_prob[i];
7668 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7669 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7670 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7671 dom_bbs.release ();
7673 if (exit_bb)
7675 free (exit_prob);
7676 free (exit_flag);
7677 free (exit_succ);
7679 free (entry_prob);
7680 free (entry_flag);
7681 free (entry_pred);
7682 bbs.release ();
7684 return bb;
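/* Editor's sketch of a typical caller, modeled on the OMP outlining in
   omp-expand.c (names are illustrative):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);  */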
7687 /* Dump default def DEF to file FILE using FLAGS and indentation
7688 SPC. */
7690 static void
7691 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7693 for (int i = 0; i < spc; ++i)
7694 fprintf (file, " ");
7695 dump_ssaname_info_to_file (file, def, spc);
7697 print_generic_expr (file, TREE_TYPE (def), flags);
7698 fprintf (file, " ");
7699 print_generic_expr (file, def, flags);
7700 fprintf (file, " = ");
7701 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7702 fprintf (file, ";\n");
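/* Sample output (editor's illustration): for a parameter "i" whose
   default definition is "i_2(D)", this prints a line like

     int i_2(D) = i;

   possibly preceded by SSA range or points-to info from
   dump_ssaname_info_to_file.  */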
7705 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7707 static void
7708 print_no_sanitize_attr_value (FILE *file, tree value)
7710 unsigned int flags = tree_to_uhwi (value);
7711 bool first = true;
7712 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7714 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7716 if (!first)
7717 fprintf (file, " | ");
7718 fprintf (file, "%s", sanitizer_opts[i].name);
7719 first = false;
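/* Sample output (editor's illustration): a no_sanitize value combining
   the address and undefined checks would print as "address | undefined";
   the names come from the sanitizer_opts table.  */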
7724 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h).  */
7727 void
7728 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7730 tree arg, var, old_current_fndecl = current_function_decl;
7731 struct function *dsf;
7732 bool ignore_topmost_bind = false, any_var = false;
7733 basic_block bb;
7734 tree chain;
7735 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7736 && decl_is_tm_clone (fndecl));
7737 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7739 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7741 fprintf (file, "__attribute__((");
7743 bool first = true;
7744 tree chain;
7745 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7746 first = false, chain = TREE_CHAIN (chain))
7748 if (!first)
7749 fprintf (file, ", ");
7751 tree name = get_attribute_name (chain);
7752 print_generic_expr (file, name, dump_flags);
7753 if (TREE_VALUE (chain) != NULL_TREE)
7755 fprintf (file, " (");
7757 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7758 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7759 else
7760 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7761 fprintf (file, ")");
7765 fprintf (file, "))\n");
7768 current_function_decl = fndecl;
7769 if (flags & TDF_GIMPLE)
7771 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7772 dump_flags | TDF_SLIM);
7773 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7775 else
7776 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7778 arg = DECL_ARGUMENTS (fndecl);
7779 while (arg)
7781 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7782 fprintf (file, " ");
7783 print_generic_expr (file, arg, dump_flags);
7784 if (DECL_CHAIN (arg))
7785 fprintf (file, ", ");
7786 arg = DECL_CHAIN (arg);
7788 fprintf (file, ")\n");
7790 dsf = DECL_STRUCT_FUNCTION (fndecl);
7791 if (dsf && (flags & TDF_EH))
7792 dump_eh_tree (file, dsf);
7794 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7796 dump_node (fndecl, TDF_SLIM | flags, file);
7797 current_function_decl = old_current_fndecl;
7798 return;
7801 /* When GIMPLE is lowered, the variables are no longer available in
7802 BIND_EXPRs, so display them separately. */
7803 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7805 unsigned ix;
7806 ignore_topmost_bind = true;
7808 fprintf (file, "{\n");
7809 if (gimple_in_ssa_p (fun)
7810 && (flags & TDF_ALIAS))
7812 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7813 arg = DECL_CHAIN (arg))
7815 tree def = ssa_default_def (fun, arg);
7816 if (def)
7817 dump_default_def (file, def, 2, flags);
7820 tree res = DECL_RESULT (fun->decl);
7821 if (res != NULL_TREE
7822 && DECL_BY_REFERENCE (res))
7824 tree def = ssa_default_def (fun, res);
7825 if (def)
7826 dump_default_def (file, def, 2, flags);
7829 tree static_chain = fun->static_chain_decl;
7830 if (static_chain != NULL_TREE)
7832 tree def = ssa_default_def (fun, static_chain);
7833 if (def)
7834 dump_default_def (file, def, 2, flags);
7838 if (!vec_safe_is_empty (fun->local_decls))
7839 FOR_EACH_LOCAL_DECL (fun, ix, var)
7841 print_generic_decl (file, var, flags);
7842 fprintf (file, "\n");
7844 any_var = true;
7847 tree name;
7849 if (gimple_in_ssa_p (cfun))
7850 FOR_EACH_SSA_NAME (ix, name, cfun)
7852 if (!SSA_NAME_VAR (name))
7854 fprintf (file, " ");
7855 print_generic_expr (file, TREE_TYPE (name), flags);
7856 fprintf (file, " ");
7857 print_generic_expr (file, name, flags);
7858 fprintf (file, ";\n");
7860 any_var = true;
7865 if (fun && fun->decl == fndecl
7866 && fun->cfg
7867 && basic_block_info_for_fn (fun))
7869 /* If the CFG has been built, emit a CFG-based dump. */
7870 if (!ignore_topmost_bind)
7871 fprintf (file, "{\n");
7873 if (any_var && n_basic_blocks_for_fn (fun))
7874 fprintf (file, "\n");
7876 FOR_EACH_BB_FN (bb, fun)
7877 dump_bb (file, bb, 2, flags);
7879 fprintf (file, "}\n");
7881 else if (fun->curr_properties & PROP_gimple_any)
7883 /* The function is now in GIMPLE form but the CFG has not been
7884 built yet. Emit the single sequence of GIMPLE statements
7885 that make up its body. */
7886 gimple_seq body = gimple_body (fndecl);
7888 if (gimple_seq_first_stmt (body)
7889 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7890 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7891 print_gimple_seq (file, body, 0, flags);
7892 else
7894 if (!ignore_topmost_bind)
7895 fprintf (file, "{\n");
7897 if (any_var)
7898 fprintf (file, "\n");
7900 print_gimple_seq (file, body, 2, flags);
7901 fprintf (file, "}\n");
7904 else
7906 int indent;
7908 /* Make a tree based dump. */
7909 chain = DECL_SAVED_TREE (fndecl);
7910 if (chain && TREE_CODE (chain) == BIND_EXPR)
7912 if (ignore_topmost_bind)
7914 chain = BIND_EXPR_BODY (chain);
7915 indent = 2;
7917 else
7918 indent = 0;
7920 else
7922 if (!ignore_topmost_bind)
7924 fprintf (file, "{\n");
7925 /* No topmost bind, pretend it's ignored for later. */
7926 ignore_topmost_bind = true;
7928 indent = 2;
7931 if (any_var)
7932 fprintf (file, "\n");
7934 print_generic_stmt_indented (file, chain, flags, indent);
7935 if (ignore_topmost_bind)
7936 fprintf (file, "}\n");
7939 if (flags & TDF_ENUMERATE_LOCALS)
7940 dump_enumerated_decls (file, flags);
7941 fprintf (file, "\n\n");
7943 current_function_decl = old_current_fndecl;
7946 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7948 DEBUG_FUNCTION void
7949 debug_function (tree fn, dump_flags_t flags)
7951 dump_function_to_file (fn, stderr, flags);
7955 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7957 static void
7958 print_pred_bbs (FILE *file, basic_block bb)
7960 edge e;
7961 edge_iterator ei;
7963 FOR_EACH_EDGE (e, ei, bb->preds)
7964 fprintf (file, "bb_%d ", e->src->index);
7968 /* Print on FILE the indexes for the successors of basic_block BB. */
7970 static void
7971 print_succ_bbs (FILE *file, basic_block bb)
7973 edge e;
7974 edge_iterator ei;
7976 FOR_EACH_EDGE (e, ei, bb->succs)
7977 fprintf (file, "bb_%d ", e->dest->index);
7980 /* Print to FILE the basic block BB following the VERBOSITY level. */
7982 void
7983 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7985 char *s_indent = (char *) alloca ((size_t) indent + 1);
7986 memset ((void *) s_indent, ' ', (size_t) indent);
7987 s_indent[indent] = '\0';
7989 /* Print basic_block's header. */
7990 if (verbosity >= 2)
7992 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7993 print_pred_bbs (file, bb);
7994 fprintf (file, "}, succs = {");
7995 print_succ_bbs (file, bb);
7996 fprintf (file, "})\n");
7999 /* Print basic_block's body. */
8000 if (verbosity >= 3)
8002 fprintf (file, "%s {\n", s_indent);
8003 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8004 fprintf (file, "%s }\n", s_indent);
8008 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8010 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8011 VERBOSITY level this outputs the contents of the loop, or just its
8012 structure. */
8014 static void
8015 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8017 char *s_indent;
8018 basic_block bb;
8020 if (loop == NULL)
8021 return;
8023 s_indent = (char *) alloca ((size_t) indent + 1);
8024 memset ((void *) s_indent, ' ', (size_t) indent);
8025 s_indent[indent] = '\0';
8027 /* Print loop's header. */
8028 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8029 if (loop->header)
8030 fprintf (file, "header = %d", loop->header->index);
8031 else
8033 fprintf (file, "deleted)\n");
8034 return;
8036 if (loop->latch)
8037 fprintf (file, ", latch = %d", loop->latch->index);
8038 else
8039 fprintf (file, ", multiple latches");
8040 fprintf (file, ", niter = ");
8041 print_generic_expr (file, loop->nb_iterations);
8043 if (loop->any_upper_bound)
8045 fprintf (file, ", upper_bound = ");
8046 print_decu (loop->nb_iterations_upper_bound, file);
8048 if (loop->any_likely_upper_bound)
8050 fprintf (file, ", likely_upper_bound = ");
8051 print_decu (loop->nb_iterations_likely_upper_bound, file);
8054 if (loop->any_estimate)
8056 fprintf (file, ", estimate = ");
8057 print_decu (loop->nb_iterations_estimate, file);
8059 if (loop->unroll)
8060 fprintf (file, ", unroll = %d", loop->unroll);
8061 fprintf (file, ")\n");
8063 /* Print loop's body. */
8064 if (verbosity >= 1)
8066 fprintf (file, "%s{\n", s_indent);
8067 FOR_EACH_BB_FN (bb, cfun)
8068 if (bb->loop_father == loop)
8069 print_loops_bb (file, bb, indent, verbosity);
8071 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8072 fprintf (file, "%s}\n", s_indent);
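/* Sample header line (editor's illustration, verbosity 0):

     loop_1 (header = 3, latch = 4, niter = n_7, upper_bound = 99)  */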
8076 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8077 spaces. Following VERBOSITY level this outputs the contents of the
8078 loop, or just its structure. */
8080 static void
8081 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8082 int verbosity)
8084 if (loop == NULL)
8085 return;
8087 print_loop (file, loop, indent, verbosity);
8088 print_loop_and_siblings (file, loop->next, indent, verbosity);
8091 /* Follow a CFG edge from the entry point of the program, and on entry
8092 of a loop, pretty print the loop structure on FILE. */
8094 void
8095 print_loops (FILE *file, int verbosity)
8097 basic_block bb;
8099 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8100 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8101 if (bb && bb->loop_father)
8102 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8105 /* Dump a loop. */
8107 DEBUG_FUNCTION void
8108 debug (struct loop &ref)
8110 print_loop (stderr, &ref, 0, /*verbosity*/0);
8113 DEBUG_FUNCTION void
8114 debug (struct loop *ptr)
8116 if (ptr)
8117 debug (*ptr);
8118 else
8119 fprintf (stderr, "<nil>\n");
8122 /* Dump a loop verbosely. */
8124 DEBUG_FUNCTION void
8125 debug_verbose (struct loop &ref)
8127 print_loop (stderr, &ref, 0, /*verbosity*/3);
8130 DEBUG_FUNCTION void
8131 debug_verbose (struct loop *ptr)
8133 if (ptr)
8134 debug_verbose (*ptr);
8135 else
8136 fprintf (stderr, "<nil>\n");
8140 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8142 DEBUG_FUNCTION void
8143 debug_loops (int verbosity)
8145 print_loops (stderr, verbosity);
8148 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8150 DEBUG_FUNCTION void
8151 debug_loop (struct loop *loop, int verbosity)
8153 print_loop (stderr, loop, 0, verbosity);
8156 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8157 level. */
8159 DEBUG_FUNCTION void
8160 debug_loop_num (unsigned num, int verbosity)
8162 debug_loop (get_loop (cfun, num), verbosity);
8165 /* Return true if BB ends with a call, possibly followed by some
8166 instructions that must stay with the call.  Return false
8167 otherwise. */
8169 static bool
8170 gimple_block_ends_with_call_p (basic_block bb)
8172 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8173 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8177 /* Return true if BB ends with a conditional branch.  Return false
8178 otherwise. */
8180 static bool
8181 gimple_block_ends_with_condjump_p (const_basic_block bb)
8183 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8184 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8188 /* Return true if statement T may terminate execution of BB in ways not
8189 explicitly represented in the CFG.  */
8191 bool
8192 stmt_can_terminate_bb_p (gimple *t)
8194 tree fndecl = NULL_TREE;
8195 int call_flags = 0;
8197 /* An EH exception not handled internally terminates execution of the whole
8198 function. */
8199 if (stmt_can_throw_external (t))
8200 return true;
8202 /* NORETURN and LONGJMP calls already have an edge to exit.
8203 CONST and PURE calls do not need one.
8204 We don't currently check for CONST and PURE here, although
8205 it would be a good idea, because those attributes are
8206 figured out from the RTL in mark_constant_function, and
8207 the counter incrementation code from -fprofile-arcs
8208 leads to different results from -fbranch-probabilities. */
8209 if (is_gimple_call (t))
8211 fndecl = gimple_call_fndecl (t);
8212 call_flags = gimple_call_flags (t);
8215 if (is_gimple_call (t)
8216 && fndecl
8217 && DECL_BUILT_IN (fndecl)
8218 && (call_flags & ECF_NOTHROW)
8219 && !(call_flags & ECF_RETURNS_TWICE)
8220 /* fork() doesn't really return twice, but the effect of
8221 wrapping it in __gcov_fork() which calls __gcov_flush()
8222 and clears the counters before forking has the same
8223 effect as returning twice. Force a fake edge. */
8224 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8225 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8226 return false;
8228 if (is_gimple_call (t))
8230 edge_iterator ei;
8231 edge e;
8232 basic_block bb;
8234 if (call_flags & (ECF_PURE | ECF_CONST)
8235 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8236 return false;
8238 /* A function call may do a longjmp, terminate the program, or do other
8239 things.  Special-case noreturn calls that have non-abnormal outgoing
8240 edges, as in that case the fact is sufficiently represented by the lack of edges out of T.  */
8241 if (!(call_flags & ECF_NORETURN))
8242 return true;
8244 bb = gimple_bb (t);
8245 FOR_EACH_EDGE (e, ei, bb->succs)
8246 if ((e->flags & EDGE_FAKE) == 0)
8247 return true;
8250 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8251 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8252 return true;
8254 return false;
8258 /* Add fake edges to the function exit for any non-constant and
8259 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8260 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8261 or in the whole CFG if BLOCKS is zero.  Return the number of blocks
8262 that were split.
8264 The goal is to expose cases in which entering a basic block does
8265 not imply that all subsequent instructions must be executed. */
8267 static int
8268 gimple_flow_call_edges_add (sbitmap blocks)
8270 int i;
8271 int blocks_split = 0;
8272 int last_bb = last_basic_block_for_fn (cfun);
8273 bool check_last_block = false;
8275 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8276 return 0;
8278 if (! blocks)
8279 check_last_block = true;
8280 else
8281 check_last_block = bitmap_bit_p (blocks,
8282 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8284 /* In the last basic block, before epilogue generation, there will be
8285 a fallthru edge to EXIT. Special care is required if the last insn
8286 of the last basic block is a call because make_edge folds duplicate
8287 edges, which would result in the fallthru edge also being marked
8288 fake, which would result in the fallthru edge being removed by
8289 remove_fake_edges, which would result in an invalid CFG.
8291 Moreover, we can't elide the outgoing fake edge, since the block
8292 profiler needs to take this into account in order to solve the minimal
8293 spanning tree in the case that the call doesn't return.
8295 Handle this by adding a dummy instruction in a new last basic block. */
8296 if (check_last_block)
8298 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8299 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8300 gimple *t = NULL;
8302 if (!gsi_end_p (gsi))
8303 t = gsi_stmt (gsi);
8305 if (t && stmt_can_terminate_bb_p (t))
8307 edge e;
8309 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8310 if (e)
8312 gsi_insert_on_edge (e, gimple_build_nop ());
8313 gsi_commit_edge_inserts ();
8318 /* Now add fake edges to the function exit for any non-constant
8319 calls, since there is no way to determine whether they will
8320 return or not.  */
8321 for (i = 0; i < last_bb; i++)
8323 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8324 gimple_stmt_iterator gsi;
8325 gimple *stmt, *last_stmt;
8327 if (!bb)
8328 continue;
8330 if (blocks && !bitmap_bit_p (blocks, i))
8331 continue;
8333 gsi = gsi_last_nondebug_bb (bb);
8334 if (!gsi_end_p (gsi))
8336 last_stmt = gsi_stmt (gsi);
8339 stmt = gsi_stmt (gsi);
8340 if (stmt_can_terminate_bb_p (stmt))
8342 edge e;
8344 /* The handling above of the final block before the
8345 epilogue should be enough to verify that there is
8346 no edge to the exit block in CFG already.
8347 Calling make_edge in such case would cause us to
8348 mark that edge as fake and remove it later. */
8349 if (flag_checking && stmt == last_stmt)
8351 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8352 gcc_assert (e == NULL);
8355 /* Note that the following may create a new basic block
8356 and renumber the existing basic blocks. */
8357 if (stmt != last_stmt)
8359 e = split_block (bb, stmt);
8360 if (e)
8361 blocks_split++;
8363 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8364 e->probability = profile_probability::guessed_never ();
8366 gsi_prev (&gsi);
8368 while (!gsi_end_p (gsi));
8372 if (blocks_split)
8373 checking_verify_flow_info ();
8375 return blocks_split;
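/* Editor's illustration: a block "a = foo (); b = a + 1;" whose call
   may not return is split after the call, and the first half gets a
   fake edge to EXIT, so entering the block no longer implies reaching
   its end:

       bb:  a = foo ();  --(EDGE_FAKE)--> EXIT
	|
       bb': b = a + 1;  */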
8378 /* Removes edge E and all the blocks dominated by it, and updates dominance
8379 information. The IL in E->src needs to be updated separately.
8380 If dominance info is not available, only the edge E is removed.  */
8382 void
8383 remove_edge_and_dominated_blocks (edge e)
8385 vec<basic_block> bbs_to_remove = vNULL;
8386 vec<basic_block> bbs_to_fix_dom = vNULL;
8387 edge f;
8388 edge_iterator ei;
8389 bool none_removed = false;
8390 unsigned i;
8391 basic_block bb, dbb;
8392 bitmap_iterator bi;
8394 /* If we are removing a path inside a non-root loop, that may change
8395 loop ownership of blocks or remove loops.  Mark loops for fixup.  */
8396 if (current_loops
8397 && loop_outer (e->src->loop_father) != NULL
8398 && e->src->loop_father == e->dest->loop_father)
8399 loops_state_set (LOOPS_NEED_FIXUP);
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);
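      /* Collect the immediate dominators of the surviving
	 dominance-frontier blocks; per the argument before the final
	 loop below, these are the only places where dominance may
	 need fixing up.  */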
      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }
  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
/* Purge dead EH edges from basic block BB.  */

bool
gimple_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (stmt && stmt_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */

bool
gimple_purge_all_dead_eh_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* An earlier gimple_purge_dead_eh_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_eh_edges (bb);
    }

  return changed;
}
/* Purge dead abnormal call edges from basic block BB.  */

bool
gimple_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  gimple *stmt = last_stmt (bb);

  if (!cfun->has_nonlocal_label
      && !cfun->calls_setjmp)
    return false;

  if (stmt && stmt_can_make_abnormal_goto (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_ABNORMAL)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags &= ~EDGE_ABNORMAL;
	  else
	    remove_edge_and_dominated_blocks (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  return changed;
}
/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */

bool
gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
{
  bool changed = false;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

      /* An earlier gimple_purge_dead_abnormal_call_edges could have removed
	 this basic block already.  */
      gcc_assert (bb || changed);
      if (bb != NULL)
	changed |= gimple_purge_dead_abnormal_call_edges (bb);
    }

  return changed;
}
/* This function is called whenever a new edge is created or
   redirected.  */

static void
gimple_execute_on_growing_pred (edge e)
{
  basic_block bb = e->dest;

  if (!gimple_seq_empty_p (phi_nodes (bb)))
    reserve_phi_args_for_new_edge (bb);
}

/* This function is called immediately before edge E is removed from
   the edge vector E->dest->preds.  */

static void
gimple_execute_on_shrinking_pred (edge e)
{
  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
    remove_phi_args (e);
}
/*---------------------------------------------------------------------------
  Helper functions for Loop versioning
  ---------------------------------------------------------------------------*/

/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by the 'new_head' basic block.
   When 'new_head' was created by splitting 'second's incoming edge, it
   received phi arguments on that edge from split_edge().  Later, an
   additional edge 'e' was created to connect 'new_head' and 'first'.  This
   routine adds, on this additional edge 'e', the phi args that the
   'new_head' to 'second' edge received as part of the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple *new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build the new conditional expression.  */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add the new cond stmt to COND_BB.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect the new head with the first
     head as well as the second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
}
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
   then do post-pass accounting.  Store the counting in RECORD.  */

static void
gimple_account_profile_record (basic_block bb, int after_pass,
			       struct profile_record *record)
{
  gimple_stmt_iterator i;
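  /* Accumulate a size estimate for every statement, and a time estimate
     weighted by BB's execution count (the profiled count if one is
     available, otherwise the guessed frequency).  */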
  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
    {
      record->size[after_pass]
	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
      if (bb->count.initialized_p ())
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_gcov_type ();
      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
	record->time[after_pass]
	  += estimate_num_insns (gsi_stmt (i),
				 &eni_time_weights) * bb->count.to_frequency (cfun);
    }
}
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forwarder_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru  */
  gimple_block_ends_with_call_p, /* block_ends_with_call_p  */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p  */
  gimple_flow_call_edges_add,	/* flow_call_edges_add  */
  gimple_execute_on_growing_pred, /* execute_on_growing_pred  */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred  */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees  */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb  */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges  */
  flush_pending_stmts,		/* flush_pending_stmts  */
  gimple_empty_block_p,		/* block_empty_p  */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump  */
  gimple_account_profile_record,
};
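/* These hooks are installed via gimple_register_cfg_hooks so that the
   IL-independent CFG code in cfghooks.c operates on GIMPLE; the selftests
   at the end of this file register them before building their test
   CFGs.  */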
/* Split all critical edges.  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements on edges and expects that, since
	     split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to
	     critical edges we must split edges whose source ends with
	     a control flow statement, such as RESX, and whose
	     destination cannot directly accept an insertion (it has
	     PHIs, multiple predecessors, or is the exit block).
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
/* Insert COND expression, which must be a GIMPLE_COND, after STMT
   in basic block BB, with the appropriate basic block split
   and creation of a new conditionally executed basic block.
   Update the profile so the new bb is visited with probability PROB.
   Return the created basic block.  */

basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert the cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create the conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix the edge for the split bb.  */
  fall->flags = EDGE_FALSE_VALUE;
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
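/* A sketch of typical use (hypothetical caller; VAL is an SSA name that
   is available after STMT): branch to a new block with an estimated 10%
   probability and emit instrumentation there:

     gcond *c = gimple_build_cond (NE_EXPR, val, null_pointer_node,
				   NULL_TREE, NULL_TREE);
     basic_block slow_bb
       = insert_cond_bb (bb, stmt, c,
			 profile_probability::guessed_always ()
			   .apply_scale (1, 10));
*/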
/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
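/* For example (a sketch, assuming SSA names A and B of signed integral
   type TYPE and a valid iterator GSI), a pass could materialize |A| + B
   before the current statement with:

     tree abs_a = gimplify_build1 (&gsi, ABS_EXPR, type, a);
     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, abs_a, b);
*/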
/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
/* From a controlling predicate in the immediate dominator DOM of
   PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
   predicate evaluates to true and false and store them to
   *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
   they are non-NULL.  Returns true if the edges can be determined,
   else return false.  */

bool
extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
				     edge *true_controlled_edge,
				     edge *false_controlled_edge)
{
  basic_block bb = phiblock;
  edge true_edge, false_edge, tem;
  edge e0 = NULL, e1 = NULL;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
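  /* Match the first predecessor edge of PHIBLOCK against the true and
     false edges of DOM.  */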
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
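  /* Likewise for the second predecessor edge.  */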
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    e0 = tem;
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    e1 = tem;
  else
    return false;
  if (!e0 || !e1)
    return false;

  if (true_controlled_edge)
    *true_controlled_edge = e0;
  if (false_controlled_edge)
    *false_controlled_edge = e1;

  return true;
}
/* Generate the operands of a range test: emit statements such that,
   afterwards, *LHS <= *RHS holds if and only if INDEX is in the range
   [LOW, HIGH].  Place the associated statements in BB, before its last
   statement.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = unsigned_type_for (type);
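  /* The test INDEX >= LOW && INDEX <= HIGH is equivalent, in the unsigned
     type, to the single comparison (utype)(INDEX - LOW) <= (utype)(HIGH - LOW):
     values of INDEX below LOW wrap around to large unsigned values.  */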
  low = fold_convert (type, low);
  high = fold_convert (type, high);

  tree tmp = make_ssa_name (type);
  gassign *sub1
    = gimple_build_assign (tmp, MINUS_EXPR, index, low);

  *lhs = make_ssa_name (utype);
  gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);

  *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
  gsi_insert_before (&gsi, a, GSI_SAME_STMT);
}
/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && ((location = LOCATION_LOCUS (gimple_location (last)))
		  != UNKNOWN_LOCATION)
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with a __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      gimple *new_stmt = gimple_build_call (fndecl, 0);
	      gimple_set_location (new_stmt, gimple_location (last));
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }
  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
	   && !TREE_NO_WARNING (fun->decl)
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple *last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
      /* The C++ FE turns a fallthrough from the end of a non-void function
	 into a __builtin_unreachable () call with BUILTINS_LOCATION.
	 Recognize those too.  */
      basic_block bb;
      if (!TREE_NO_WARNING (fun->decl))
	FOR_EACH_BB_FN (bb, fun)
	  if (EDGE_COUNT (bb->succs) == 0)
	    {
	      gimple *last = last_stmt (bb);
	      const enum built_in_function ubsan_missing_ret
		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
	      if (last
		  && ((LOCATION_LOCUS (gimple_location (last))
		       == BUILTINS_LOCATION
		       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
		{
		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
		  gsi_prev_nondebug (&gsi);
		  gimple *prev = gsi_stmt (gsi);
		  if (prev == NULL)
		    location = UNKNOWN_LOCATION;
		  else
		    location = gimple_location (prev);
		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		    location = fun->function_end_locus;
		  warning_at (location, OPT_Wreturn_type,
			      "control reaches end of non-void function");
		  TREE_NO_WARNING (fun->decl) = 1;
		  break;
		}
	    }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);
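      /* Recurse into statement containers; otherwise, only bare calls
	 (those with no LHS) are candidates for the warning.  */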
      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
  {
    do_warn_unused_result (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup, verify_flow_info would
   fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);
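  /* If the IPA profile count for this node no longer matches the count
     on the entry block, rescale all block counts to the new entry
     count.  */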
  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep the access when the store has a side effect, i.e. when
	     the source is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }

	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}
      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);
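/* Mark an rtx_insn for GGC; this covers the rtl flavor of an edge's
   pending statements (e->insns.r) below.  */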
static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}
/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
   - build_gimple_cfg
   - make_blocks: calls create_basic_block (seq, bb);
   - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */
static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}
/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
     B    C
      \  /
       \/
        D
        |
      EXIT.  */
static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |      ^
       V      |
       A<--->B
       ^^    ^^
       | \  / |
       |  X   |
       | /  \ |
       VV    VV
       C<--->D
*/
static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */