/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
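
/* An illustrative example (not part of the original sources): for a
   GIMPLE_SWITCH such as "switch (x) { case 1: case 2: goto L; }", the
   single edge to L's block maps to the two CASE_LABEL_EXPRs for 1 and 2,
   linked together through their CASE_CHAIN fields.  */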

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;
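
/* An illustrative example: in "if (x) foo (); else bar ();" written on a
   single source line, the then and else blocks share one location, so a
   sample-based profiler could not tell their execution counts apart;
   assigning distinct discriminators to blocks sharing a locus makes the
   two blocks distinguishable in the profile.  */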

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
        {
          stmt = gsi_stmt (gsi);
          if (! gimple_call_internal_p (stmt, IFN_PHI))
            break;

          lhs = gimple_call_lhs (stmt);
          phi_node = create_phi_node (lhs, bb);

          /* Add arguments to the PHI node.  */
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              if (TREE_CODE (arg) == LABEL_DECL)
                pred = label_to_block (arg);
              else
                {
                  edge e = find_edge (pred, bb);
                  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
                }
            }

          gsi_remove (&gsi, true);
        }
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto.  */
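/* An illustrative example: "goto *ptr;" is a computed goto because its
   destination is the value of PTR rather than a LABEL_DECL, while a plain
   "goto L;" is not.  */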

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}

/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

/* Insert SEQ after BB and build a flowgraph.  */
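/* Roughly: a label starts a new block, and control statements (conditions,
   gotos, switches, returns, and calls flagged above as control altering)
   end one; the precise rules live in stmt_starts_bb_p and stmt_ends_bb_p.  */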

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
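      /* I.e., grow the array by roughly 25% (rounded up) each time it
         fills, a standard amortized-growth strategy.  */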
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block
   with ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
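/* An illustrative sketch for the COMPUTED_GOTO case: two gotos "goto *p;"
   and "goto *q;" are rewritten as "gotovar = p; goto DISPATCH;" and
   "gotovar = q; goto DISPATCH;", where the new dispatcher block ends in
   the single computed goto "goto *gotovar;" (DISPATCH names the factored
   block only for this example).  */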

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */
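/* Callers typically pair this with end_recording_case_labels () around a
   series of edge redirections, so the recorded chains can be used to update
   the case vectors of affected GIMPLE_SWITCHes cheaply (see
   get_cases_for_edge).  */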

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */
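
/* An illustrative example: if both L1 and L2 label the same block and L2 is
   artificial and otherwise unreferenced, every branch to L2 is redirected to
   L1 and L2 is deleted.  User-defined labels, non-local labels and labels
   whose address is taken are never removed.  */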

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
         whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
        {
          i++;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (next_index < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, next_index);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::to_wide (base_high) + 1;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              next_index++;
            }
          else
            break;
        }

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
          && gimple_seq_unreachable_p (bb_seq (base_bb))
          /* Don't optimize this if __builtin_unreachable () is the
             implicitly added one by the C++ FE too early, before
             -Wreturn-type can be diagnosed.  We'll optimize it later
             during switchconv pass or any other cfg cleanup.  */
          && (gimple_in_ssa_p (cfun)
              || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
                  != BUILTINS_LOCATION)))
        {
          edge base_edge = find_edge (gimple_bb (stmt), base_bb);
          if (base_edge != NULL)
            remove_edge_and_dominated_blocks (base_edge);
          i = next_index;
          continue;
        }

      if (new_size < i)
        gimple_switch_set_label (stmt, new_size,
                                 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
        return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
          || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
          && stmt_ends_bb_p (stmt))
        bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
                                       PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL
                  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple *orig_stmt = stmt;
          size_t i;

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}
1978 /* Merge block B into block A. */
1980 static void
1981 gimple_merge_blocks (basic_block a, basic_block b)
1983 gimple_stmt_iterator last, gsi;
1984 gphi_iterator psi;
1986 if (dump_file)
1987 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1989 /* Remove all single-valued PHI nodes from block B of the form
1990 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1991 gsi = gsi_last_bb (a);
1992 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1994 gimple *phi = gsi_stmt (psi);
1995 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1996 gimple *copy;
1997 bool may_replace_uses = (virtual_operand_p (def)
1998 || may_propagate_copy (def, use));
2000 /* If we maintain loop-closed SSA form, do not propagate the arguments
2001 of loop exit PHI nodes. */
2002 if (current_loops
2003 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2004 && !virtual_operand_p (def)
2005 && TREE_CODE (use) == SSA_NAME
2006 && a->loop_father != b->loop_father)
2007 may_replace_uses = false;
2009 if (!may_replace_uses)
2011 gcc_assert (!virtual_operand_p (def));
2013 /* Note that just emitting the copies is fine -- there is no problem
2014 with the ordering of phi nodes. This is because A is the single
2015 predecessor of B, so the results of the phi nodes cannot
2016 appear as arguments of the phi nodes. */
2017 copy = gimple_build_assign (def, use);
2018 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2019 remove_phi_node (&psi, false);
2021 else
2023 /* If we deal with a PHI for virtual operands, we can simply
2024 propagate these without fussing with folding or updating
2025 the stmt. */
2026 if (virtual_operand_p (def))
2028 imm_use_iterator iter;
2029 use_operand_p use_p;
2030 gimple *stmt;
2032 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2033 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2034 SET_USE (use_p, use);
2036 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2037 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2039 else
2040 replace_uses_by (def, use);
2042 remove_phi_node (&psi, true);
2046 /* Ensure that B follows A. */
2047 move_block_after (b, a);
2049 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2050 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2052 /* Remove labels from B and set gimple_bb to A for other statements. */
2053 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2055 gimple *stmt = gsi_stmt (gsi);
2056 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2058 tree label = gimple_label_label (label_stmt);
2059 int lp_nr;
2061 gsi_remove (&gsi, false);
2063 /* Now that we can thread computed gotos, we might have
2064 a situation where we have a forced label in block B.
2065 However, the label at the start of block B might still be
2066 used in other ways (think about the runtime checking for
2067 Fortran assigned gotos). So we cannot just delete the
2068 label. Instead we move the label to the start of block A. */
2069 if (FORCED_LABEL (label))
2071 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2072 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2074 /* Other user labels are kept around in the form of a debug stmt. */
2075 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2077 gimple *dbg = gimple_build_debug_bind (label,
2078 integer_zero_node,
2079 stmt);
2080 gimple_debug_bind_reset_value (dbg);
2081 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2084 lp_nr = EH_LANDING_PAD_NR (label);
2085 if (lp_nr)
2087 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2088 lp->post_landing_pad = NULL;
2091 else
2093 gimple_set_bb (stmt, a);
2094 gsi_next (&gsi);
2098 /* When merging two BBs, if their counts are different, the larger count
2099 is selected as the new bb count. This is to handle inconsistent
2100 profiles. */
2101 if (a->loop_father == b->loop_father)
2103 a->count = a->count.merge (b->count);
2106 /* Merge the sequences. */
2107 last = gsi_last_bb (a);
2108 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2109 set_bb_seq (b, NULL);
2111 if (cfgcleanup_altered_bbs)
2112 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
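/* A worked example (hypothetical names): merging B into its single
   predecessor A when B begins with the degenerate PHI

       x_1 = PHI <y_2(A)>

   either propagates y_2 into every use of x_1 (when copy propagation
   is legal) or emits the equivalent copy "x_1 = y_2" at the end of A.
   Because A is B's only predecessor, no PHI result in B can feed
   another PHI in B, so the copies can be emitted in any order.  The
   remaining statements of B are then appended to A, and B's labels are
   removed, moved (FORCED_LABELs), or turned into debug binds.  */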
2116 /* Return the one of the two successors of BB that is not reachable by a
2117 complex edge, if there is one. Otherwise, return BB. We use
2118 this in optimizations that use post-dominators for their heuristics,
2119 to catch the cases in C++ where function calls are involved. */
2121 basic_block
2122 single_noncomplex_succ (basic_block bb)
2124 edge e0, e1;
2125 if (EDGE_COUNT (bb->succs) != 2)
2126 return bb;
2128 e0 = EDGE_SUCC (bb, 0);
2129 e1 = EDGE_SUCC (bb, 1);
2130 if (e0->flags & EDGE_COMPLEX)
2131 return e1->dest;
2132 if (e1->flags & EDGE_COMPLEX)
2133 return e0->dest;
2135 return bb;
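/* For example (a hypothetical block): if BB ends in

       t_1 = foo ();   // a call that may throw

   it has a fallthru successor and an EH successor; the EH edge is
   covered by EDGE_COMPLEX, so single_noncomplex_succ returns the
   fallthru destination.  For an ordinary two-way conditional neither
   edge is complex and BB itself is returned.  */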
2138 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2140 void
2141 notice_special_calls (gcall *call)
2143 int flags = gimple_call_flags (call);
2145 if (flags & ECF_MAY_BE_ALLOCA)
2146 cfun->calls_alloca = true;
2147 if (flags & ECF_RETURNS_TWICE)
2148 cfun->calls_setjmp = true;
2152 /* Clear flags set by notice_special_calls. Used by dead code removal
2153 to update the flags. */
2155 void
2156 clear_special_calls (void)
2158 cfun->calls_alloca = false;
2159 cfun->calls_setjmp = false;
2162 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2164 static void
2165 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2167 /* Since this block is no longer reachable, we can just delete all
2168 of its PHI nodes. */
2169 remove_phi_nodes (bb);
2171 /* Remove edges to BB's successors. */
2172 while (EDGE_COUNT (bb->succs) > 0)
2173 remove_edge (EDGE_SUCC (bb, 0));
2177 /* Remove statements of basic block BB. */
2179 static void
2180 remove_bb (basic_block bb)
2182 gimple_stmt_iterator i;
2184 if (dump_file)
2186 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2187 if (dump_flags & TDF_DETAILS)
2189 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2190 fprintf (dump_file, "\n");
2194 if (current_loops)
2196 struct loop *loop = bb->loop_father;
2198 /* If a loop gets removed, clean up the information associated
2199 with it. */
2200 if (loop->latch == bb
2201 || loop->header == bb)
2202 free_numbers_of_iterations_estimates (loop);
2205 /* Remove all the instructions in the block. */
2206 if (bb_seq (bb) != NULL)
2208 /* Walk backwards so as to get a chance to substitute all
2209 released DEFs into debug stmts. See
2210 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2211 details. */
2212 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2214 gimple *stmt = gsi_stmt (i);
2215 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2216 if (label_stmt
2217 && (FORCED_LABEL (gimple_label_label (label_stmt))
2218 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2220 basic_block new_bb;
2221 gimple_stmt_iterator new_gsi;
2223 /* A non-reachable non-local label may still be referenced.
2224 But it no longer needs to carry the extra semantics of
2225 non-locality. */
2226 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2228 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2229 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2232 new_bb = bb->prev_bb;
2233 new_gsi = gsi_start_bb (new_bb);
2234 gsi_remove (&i, false);
2235 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2237 else
2239 /* Release SSA definitions. */
2240 release_defs (stmt);
2241 gsi_remove (&i, true);
2244 if (gsi_end_p (i))
2245 i = gsi_last_bb (bb);
2246 else
2247 gsi_prev (&i);
2251 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2252 bb->il.gimple.seq = NULL;
2253 bb->il.gimple.phi_nodes = NULL;
2257 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2258 predicate VAL, return the edge that will be taken out of the block.
2259 If VAL does not match a unique edge, NULL is returned. */
2261 edge
2262 find_taken_edge (basic_block bb, tree val)
2264 gimple *stmt;
2266 stmt = last_stmt (bb);
2268 gcc_assert (is_ctrl_stmt (stmt));
2270 if (gimple_code (stmt) == GIMPLE_COND)
2271 return find_taken_edge_cond_expr (bb, val);
2273 if (gimple_code (stmt) == GIMPLE_SWITCH)
2274 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2276 if (computed_goto_p (stmt))
2278 /* Only optimize if the argument is a label; if the argument is
2279 not a label then we cannot construct a proper CFG.
2281 It may be the case that we only need to allow the LABEL_REF to
2282 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2283 appear inside a LABEL_EXPR just to be safe. */
2284 if (val
2285 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2286 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2287 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2288 return NULL;
2291 gcc_unreachable ();
2294 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2295 statement, determine which of the outgoing edges will be taken out of the
2296 block. Return NULL if either edge may be taken. */
2298 static edge
2299 find_taken_edge_computed_goto (basic_block bb, tree val)
2301 basic_block dest;
2302 edge e = NULL;
2304 dest = label_to_block (val);
2305 if (dest)
2307 e = find_edge (bb, dest);
2308 gcc_assert (e != NULL);
2311 return e;
2314 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2315 statement, determine which of the two edges will be taken out of the
2316 block. Return NULL if either edge may be taken. */
2318 static edge
2319 find_taken_edge_cond_expr (basic_block bb, tree val)
2321 edge true_edge, false_edge;
2323 if (val == NULL
2324 || TREE_CODE (val) != INTEGER_CST)
2325 return NULL;
2327 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2329 return (integer_zerop (val) ? false_edge : true_edge);
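/* For example (hypothetical names): for a block ending in

       if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   a VAL of integer zero selects the false edge to bb 4 and any other
   INTEGER_CST selects the true edge, while a non-constant VAL returns
   NULL since either edge may be taken.  */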
2332 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2333 statement, determine which edge will be taken out of the block. Return
2334 NULL if any edge may be taken. */
2336 static edge
2337 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2338 tree val)
2340 basic_block dest_bb;
2341 edge e;
2342 tree taken_case;
2344 if (gimple_switch_num_labels (switch_stmt) == 1)
2345 taken_case = gimple_switch_default_label (switch_stmt);
2346 else if (! val || TREE_CODE (val) != INTEGER_CST)
2347 return NULL;
2348 else
2349 taken_case = find_case_label_for_value (switch_stmt, val);
2350 dest_bb = label_to_block (CASE_LABEL (taken_case));
2352 e = find_edge (bb, dest_bb);
2353 gcc_assert (e);
2354 return e;
2358 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2359 We can make optimal use here of the fact that the case labels are
2360 sorted: We can do a binary search for a case matching VAL. */
2362 static tree
2363 find_case_label_for_value (gswitch *switch_stmt, tree val)
2365 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2366 tree default_case = gimple_switch_default_label (switch_stmt);
2368 for (low = 0, high = n; high - low > 1; )
2370 size_t i = (high + low) / 2;
2371 tree t = gimple_switch_label (switch_stmt, i);
2372 int cmp;
2374 /* Cache the result of comparing CASE_LOW and val. */
2375 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2377 if (cmp > 0)
2378 high = i;
2379 else
2380 low = i;
2382 if (CASE_HIGH (t) == NULL)
2384 /* A single-valued case label. */
2385 if (cmp == 0)
2386 return t;
2388 else
2390 /* A case range. We can only handle integer ranges. */
2391 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2392 return t;
2396 return default_case;
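/* A worked example (hypothetical labels): for the sorted label vector

       default:  case 1:  case 5 ... 8:  case 42:

   and VAL = 6, the bisection compares CASE_LOW of the middle label
   with VAL; at "case 5 ... 8" the comparison gives cmp < 0, and since
   CASE_HIGH = 8 >= 6 that range label is returned.  For VAL = 9, which
   falls between labels, the loop narrows down without finding a match
   and the default label is returned.  */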
2400 /* Dump a basic block on stderr. */
2402 void
2403 gimple_debug_bb (basic_block bb)
2405 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2409 /* Dump basic block with index N on stderr. */
2411 basic_block
2412 gimple_debug_bb_n (int n)
2414 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2415 return BASIC_BLOCK_FOR_FN (cfun, n);
2419 /* Dump the CFG on stderr.
2421 FLAGS are the same used by the tree dumping functions
2422 (see TDF_* in dumpfile.h). */
2424 void
2425 gimple_debug_cfg (dump_flags_t flags)
2427 gimple_dump_cfg (stderr, flags);
2431 /* Dump the program showing basic block boundaries on the given FILE.
2433 FLAGS are the same used by the tree dumping functions (see TDF_* in
2434 tree.h). */
2436 void
2437 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2439 if (flags & TDF_DETAILS)
2441 dump_function_header (file, current_function_decl, flags);
2442 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2443 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2444 last_basic_block_for_fn (cfun));
2446 brief_dump_cfg (file, flags);
2447 fprintf (file, "\n");
2450 if (flags & TDF_STATS)
2451 dump_cfg_stats (file);
2453 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2457 /* Dump CFG statistics on FILE. */
2459 void
2460 dump_cfg_stats (FILE *file)
2462 static long max_num_merged_labels = 0;
2463 unsigned long size, total = 0;
2464 long num_edges;
2465 basic_block bb;
2466 const char * const fmt_str = "%-30s%-13s%12s\n";
2467 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2468 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2469 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2470 const char *funcname = current_function_name ();
2472 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2474 fprintf (file, "---------------------------------------------------------\n");
2475 fprintf (file, fmt_str, "", " Number of ", "Memory");
2476 fprintf (file, fmt_str, "", " instances ", "used ");
2477 fprintf (file, "---------------------------------------------------------\n");
2479 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2480 total += size;
2481 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2482 SCALE (size), LABEL (size));
2484 num_edges = 0;
2485 FOR_EACH_BB_FN (bb, cfun)
2486 num_edges += EDGE_COUNT (bb->succs);
2487 size = num_edges * sizeof (struct edge_def);
2488 total += size;
2489 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2491 fprintf (file, "---------------------------------------------------------\n");
2492 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2493 LABEL (total));
2494 fprintf (file, "---------------------------------------------------------\n");
2495 fprintf (file, "\n");
2497 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2498 max_num_merged_labels = cfg_stats.num_merged_labels;
2500 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2501 cfg_stats.num_merged_labels, max_num_merged_labels);
2503 fprintf (file, "\n");
2507 /* Dump CFG statistics on stderr. Keep extern so that it's always
2508 linked in the final executable. */
2510 DEBUG_FUNCTION void
2511 debug_cfg_stats (void)
2513 dump_cfg_stats (stderr);
2516 /*---------------------------------------------------------------------------
2517 Miscellaneous helpers
2518 ---------------------------------------------------------------------------*/
2520 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2521 flow. Transfers of control flow associated with EH are excluded. */
2523 static bool
2524 call_can_make_abnormal_goto (gimple *t)
2526 /* If the function has no non-local labels, then a call cannot make an
2527 abnormal transfer of control. */
2528 if (!cfun->has_nonlocal_label
2529 && !cfun->calls_setjmp)
2530 return false;
2532 /* Likewise if the call has no side effects. */
2533 if (!gimple_has_side_effects (t))
2534 return false;
2536 /* Likewise if the called function is leaf. */
2537 if (gimple_call_flags (t) & ECF_LEAF)
2538 return false;
2540 return true;
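/* For example (a hypothetical function): in

       if (setjmp (env))
         return 1;
       g ();
       return 0;

   cfun->calls_setjmp is set, and the call to g () has side effects and
   is not ECF_LEAF, so g () may reach the setjmp receiver abnormally
   (via longjmp) and the call is treated as a potential abnormal
   goto.  */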
2544 /* Return true if T can make an abnormal transfer of control flow.
2545 Transfers of control flow associated with EH are excluded. */
2547 bool
2548 stmt_can_make_abnormal_goto (gimple *t)
2550 if (computed_goto_p (t))
2551 return true;
2552 if (is_gimple_call (t))
2553 return call_can_make_abnormal_goto (t);
2554 return false;
2558 /* Return true if T represents a stmt that always transfers control. */
2560 bool
2561 is_ctrl_stmt (gimple *t)
2563 switch (gimple_code (t))
2565 case GIMPLE_COND:
2566 case GIMPLE_SWITCH:
2567 case GIMPLE_GOTO:
2568 case GIMPLE_RETURN:
2569 case GIMPLE_RESX:
2570 return true;
2571 default:
2572 return false;
2577 /* Return true if T is a statement that may alter the flow of control
2578 (e.g., a call to a non-returning function). */
2580 bool
2581 is_ctrl_altering_stmt (gimple *t)
2583 gcc_assert (t);
2585 switch (gimple_code (t))
2587 case GIMPLE_CALL:
2588 /* The per-stmt call flag indicates whether the call could alter
2589 control flow. */
2590 if (gimple_call_ctrl_altering_p (t))
2591 return true;
2592 break;
2594 case GIMPLE_EH_DISPATCH:
2595 /* EH_DISPATCH branches to the individual catch handlers at
2596 this level of a try or allowed-exceptions region. It can
2597 fallthru to the next statement as well. */
2598 return true;
2600 case GIMPLE_ASM:
2601 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2602 return true;
2603 break;
2605 CASE_GIMPLE_OMP:
2606 /* OpenMP directives alter control flow. */
2607 return true;
2609 case GIMPLE_TRANSACTION:
2610 /* A transaction start alters control flow. */
2611 return true;
2613 default:
2614 break;
2617 /* If a statement can throw, it alters control flow. */
2618 return stmt_can_throw_internal (t);
2622 /* Return true if T is a simple local goto. */
2624 bool
2625 simple_goto_p (gimple *t)
2627 return (gimple_code (t) == GIMPLE_GOTO
2628 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
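/* A contrast (hypothetical statements): "goto L;" with a LABEL_DECL
   destination is a simple local goto and becomes an ordinary CFG edge,
   whereas the computed "goto *p_1;" has a non-LABEL_DECL destination
   and is handled separately, since its possible targets are only the
   address-taken labels.  */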
2632 /* Return true if STMT should start a new basic block. PREV_STMT is
2633 the statement preceding STMT. It is used when STMT is a label or a
2634 case label. Labels should only start a new basic block if their
2635 previous statement wasn't a label. Otherwise, sequences of labels
2636 would generate unnecessary basic blocks that only contain a single
2637 label. */
2639 static inline bool
2640 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2642 if (stmt == NULL)
2643 return false;
2645 /* Labels start a new basic block only if the preceding statement
2646 wasn't a label of the same type. This prevents the creation of
2647 consecutive blocks that have nothing but a single label. */
2648 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2650 /* Nonlocal and computed GOTO targets always start a new block. */
2651 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2652 || FORCED_LABEL (gimple_label_label (label_stmt)))
2653 return true;
2655 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2657 if (DECL_NONLOCAL (gimple_label_label (
2658 as_a <glabel *> (prev_stmt))))
2659 return true;
2661 cfg_stats.num_merged_labels++;
2662 return false;
2664 else
2665 return true;
2667 else if (gimple_code (stmt) == GIMPLE_CALL)
2669 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2670 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2671 start a new block. */
2672 return true;
2673 if (gimple_call_internal_p (stmt, IFN_PHI)
2674 && prev_stmt
2675 && gimple_code (prev_stmt) != GIMPLE_LABEL
2676 && (gimple_code (prev_stmt) != GIMPLE_CALL
2677 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2678 /* PHI nodes start a new block unless preceded by a label
2679 or another PHI. */
2680 return true;
2683 return false;
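/* For example (hypothetical labels):

       L1:
       L2:
         x = 1;

   L1 starts a new block; L2 does not (cfg_stats.num_merged_labels is
   bumped instead), so both labels share one block.  Nonlocal labels,
   FORCED_LABELs and setjmp-like calls always start a fresh block,
   because control may enter at them abnormally.  */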
2687 /* Return true if T should end a basic block. */
2689 bool
2690 stmt_ends_bb_p (gimple *t)
2692 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2695 /* Remove block annotations and other data structures. */
2697 void
2698 delete_tree_cfg_annotations (struct function *fn)
2700 vec_free (label_to_block_map_for_fn (fn));
2703 /* Return the virtual phi in BB. */
2705 gphi *
2706 get_virtual_phi (basic_block bb)
2708 for (gphi_iterator gsi = gsi_start_phis (bb);
2709 !gsi_end_p (gsi);
2710 gsi_next (&gsi))
2712 gphi *phi = gsi.phi ();
2714 if (virtual_operand_p (PHI_RESULT (phi)))
2715 return phi;
2718 return NULL;
2721 /* Return the first statement in basic block BB. */
2723 gimple *
2724 first_stmt (basic_block bb)
2726 gimple_stmt_iterator i = gsi_start_bb (bb);
2727 gimple *stmt = NULL;
2729 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2731 gsi_next (&i);
2732 stmt = NULL;
2734 return stmt;
2737 /* Return the first non-label statement in basic block BB. */
2739 static gimple *
2740 first_non_label_stmt (basic_block bb)
2742 gimple_stmt_iterator i = gsi_start_bb (bb);
2743 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2744 gsi_next (&i);
2745 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2748 /* Return the last statement in basic block BB. */
2750 gimple *
2751 last_stmt (basic_block bb)
2753 gimple_stmt_iterator i = gsi_last_bb (bb);
2754 gimple *stmt = NULL;
2756 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2758 gsi_prev (&i);
2759 stmt = NULL;
2761 return stmt;
2764 /* Return the last statement of an otherwise empty block. Return NULL
2765 if the block is totally empty, or if it contains more than one
2766 statement. */
2768 gimple *
2769 last_and_only_stmt (basic_block bb)
2771 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2772 gimple *last, *prev;
2774 if (gsi_end_p (i))
2775 return NULL;
2777 last = gsi_stmt (i);
2778 gsi_prev_nondebug (&i);
2779 if (gsi_end_p (i))
2780 return last;
2782 /* Empty statements should no longer appear in the instruction stream.
2783 Everything that might have appeared before should be deleted by
2784 remove_useless_stmts, and the optimizers should just gsi_remove
2785 instead of smashing with build_empty_stmt.
2787 Thus the only thing that should appear here in a block containing
2788 one executable statement is a label. */
2789 prev = gsi_stmt (i);
2790 if (gimple_code (prev) == GIMPLE_LABEL)
2791 return last;
2792 else
2793 return NULL;
2796 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2798 static void
2799 reinstall_phi_args (edge new_edge, edge old_edge)
2801 edge_var_map *vm;
2802 int i;
2803 gphi_iterator phis;
2805 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2806 if (!v)
2807 return;
2809 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2810 v->iterate (i, &vm) && !gsi_end_p (phis);
2811 i++, gsi_next (&phis))
2813 gphi *phi = phis.phi ();
2814 tree result = redirect_edge_var_map_result (vm);
2815 tree arg = redirect_edge_var_map_def (vm);
2817 gcc_assert (result == gimple_phi_result (phi));
2819 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2822 redirect_edge_var_map_clear (old_edge);
2825 /* Returns the basic block after which the new basic block created
2826 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2827 near its "logical" location. This is of most help to humans looking
2828 at debugging dumps. */
2830 basic_block
2831 split_edge_bb_loc (edge edge_in)
2833 basic_block dest = edge_in->dest;
2834 basic_block dest_prev = dest->prev_bb;
2836 if (dest_prev)
2838 edge e = find_edge (dest_prev, dest);
2839 if (e && !(e->flags & EDGE_COMPLEX))
2840 return edge_in->src;
2842 return dest_prev;
2845 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2846 Abort on abnormal edges. */
2848 static basic_block
2849 gimple_split_edge (edge edge_in)
2851 basic_block new_bb, after_bb, dest;
2852 edge new_edge, e;
2854 /* Abnormal edges cannot be split. */
2855 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2857 dest = edge_in->dest;
2859 after_bb = split_edge_bb_loc (edge_in);
2861 new_bb = create_empty_bb (after_bb);
2862 new_bb->count = edge_in->count ();
2864 e = redirect_edge_and_branch (edge_in, new_bb);
2865 gcc_assert (e == edge_in);
2867 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2868 reinstall_phi_args (new_edge, e);
2870 return new_bb;
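/* A worked example (hypothetical block numbers): splitting the
   critical edge 2->4 in

       <bb 2>: if (c_1) goto <bb 4>; else goto <bb 3>;
       <bb 4>: x_2 = PHI <a_3(2), b_4(3)>

   creates an empty bb 5 placed near bb 2, redirects the edge so that
   it runs 2->5, adds the fallthru edge 5->4, and reinstall_phi_args
   rewrites the PHI to x_2 = PHI <a_3(5), b_4(3)>.  */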
2874 /* Verify properties of the address expression T with base object BASE. */
2876 static tree
2877 verify_address (tree t, tree base)
2879 bool old_constant;
2880 bool old_side_effects;
2881 bool new_constant;
2882 bool new_side_effects;
2884 old_constant = TREE_CONSTANT (t);
2885 old_side_effects = TREE_SIDE_EFFECTS (t);
2887 recompute_tree_invariant_for_addr_expr (t);
2888 new_side_effects = TREE_SIDE_EFFECTS (t);
2889 new_constant = TREE_CONSTANT (t);
2891 if (old_constant != new_constant)
2893 error ("constant not recomputed when ADDR_EXPR changed");
2894 return t;
2896 if (old_side_effects != new_side_effects)
2898 error ("side effects not recomputed when ADDR_EXPR changed");
2899 return t;
2902 if (!(VAR_P (base)
2903 || TREE_CODE (base) == PARM_DECL
2904 || TREE_CODE (base) == RESULT_DECL))
2905 return NULL_TREE;
2907 if (DECL_GIMPLE_REG_P (base))
2909 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2910 return base;
2913 return NULL_TREE;
2916 /* Callback for walk_tree, check that all elements with address taken are
2917 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2918 inside a PHI node. */
2920 static tree
2921 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2923 tree t = *tp, x;
2925 if (TYPE_P (t))
2926 *walk_subtrees = 0;
2928 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2929 #define CHECK_OP(N, MSG) \
2930 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2931 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2933 switch (TREE_CODE (t))
2935 case SSA_NAME:
2936 if (SSA_NAME_IN_FREE_LIST (t))
2938 error ("SSA name in freelist but still referenced");
2939 return *tp;
2941 break;
2943 case PARM_DECL:
2944 case VAR_DECL:
2945 case RESULT_DECL:
2947 tree context = decl_function_context (t);
2948 if (context != cfun->decl
2949 && !SCOPE_FILE_SCOPE_P (context)
2950 && !TREE_STATIC (t)
2951 && !DECL_EXTERNAL (t))
2953 error ("Local declaration from a different function");
2954 return t;
2957 break;
2959 case INDIRECT_REF:
2960 error ("INDIRECT_REF in gimple IL");
2961 return t;
2963 case MEM_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!POINTER_TYPE_P (TREE_TYPE (x))
2966 || !is_gimple_mem_ref_addr (x))
2968 error ("invalid first operand of MEM_REF");
2969 return x;
2971 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2972 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2974 error ("invalid offset operand of MEM_REF");
2975 return TREE_OPERAND (t, 1);
2977 if (TREE_CODE (x) == ADDR_EXPR)
2979 tree va = verify_address (x, TREE_OPERAND (x, 0));
2980 if (va)
2981 return va;
2982 x = TREE_OPERAND (x, 0);
2984 walk_tree (&x, verify_expr, data, NULL);
2985 *walk_subtrees = 0;
2986 break;
2988 case ASSERT_EXPR:
2989 x = fold (ASSERT_EXPR_COND (t));
2990 if (x == boolean_false_node)
2992 error ("ASSERT_EXPR with an always-false condition");
2993 return *tp;
2995 break;
2997 case MODIFY_EXPR:
2998 error ("MODIFY_EXPR not expected while having tuples");
2999 return *tp;
3001 case ADDR_EXPR:
3003 tree tem;
3005 gcc_assert (is_gimple_address (t));
3007 /* Skip any references (they will be checked when we recurse down the
3008 tree) and ensure that any variable used as a prefix is marked
3009 addressable. */
3010 for (x = TREE_OPERAND (t, 0);
3011 handled_component_p (x);
3012 x = TREE_OPERAND (x, 0))
3015 if ((tem = verify_address (t, x)))
3016 return tem;
3018 if (!(VAR_P (x)
3019 || TREE_CODE (x) == PARM_DECL
3020 || TREE_CODE (x) == RESULT_DECL))
3021 return NULL;
3023 if (!TREE_ADDRESSABLE (x))
3025 error ("address taken, but ADDRESSABLE bit not set");
3026 return x;
3029 break;
3032 case COND_EXPR:
3033 x = COND_EXPR_COND (t);
3034 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3036 error ("non-integral used in condition");
3037 return x;
3039 if (!is_gimple_condexpr (x))
3041 error ("invalid conditional operand");
3042 return x;
3044 break;
3046 case NON_LVALUE_EXPR:
3047 case TRUTH_NOT_EXPR:
3048 gcc_unreachable ();
3050 CASE_CONVERT:
3051 case FIX_TRUNC_EXPR:
3052 case FLOAT_EXPR:
3053 case NEGATE_EXPR:
3054 case ABS_EXPR:
3055 case BIT_NOT_EXPR:
3056 CHECK_OP (0, "invalid operand to unary operator");
3057 break;
3059 case REALPART_EXPR:
3060 case IMAGPART_EXPR:
3061 case BIT_FIELD_REF:
3062 if (!is_gimple_reg_type (TREE_TYPE (t)))
3064 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3065 return t;
3068 if (TREE_CODE (t) == BIT_FIELD_REF)
3070 tree t0 = TREE_OPERAND (t, 0);
3071 tree t1 = TREE_OPERAND (t, 1);
3072 tree t2 = TREE_OPERAND (t, 2);
3073 if (!tree_fits_uhwi_p (t1)
3074 || !tree_fits_uhwi_p (t2)
3075 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3076 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3078 error ("invalid position or size operand to BIT_FIELD_REF");
3079 return t;
3081 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3082 && (TYPE_PRECISION (TREE_TYPE (t))
3083 != tree_to_uhwi (t1)))
3085 error ("integral result type precision does not match "
3086 "field size of BIT_FIELD_REF");
3087 return t;
3089 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3090 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3091 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3092 != tree_to_uhwi (t1)))
3094 error ("mode size of non-integral result does not "
3095 "match field size of BIT_FIELD_REF");
3096 return t;
3098 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3099 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3100 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3102 error ("position plus size exceeds size of referenced object in "
3103 "BIT_FIELD_REF");
3104 return t;
3107 t = TREE_OPERAND (t, 0);
3109 /* Fall-through. */
3110 case COMPONENT_REF:
3111 case ARRAY_REF:
3112 case ARRAY_RANGE_REF:
3113 case VIEW_CONVERT_EXPR:
3114 /* We have a nest of references. Verify that each of the operands
3115 that determine where to reference is either a constant or a variable,
3116 verify that the base is valid, and then mark the subtrees as
3117 already checked. */
3118 while (handled_component_p (t))
3120 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3121 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3122 else if (TREE_CODE (t) == ARRAY_REF
3123 || TREE_CODE (t) == ARRAY_RANGE_REF)
3125 CHECK_OP (1, "invalid array index");
3126 if (TREE_OPERAND (t, 2))
3127 CHECK_OP (2, "invalid array lower bound");
3128 if (TREE_OPERAND (t, 3))
3129 CHECK_OP (3, "invalid array stride");
3131 else if (TREE_CODE (t) == BIT_FIELD_REF
3132 || TREE_CODE (t) == REALPART_EXPR
3133 || TREE_CODE (t) == IMAGPART_EXPR)
3135 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3136 "REALPART_EXPR");
3137 return t;
3140 t = TREE_OPERAND (t, 0);
3143 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3145 error ("invalid reference prefix");
3146 return t;
3148 walk_tree (&t, verify_expr, data, NULL);
3149 *walk_subtrees = 0;
3150 break;
3151 case PLUS_EXPR:
3152 case MINUS_EXPR:
3153 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3154 should be done using POINTER_PLUS_EXPR. */
3155 if (POINTER_TYPE_P (TREE_TYPE (t)))
3157 error ("invalid operand to plus/minus, type is a pointer");
3158 return t;
3160 CHECK_OP (0, "invalid operand to binary operator");
3161 CHECK_OP (1, "invalid operand to binary operator");
3162 break;
3164 case POINTER_DIFF_EXPR:
3165 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3166 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3168 error ("invalid operand to pointer diff, operand is not a pointer");
3169 return t;
3171 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3172 || TYPE_UNSIGNED (TREE_TYPE (t))
3173 || (TYPE_PRECISION (TREE_TYPE (t))
3174 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3176 error ("invalid type for pointer diff");
3177 return t;
3179 CHECK_OP (0, "invalid operand to pointer diff");
3180 CHECK_OP (1, "invalid operand to pointer diff");
3181 break;
3183 case POINTER_PLUS_EXPR:
3184 /* Check to make sure the first operand is a pointer or reference type. */
3185 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3187 error ("invalid operand to pointer plus, first operand is not a pointer");
3188 return t;
3190 /* Check to make sure the second operand is a ptrofftype. */
3191 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3193 error ("invalid operand to pointer plus, second operand is not an "
3194 "integer type of appropriate width");
3195 return t;
3197 /* FALLTHROUGH */
3198 case LT_EXPR:
3199 case LE_EXPR:
3200 case GT_EXPR:
3201 case GE_EXPR:
3202 case EQ_EXPR:
3203 case NE_EXPR:
3204 case UNORDERED_EXPR:
3205 case ORDERED_EXPR:
3206 case UNLT_EXPR:
3207 case UNLE_EXPR:
3208 case UNGT_EXPR:
3209 case UNGE_EXPR:
3210 case UNEQ_EXPR:
3211 case LTGT_EXPR:
3212 case MULT_EXPR:
3213 case TRUNC_DIV_EXPR:
3214 case CEIL_DIV_EXPR:
3215 case FLOOR_DIV_EXPR:
3216 case ROUND_DIV_EXPR:
3217 case TRUNC_MOD_EXPR:
3218 case CEIL_MOD_EXPR:
3219 case FLOOR_MOD_EXPR:
3220 case ROUND_MOD_EXPR:
3221 case RDIV_EXPR:
3222 case EXACT_DIV_EXPR:
3223 case MIN_EXPR:
3224 case MAX_EXPR:
3225 case LSHIFT_EXPR:
3226 case RSHIFT_EXPR:
3227 case LROTATE_EXPR:
3228 case RROTATE_EXPR:
3229 case BIT_IOR_EXPR:
3230 case BIT_XOR_EXPR:
3231 case BIT_AND_EXPR:
3232 CHECK_OP (0, "invalid operand to binary operator");
3233 CHECK_OP (1, "invalid operand to binary operator");
3234 break;
3236 case CONSTRUCTOR:
3237 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3238 *walk_subtrees = 0;
3239 break;
3241 case CASE_LABEL_EXPR:
3242 if (CASE_CHAIN (t))
3244 error ("invalid CASE_CHAIN");
3245 return t;
3247 break;
3249 default:
3250 break;
3252 return NULL;
3254 #undef CHECK_OP
3258 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3259 Returns true if there is an error, otherwise false. */
3261 static bool
3262 verify_types_in_gimple_min_lval (tree expr)
3264 tree op;
3266 if (is_gimple_id (expr))
3267 return false;
3269 if (TREE_CODE (expr) != TARGET_MEM_REF
3270 && TREE_CODE (expr) != MEM_REF)
3272 error ("invalid expression for min lvalue");
3273 return true;
3276 /* TARGET_MEM_REFs are strange beasts. */
3277 if (TREE_CODE (expr) == TARGET_MEM_REF)
3278 return false;
3280 op = TREE_OPERAND (expr, 0);
3281 if (!is_gimple_val (op))
3283 error ("invalid operand in indirect reference");
3284 debug_generic_stmt (op);
3285 return true;
3287 /* Memory references now generally can involve a value conversion. */
3289 return false;
3292 /* Verify if EXPR is a valid GIMPLE reference expression. If
3293 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3294 if there is an error, otherwise false. */
3296 static bool
3297 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3299 while (handled_component_p (expr))
3301 tree op = TREE_OPERAND (expr, 0);
3303 if (TREE_CODE (expr) == ARRAY_REF
3304 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3306 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3307 || (TREE_OPERAND (expr, 2)
3308 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3309 || (TREE_OPERAND (expr, 3)
3310 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3312 error ("invalid operands to array reference");
3313 debug_generic_stmt (expr);
3314 return true;
3318 /* Verify if the reference array element types are compatible. */
3319 if (TREE_CODE (expr) == ARRAY_REF
3320 && !useless_type_conversion_p (TREE_TYPE (expr),
3321 TREE_TYPE (TREE_TYPE (op))))
3323 error ("type mismatch in array reference");
3324 debug_generic_stmt (TREE_TYPE (expr));
3325 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3326 return true;
3328 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3329 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3330 TREE_TYPE (TREE_TYPE (op))))
3332 error ("type mismatch in array range reference");
3333 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3334 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3335 return true;
3338 if ((TREE_CODE (expr) == REALPART_EXPR
3339 || TREE_CODE (expr) == IMAGPART_EXPR)
3340 && !useless_type_conversion_p (TREE_TYPE (expr),
3341 TREE_TYPE (TREE_TYPE (op))))
3343 error ("type mismatch in real/imagpart reference");
3344 debug_generic_stmt (TREE_TYPE (expr));
3345 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3346 return true;
3349 if (TREE_CODE (expr) == COMPONENT_REF
3350 && !useless_type_conversion_p (TREE_TYPE (expr),
3351 TREE_TYPE (TREE_OPERAND (expr, 1))))
3353 error ("type mismatch in component reference");
3354 debug_generic_stmt (TREE_TYPE (expr));
3355 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3356 return true;
3359 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3361 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3362 that their operand is not an SSA name or an invariant when
3363 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3364 bug). Otherwise there is nothing to verify; gross mismatches at
3365 most invoke undefined behavior. */
3366 if (require_lvalue
3367 && (TREE_CODE (op) == SSA_NAME
3368 || is_gimple_min_invariant (op)))
3370 error ("conversion of an SSA_NAME on the left hand side");
3371 debug_generic_stmt (expr);
3372 return true;
3374 else if (TREE_CODE (op) == SSA_NAME
3375 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3377 error ("conversion of register to a different size");
3378 debug_generic_stmt (expr);
3379 return true;
3381 else if (!handled_component_p (op))
3382 return false;
3385 expr = op;
3388 if (TREE_CODE (expr) == MEM_REF)
3390 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3392 error ("invalid address operand in MEM_REF");
3393 debug_generic_stmt (expr);
3394 return true;
3396 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3397 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3399 error ("invalid offset operand in MEM_REF");
3400 debug_generic_stmt (expr);
3401 return true;
3404 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3406 if (!TMR_BASE (expr)
3407 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3409 error ("invalid address operand in TARGET_MEM_REF");
3410 return true;
3412 if (!TMR_OFFSET (expr)
3413 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3414 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3416 error ("invalid offset operand in TARGET_MEM_REF");
3417 debug_generic_stmt (expr);
3418 return true;
3422 return ((require_lvalue || !is_gimple_min_invariant (expr))
3423 && verify_types_in_gimple_min_lval (expr));
3426 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3427 list of pointer-to types that is trivially convertible to DEST. */
3429 static bool
3430 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3432 tree src;
3434 if (!TYPE_POINTER_TO (src_obj))
3435 return true;
3437 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3438 if (useless_type_conversion_p (dest, src))
3439 return true;
3441 return false;
3444 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3445 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3447 static bool
3448 valid_fixed_convert_types_p (tree type1, tree type2)
3450 return (FIXED_POINT_TYPE_P (type1)
3451 && (INTEGRAL_TYPE_P (type2)
3452 || SCALAR_FLOAT_TYPE_P (type2)
3453 || FIXED_POINT_TYPE_P (type2)));
3456 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3457 is a problem, otherwise false. */
3459 static bool
3460 verify_gimple_call (gcall *stmt)
3462 tree fn = gimple_call_fn (stmt);
3463 tree fntype, fndecl;
3464 unsigned i;
3466 if (gimple_call_internal_p (stmt))
3468 if (fn)
3470 error ("gimple call has two targets");
3471 debug_generic_stmt (fn);
3472 return true;
3474 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3475 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3477 return false;
3480 else
3482 if (!fn)
3484 error ("gimple call has no target");
3485 return true;
3489 if (fn && !is_gimple_call_addr (fn))
3491 error ("invalid function in gimple call");
3492 debug_generic_stmt (fn);
3493 return true;
3496 if (fn
3497 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3498 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3499 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3501 error ("non-function in gimple call");
3502 return true;
3505 fndecl = gimple_call_fndecl (stmt);
3506 if (fndecl
3507 && TREE_CODE (fndecl) == FUNCTION_DECL
3508 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3509 && !DECL_PURE_P (fndecl)
3510 && !TREE_READONLY (fndecl))
3512 error ("invalid pure const state for function");
3513 return true;
3516 tree lhs = gimple_call_lhs (stmt);
3517 if (lhs
3518 && (!is_gimple_lvalue (lhs)
3519 || verify_types_in_gimple_reference (lhs, true)))
3521 error ("invalid LHS in gimple call");
3522 return true;
3525 if (gimple_call_ctrl_altering_p (stmt)
3526 && gimple_call_noreturn_p (stmt)
3527 && should_remove_lhs_p (lhs))
3529 error ("LHS in noreturn call");
3530 return true;
3533 fntype = gimple_call_fntype (stmt);
3534 if (fntype
3535 && lhs
3536 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3537 /* ??? At least C++ misses conversions at assignments from
3538 void * call results.
3539 For now simply allow arbitrary pointer type conversions. */
3540 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3541 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3543 error ("invalid conversion in gimple call");
3544 debug_generic_stmt (TREE_TYPE (lhs));
3545 debug_generic_stmt (TREE_TYPE (fntype));
3546 return true;
3549 if (gimple_call_chain (stmt)
3550 && !is_gimple_val (gimple_call_chain (stmt)))
3552 error ("invalid static chain in gimple call");
3553 debug_generic_stmt (gimple_call_chain (stmt));
3554 return true;
3557 /* If there is a static chain argument, the call should either be
3558 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3559 if (gimple_call_chain (stmt)
3560 && fndecl
3561 && !DECL_STATIC_CHAIN (fndecl))
3563 error ("static chain with function that doesn%'t use one");
3564 return true;
3567 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3569 switch (DECL_FUNCTION_CODE (fndecl))
3571 case BUILT_IN_UNREACHABLE:
3572 case BUILT_IN_TRAP:
3573 if (gimple_call_num_args (stmt) > 0)
3575 /* Built-in unreachable with parameters might not be caught by
3576 the undefined behavior sanitizer. Front ends do check that users do
3577 not call them that way, but we also produce calls to
3578 __builtin_unreachable internally, for example when IPA figures
3579 out a call cannot happen in a legal program. In such cases,
3580 we must make sure arguments are stripped off. */
3581 error ("__builtin_unreachable or __builtin_trap call with "
3582 "arguments");
3583 return true;
3585 break;
3586 default:
3587 break;
3591 /* ??? The C frontend passes unpromoted arguments in case it
3592 didn't see a function declaration before the call. So for now
3593 leave the call arguments mostly unverified. Once we gimplify
3594 unit-at-a-time we have a chance to fix this. */
3596 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3598 tree arg = gimple_call_arg (stmt, i);
3599 if ((is_gimple_reg_type (TREE_TYPE (arg))
3600 && !is_gimple_val (arg))
3601 || (!is_gimple_reg_type (TREE_TYPE (arg))
3602 && !is_gimple_lvalue (arg)))
3604 error ("invalid argument to gimple call");
3605 debug_generic_expr (arg);
3606 return true;
3610 return false;
3613 /* Verifies the gimple comparison with the result type TYPE and
3614 the operands OP0 and OP1; the comparison code is CODE. */
3616 static bool
3617 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3619 tree op0_type = TREE_TYPE (op0);
3620 tree op1_type = TREE_TYPE (op1);
3622 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3624 error ("invalid operands in gimple comparison");
3625 return true;
3628 /* For comparisons we do not have the operation's type as the
3629 effective type the comparison is carried out in. Instead
3630 we require that either the first operand is trivially
3631 convertible into the second, or the other way around.
3632 Because we special-case pointers to void, we allow
3633 comparisons of pointers with the same mode as well. */
3634 if (!useless_type_conversion_p (op0_type, op1_type)
3635 && !useless_type_conversion_p (op1_type, op0_type)
3636 && (!POINTER_TYPE_P (op0_type)
3637 || !POINTER_TYPE_P (op1_type)
3638 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3640 error ("mismatching comparison operand types");
3641 debug_generic_expr (op0_type);
3642 debug_generic_expr (op1_type);
3643 return true;
3646 /* The resulting type of a comparison may be an effective boolean type. */
3647 if (INTEGRAL_TYPE_P (type)
3648 && (TREE_CODE (type) == BOOLEAN_TYPE
3649 || TYPE_PRECISION (type) == 1))
3651 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3652 || TREE_CODE (op1_type) == VECTOR_TYPE)
3653 && code != EQ_EXPR && code != NE_EXPR
3654 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3655 && !VECTOR_INTEGER_TYPE_P (op0_type))
3657 error ("unsupported operation or type for vector comparison"
3658 " returning a boolean");
3659 debug_generic_expr (op0_type);
3660 debug_generic_expr (op1_type);
3661 return true;
3664 /* Or a boolean vector type with the same element count
3665 as the comparison operand types. */
3666 else if (TREE_CODE (type) == VECTOR_TYPE
3667 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3669 if (TREE_CODE (op0_type) != VECTOR_TYPE
3670 || TREE_CODE (op1_type) != VECTOR_TYPE)
3672 error ("non-vector operands in vector comparison");
3673 debug_generic_expr (op0_type);
3674 debug_generic_expr (op1_type);
3675 return true;
3678 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3680 error ("invalid vector comparison resulting type");
3681 debug_generic_expr (type);
3682 return true;
3685 else
3687 error ("bogus comparison result type");
3688 debug_generic_expr (type);
3689 return true;
3692 return false;
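/* Examples (hypothetical names): valid comparisons include

       _1 = x_2 < y_3;        // _1 of BOOLEAN_TYPE, x/y of the same
                              // scalar type
       mask_4 = vx_5 < vy_6;  // boolean vector result with the same
                              // element count as the vector operands

   while comparing operands of mutually inconvertible types, or
   producing a single boolean from a non-EQ/NE comparison of, say,
   float vectors, is rejected above.  */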
3695 /* Verify a gimple assignment statement STMT with an unary rhs.
3696 Returns true if anything is wrong. */
3698 static bool
3699 verify_gimple_assign_unary (gassign *stmt)
3701 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3702 tree lhs = gimple_assign_lhs (stmt);
3703 tree lhs_type = TREE_TYPE (lhs);
3704 tree rhs1 = gimple_assign_rhs1 (stmt);
3705 tree rhs1_type = TREE_TYPE (rhs1);
3707 if (!is_gimple_reg (lhs))
3709 error ("non-register as LHS of unary operation");
3710 return true;
3713 if (!is_gimple_val (rhs1))
3715 error ("invalid operand in unary operation");
3716 return true;
3719 /* First handle conversions. */
3720 switch (rhs_code)
3722 CASE_CONVERT:
3724 /* Allow conversions from pointer type to integral type only if
3725 there is no sign or zero extension involved.
3726 For targets where the precision of ptrofftype doesn't match that
3727 of pointers we need to allow arbitrary conversions to ptrofftype. */
3728 if ((POINTER_TYPE_P (lhs_type)
3729 && INTEGRAL_TYPE_P (rhs1_type))
3730 || (POINTER_TYPE_P (rhs1_type)
3731 && INTEGRAL_TYPE_P (lhs_type)
3732 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3733 || ptrofftype_p (sizetype))))
3734 return false;
3736 /* Allow conversion from integral to offset type and vice versa. */
3737 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3738 && INTEGRAL_TYPE_P (rhs1_type))
3739 || (INTEGRAL_TYPE_P (lhs_type)
3740 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3741 return false;
3743 /* Otherwise assert we are converting between types of the
3744 same kind. */
3745 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3747 error ("invalid types in nop conversion");
3748 debug_generic_expr (lhs_type);
3749 debug_generic_expr (rhs1_type);
3750 return true;
3753 return false;
3756 case ADDR_SPACE_CONVERT_EXPR:
3758 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3759 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3760 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3762 error ("invalid types in address space conversion");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 return true;
3768 return false;
3771 case FIXED_CONVERT_EXPR:
3773 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3774 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3776 error ("invalid types in fixed-point conversion");
3777 debug_generic_expr (lhs_type);
3778 debug_generic_expr (rhs1_type);
3779 return true;
3782 return false;
3785 case FLOAT_EXPR:
3787 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3788 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3789 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3791 error ("invalid types in conversion to floating point");
3792 debug_generic_expr (lhs_type);
3793 debug_generic_expr (rhs1_type);
3794 return true;
3797 return false;
3800 case FIX_TRUNC_EXPR:
3802 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3803 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3804 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3806 error ("invalid types in conversion to integer");
3807 debug_generic_expr (lhs_type);
3808 debug_generic_expr (rhs1_type);
3809 return true;
3812 return false;
3815 case VEC_UNPACK_HI_EXPR:
3816 case VEC_UNPACK_LO_EXPR:
3817 case VEC_UNPACK_FLOAT_HI_EXPR:
3818 case VEC_UNPACK_FLOAT_LO_EXPR:
3819 /* FIXME. */
3820 return false;
3822 case NEGATE_EXPR:
3823 case ABS_EXPR:
3824 case BIT_NOT_EXPR:
3825 case PAREN_EXPR:
3826 case CONJ_EXPR:
3827 break;
3829 default:
3830 gcc_unreachable ();
3833 /* For the remaining codes assert there is no conversion involved. */
3834 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3836 error ("non-trivial conversion in unary operation");
3837 debug_generic_expr (lhs_type);
3838 debug_generic_expr (rhs1_type);
3839 return true;
3842 return false;
3845 /* Verify a gimple assignment statement STMT with a binary rhs.
3846 Returns true if anything is wrong. */
3848 static bool
3849 verify_gimple_assign_binary (gassign *stmt)
3851 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3852 tree lhs = gimple_assign_lhs (stmt);
3853 tree lhs_type = TREE_TYPE (lhs);
3854 tree rhs1 = gimple_assign_rhs1 (stmt);
3855 tree rhs1_type = TREE_TYPE (rhs1);
3856 tree rhs2 = gimple_assign_rhs2 (stmt);
3857 tree rhs2_type = TREE_TYPE (rhs2);
3859 if (!is_gimple_reg (lhs))
3861 error ("non-register as LHS of binary operation");
3862 return true;
3865 if (!is_gimple_val (rhs1)
3866 || !is_gimple_val (rhs2))
3868 error ("invalid operands in binary operation");
3869 return true;
3872 /* First handle operations that involve different types. */
3873 switch (rhs_code)
3875 case COMPLEX_EXPR:
3877 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3878 || !(INTEGRAL_TYPE_P (rhs1_type)
3879 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3880 || !(INTEGRAL_TYPE_P (rhs2_type)
3881 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3883 error ("type mismatch in complex expression");
3884 debug_generic_expr (lhs_type);
3885 debug_generic_expr (rhs1_type);
3886 debug_generic_expr (rhs2_type);
3887 return true;
3890 return false;
3893 case LSHIFT_EXPR:
3894 case RSHIFT_EXPR:
3895 case LROTATE_EXPR:
3896 case RROTATE_EXPR:
3898 /* Shifts and rotates are ok on integral types, fixed point
3899 types and integer vector types. */
3900 if ((!INTEGRAL_TYPE_P (rhs1_type)
3901 && !FIXED_POINT_TYPE_P (rhs1_type)
3902 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3903 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3904 || (!INTEGRAL_TYPE_P (rhs2_type)
3905 /* Vector shifts of vectors are also ok. */
3906 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3907 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3908 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3909 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3910 || !useless_type_conversion_p (lhs_type, rhs1_type))
3912 error ("type mismatch in shift expression");
3913 debug_generic_expr (lhs_type);
3914 debug_generic_expr (rhs1_type);
3915 debug_generic_expr (rhs2_type);
3916 return true;
3919 return false;
3922 case WIDEN_LSHIFT_EXPR:
3924 if (!INTEGRAL_TYPE_P (lhs_type)
3925 || !INTEGRAL_TYPE_P (rhs1_type)
3926 || TREE_CODE (rhs2) != INTEGER_CST
3927 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3929 error ("type mismatch in widening vector shift expression");
3930 debug_generic_expr (lhs_type);
3931 debug_generic_expr (rhs1_type);
3932 debug_generic_expr (rhs2_type);
3933 return true;
3936 return false;
3939 case VEC_WIDEN_LSHIFT_HI_EXPR:
3940 case VEC_WIDEN_LSHIFT_LO_EXPR:
3942 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3943 || TREE_CODE (lhs_type) != VECTOR_TYPE
3944 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3945 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3946 || TREE_CODE (rhs2) != INTEGER_CST
3947 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3948 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3950 error ("type mismatch in widening vector shift expression");
3951 debug_generic_expr (lhs_type);
3952 debug_generic_expr (rhs1_type);
3953 debug_generic_expr (rhs2_type);
3954 return true;
3957 return false;
3960 case PLUS_EXPR:
3961 case MINUS_EXPR:
3963 tree lhs_etype = lhs_type;
3964 tree rhs1_etype = rhs1_type;
3965 tree rhs2_etype = rhs2_type;
3966 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3968 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3969 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3971 error ("invalid non-vector operands to vector valued plus");
3972 return true;
3974 lhs_etype = TREE_TYPE (lhs_type);
3975 rhs1_etype = TREE_TYPE (rhs1_type);
3976 rhs2_etype = TREE_TYPE (rhs2_type);
3978 if (POINTER_TYPE_P (lhs_etype)
3979 || POINTER_TYPE_P (rhs1_etype)
3980 || POINTER_TYPE_P (rhs2_etype))
3982 error ("invalid (pointer) operands to plus/minus");
3983 return true;
3986 /* Continue with generic binary expression handling. */
3987 break;
3990 case POINTER_PLUS_EXPR:
3992 if (!POINTER_TYPE_P (rhs1_type)
3993 || !useless_type_conversion_p (lhs_type, rhs1_type)
3994 || !ptrofftype_p (rhs2_type))
3996 error ("type mismatch in pointer plus expression");
3997 debug_generic_stmt (lhs_type);
3998 debug_generic_stmt (rhs1_type);
3999 debug_generic_stmt (rhs2_type);
4000 return true;
4003 return false;
4006 case POINTER_DIFF_EXPR:
4008 if (!POINTER_TYPE_P (rhs1_type)
4009 || !POINTER_TYPE_P (rhs2_type)
4010 || !types_compatible_p (rhs1_type, rhs2_type)
4011 || TREE_CODE (lhs_type) != INTEGER_TYPE
4012 || TYPE_UNSIGNED (lhs_type)
4013 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4015 error ("type mismatch in pointer diff expression");
4016 debug_generic_stmt (lhs_type);
4017 debug_generic_stmt (rhs1_type);
4018 debug_generic_stmt (rhs2_type);
4019 return true;
4022 return false;
4025 case TRUTH_ANDIF_EXPR:
4026 case TRUTH_ORIF_EXPR:
4027 case TRUTH_AND_EXPR:
4028 case TRUTH_OR_EXPR:
4029 case TRUTH_XOR_EXPR:
4031 gcc_unreachable ();
4033 case LT_EXPR:
4034 case LE_EXPR:
4035 case GT_EXPR:
4036 case GE_EXPR:
4037 case EQ_EXPR:
4038 case NE_EXPR:
4039 case UNORDERED_EXPR:
4040 case ORDERED_EXPR:
4041 case UNLT_EXPR:
4042 case UNLE_EXPR:
4043 case UNGT_EXPR:
4044 case UNGE_EXPR:
4045 case UNEQ_EXPR:
4046 case LTGT_EXPR:
4047 /* Comparisons are also binary, but the result type is not
4048 connected to the operand types. */
4049 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4051 case WIDEN_MULT_EXPR:
4052 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4053 return true;
4054 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4055 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4057 case WIDEN_SUM_EXPR:
4059 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4060 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4061 && ((!INTEGRAL_TYPE_P (rhs1_type)
4062 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4063 || (!INTEGRAL_TYPE_P (lhs_type)
4064 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4065 || !useless_type_conversion_p (lhs_type, rhs2_type)
4066 || (GET_MODE_SIZE (element_mode (rhs2_type))
4067 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4069 error ("type mismatch in widening sum reduction");
4070 debug_generic_expr (lhs_type);
4071 debug_generic_expr (rhs1_type);
4072 debug_generic_expr (rhs2_type);
4073 return true;
4075 return false;
4078 case VEC_WIDEN_MULT_HI_EXPR:
4079 case VEC_WIDEN_MULT_LO_EXPR:
4080 case VEC_WIDEN_MULT_EVEN_EXPR:
4081 case VEC_WIDEN_MULT_ODD_EXPR:
4083 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4084 || TREE_CODE (lhs_type) != VECTOR_TYPE
4085 || !types_compatible_p (rhs1_type, rhs2_type)
4086 || (GET_MODE_SIZE (element_mode (lhs_type))
4087 != 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4089 error ("type mismatch in vector widening multiplication");
4090 debug_generic_expr (lhs_type);
4091 debug_generic_expr (rhs1_type);
4092 debug_generic_expr (rhs2_type);
4093 return true;
4095 return false;
4098 case VEC_PACK_TRUNC_EXPR:
4099 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4100 vector boolean types. */
4101 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4102 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4103 && types_compatible_p (rhs1_type, rhs2_type)
4104 && (TYPE_VECTOR_SUBPARTS (lhs_type)
4105 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4106 return false;
4108 /* Fallthru. */
4109 case VEC_PACK_SAT_EXPR:
4110 case VEC_PACK_FIX_TRUNC_EXPR:
4112 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4113 || TREE_CODE (lhs_type) != VECTOR_TYPE
4114 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4115 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4116 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4117 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4118 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4119 || !types_compatible_p (rhs1_type, rhs2_type)
4120 || (GET_MODE_SIZE (element_mode (rhs1_type))
4121 != 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4123 error ("type mismatch in vector pack expression");
4124 debug_generic_expr (lhs_type);
4125 debug_generic_expr (rhs1_type);
4126 debug_generic_expr (rhs2_type);
4127 return true;
4130 return false;
4133 case MULT_EXPR:
4134 case MULT_HIGHPART_EXPR:
4135 case TRUNC_DIV_EXPR:
4136 case CEIL_DIV_EXPR:
4137 case FLOOR_DIV_EXPR:
4138 case ROUND_DIV_EXPR:
4139 case TRUNC_MOD_EXPR:
4140 case CEIL_MOD_EXPR:
4141 case FLOOR_MOD_EXPR:
4142 case ROUND_MOD_EXPR:
4143 case RDIV_EXPR:
4144 case EXACT_DIV_EXPR:
4145 case MIN_EXPR:
4146 case MAX_EXPR:
4147 case BIT_IOR_EXPR:
4148 case BIT_XOR_EXPR:
4149 case BIT_AND_EXPR:
4150 /* Continue with generic binary expression handling. */
4151 break;
4153 default:
4154 gcc_unreachable ();
4157 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4158 || !useless_type_conversion_p (lhs_type, rhs2_type))
4160 error ("type mismatch in binary expression");
4161 debug_generic_stmt (lhs_type);
4162 debug_generic_stmt (rhs1_type);
4163 debug_generic_stmt (rhs2_type);
4164 return true;
4167 return false;
4170 /* Verify a gimple assignment statement STMT with a ternary rhs.
4171 Returns true if anything is wrong. */
4173 static bool
4174 verify_gimple_assign_ternary (gassign *stmt)
4176 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4177 tree lhs = gimple_assign_lhs (stmt);
4178 tree lhs_type = TREE_TYPE (lhs);
4179 tree rhs1 = gimple_assign_rhs1 (stmt);
4180 tree rhs1_type = TREE_TYPE (rhs1);
4181 tree rhs2 = gimple_assign_rhs2 (stmt);
4182 tree rhs2_type = TREE_TYPE (rhs2);
4183 tree rhs3 = gimple_assign_rhs3 (stmt);
4184 tree rhs3_type = TREE_TYPE (rhs3);
4186 if (!is_gimple_reg (lhs))
4188 error ("non-register as LHS of ternary operation");
4189 return true;
4192 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4193 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4194 || !is_gimple_val (rhs2)
4195 || !is_gimple_val (rhs3))
4197 error ("invalid operands in ternary operation");
4198 return true;
4201 /* First handle operations that involve different types. */
4202 switch (rhs_code)
4204 case WIDEN_MULT_PLUS_EXPR:
4205 case WIDEN_MULT_MINUS_EXPR:
4206 if ((!INTEGRAL_TYPE_P (rhs1_type)
4207 && !FIXED_POINT_TYPE_P (rhs1_type))
4208 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4209 || !useless_type_conversion_p (lhs_type, rhs3_type)
4210 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4211 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4213 error ("type mismatch in widening multiply-accumulate expression");
4214 debug_generic_expr (lhs_type);
4215 debug_generic_expr (rhs1_type);
4216 debug_generic_expr (rhs2_type);
4217 debug_generic_expr (rhs3_type);
4218 return true;
4220 break;
4222 case FMA_EXPR:
4223 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4224 || !useless_type_conversion_p (lhs_type, rhs2_type)
4225 || !useless_type_conversion_p (lhs_type, rhs3_type))
4227 error ("type mismatch in fused multiply-add expression");
4228 debug_generic_expr (lhs_type);
4229 debug_generic_expr (rhs1_type);
4230 debug_generic_expr (rhs2_type);
4231 debug_generic_expr (rhs3_type);
4232 return true;
4234 break;
4236 case VEC_COND_EXPR:
4237 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4238 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4239 != TYPE_VECTOR_SUBPARTS (lhs_type))
4241 error ("the first argument of a VEC_COND_EXPR must be of a "
4242 "boolean vector type of the same number of elements "
4243 "as the result");
4244 debug_generic_expr (lhs_type);
4245 debug_generic_expr (rhs1_type);
4246 return true;
4248 /* Fallthrough. */
4249 case COND_EXPR:
4250 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4251 || !useless_type_conversion_p (lhs_type, rhs3_type))
4253 error ("type mismatch in conditional expression");
4254 debug_generic_expr (lhs_type);
4255 debug_generic_expr (rhs2_type);
4256 debug_generic_expr (rhs3_type);
4257 return true;
4259 break;
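4260 /* VEC_PERM_EXPR <v0, v1, sel> selects for each i the element of the concatenation of v0 and v1 indexed by sel[i] (taken modulo twice the element count); the checks below enforce matching vector types, equal element counts, and an integral selector with elements as wide as the data elements. */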
4261 case VEC_PERM_EXPR:
4262 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4263 || !useless_type_conversion_p (lhs_type, rhs2_type))
4265 error ("type mismatch in vector permute expression");
4266 debug_generic_expr (lhs_type);
4267 debug_generic_expr (rhs1_type);
4268 debug_generic_expr (rhs2_type);
4269 debug_generic_expr (rhs3_type);
4270 return true;
4273 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4274 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4275 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4277 error ("vector types expected in vector permute expression");
4278 debug_generic_expr (lhs_type);
4279 debug_generic_expr (rhs1_type);
4280 debug_generic_expr (rhs2_type);
4281 debug_generic_expr (rhs3_type);
4282 return true;
4285 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4286 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4287 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4288 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4289 != TYPE_VECTOR_SUBPARTS (lhs_type))
4291 error ("vectors with different element number found "
4292 "in vector permute expression");
4293 debug_generic_expr (lhs_type);
4294 debug_generic_expr (rhs1_type);
4295 debug_generic_expr (rhs2_type);
4296 debug_generic_expr (rhs3_type);
4297 return true;
4300 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4301 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
4302 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
4304 error ("invalid mask type in vector permute expression");
4305 debug_generic_expr (lhs_type);
4306 debug_generic_expr (rhs1_type);
4307 debug_generic_expr (rhs2_type);
4308 debug_generic_expr (rhs3_type);
4309 return true;
4312 return false;
4314 case SAD_EXPR:
4315 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4316 || !useless_type_conversion_p (lhs_type, rhs3_type)
4317 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4318 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4320 error ("type mismatch in sad expression");
4321 debug_generic_expr (lhs_type);
4322 debug_generic_expr (rhs1_type);
4323 debug_generic_expr (rhs2_type);
4324 debug_generic_expr (rhs3_type);
4325 return true;
4328 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4329 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4330 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4332 error ("vector types expected in sad expression");
4333 debug_generic_expr (lhs_type);
4334 debug_generic_expr (rhs1_type);
4335 debug_generic_expr (rhs2_type);
4336 debug_generic_expr (rhs3_type);
4337 return true;
4340 return false;
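4341 /* BIT_INSERT_EXPR <container, replacement, pos> yields the container value with replacement substituted at constant bit position pos; the insertion must lie entirely within the container, and vector insertions must fall on an element boundary. */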
4342 case BIT_INSERT_EXPR:
4343 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4345 error ("type mismatch in BIT_INSERT_EXPR");
4346 debug_generic_expr (lhs_type);
4347 debug_generic_expr (rhs1_type);
4348 return true;
4350 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4351 && INTEGRAL_TYPE_P (rhs2_type))
4352 || (VECTOR_TYPE_P (rhs1_type)
4353 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4355 error ("not allowed type combination in BIT_INSERT_EXPR");
4356 debug_generic_expr (rhs1_type);
4357 debug_generic_expr (rhs2_type);
4358 return true;
4360 if (! tree_fits_uhwi_p (rhs3)
4361 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4362 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4364 error ("invalid position or size in BIT_INSERT_EXPR");
4365 return true;
4367 if (INTEGRAL_TYPE_P (rhs1_type))
4369 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4370 if (bitpos >= TYPE_PRECISION (rhs1_type)
4371 || (bitpos + TYPE_PRECISION (rhs2_type)
4372 > TYPE_PRECISION (rhs1_type)))
4374 error ("insertion out of range in BIT_INSERT_EXPR");
4375 return true;
4378 else if (VECTOR_TYPE_P (rhs1_type))
4380 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4381 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4382 if (bitpos % bitsize != 0)
4384 error ("vector insertion not at element boundary");
4385 return true;
4388 return false;
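4389 /* DOT_PROD_EXPR multiplies corresponding elements of rhs1 and rhs2 and adds the products into accumulator rhs3, whose elements must be at least twice as wide as the multiplied ones. */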
4390 case DOT_PROD_EXPR:
4392 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4393 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4394 && ((!INTEGRAL_TYPE_P (rhs1_type)
4395 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4396 || (!INTEGRAL_TYPE_P (lhs_type)
4397 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4398 || !types_compatible_p (rhs1_type, rhs2_type)
4399 || !useless_type_conversion_p (lhs_type, rhs3_type)
4400 || (GET_MODE_SIZE (element_mode (rhs3_type))
4401 < 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4403 error ("type mismatch in dot product reduction");
4404 debug_generic_expr (lhs_type);
4405 debug_generic_expr (rhs1_type);
4406 debug_generic_expr (rhs2_type);
4407 return true;
4409 return false;
4412 case REALIGN_LOAD_EXPR:
4413 /* FIXME. */
4414 return false;
4416 default:
4417 gcc_unreachable ();
4419 return false;
4422 /* Verify a gimple assignment statement STMT with a single rhs.
4423 Returns true if anything is wrong. */
4425 static bool
4426 verify_gimple_assign_single (gassign *stmt)
4428 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4429 tree lhs = gimple_assign_lhs (stmt);
4430 tree lhs_type = TREE_TYPE (lhs);
4431 tree rhs1 = gimple_assign_rhs1 (stmt);
4432 tree rhs1_type = TREE_TYPE (rhs1);
4433 bool res = false;
4435 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4437 error ("non-trivial conversion at assignment");
4438 debug_generic_expr (lhs_type);
4439 debug_generic_expr (rhs1_type);
4440 return true;
4443 if (gimple_clobber_p (stmt)
4444 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4446 error ("non-decl/MEM_REF LHS in clobber statement");
4447 debug_generic_expr (lhs);
4448 return true;
4451 if (handled_component_p (lhs)
4452 || TREE_CODE (lhs) == MEM_REF
4453 || TREE_CODE (lhs) == TARGET_MEM_REF)
4454 res |= verify_types_in_gimple_reference (lhs, true);
4456 /* Special codes we cannot handle via their class. */
4457 switch (rhs_code)
4459 case ADDR_EXPR:
4461 tree op = TREE_OPERAND (rhs1, 0);
4462 if (!is_gimple_addressable (op))
4464 error ("invalid operand in unary expression");
4465 return true;
4468 /* Technically there is no longer a need for matching types, but
4469 gimple hygiene asks for this check. In LTO we can end up
4470 combining incompatible units and thus end up with addresses
4471 of globals that change their type to a common one. */
4472 if (!in_lto_p
4473 && !types_compatible_p (TREE_TYPE (op),
4474 TREE_TYPE (TREE_TYPE (rhs1)))
4475 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4476 TREE_TYPE (op)))
4478 error ("type mismatch in address expression");
4479 debug_generic_stmt (TREE_TYPE (rhs1));
4480 debug_generic_stmt (TREE_TYPE (op));
4481 return true;
4484 return verify_types_in_gimple_reference (op, true);
4487 /* tcc_reference */
4488 case INDIRECT_REF:
4489 error ("INDIRECT_REF in gimple IL");
4490 return true;
4492 case COMPONENT_REF:
4493 case BIT_FIELD_REF:
4494 case ARRAY_REF:
4495 case ARRAY_RANGE_REF:
4496 case VIEW_CONVERT_EXPR:
4497 case REALPART_EXPR:
4498 case IMAGPART_EXPR:
4499 case TARGET_MEM_REF:
4500 case MEM_REF:
4501 if (!is_gimple_reg (lhs)
4502 && is_gimple_reg_type (TREE_TYPE (lhs)))
4504 error ("invalid rhs for gimple memory store");
4505 debug_generic_stmt (lhs);
4506 debug_generic_stmt (rhs1);
4507 return true;
4509 return res || verify_types_in_gimple_reference (rhs1, false);
4511 /* tcc_constant */
4512 case SSA_NAME:
4513 case INTEGER_CST:
4514 case REAL_CST:
4515 case FIXED_CST:
4516 case COMPLEX_CST:
4517 case VECTOR_CST:
4518 case STRING_CST:
4519 return res;
4521 /* tcc_declaration */
4522 case CONST_DECL:
4523 return res;
4524 case VAR_DECL:
4525 case PARM_DECL:
4526 if (!is_gimple_reg (lhs)
4527 && !is_gimple_reg (rhs1)
4528 && is_gimple_reg_type (TREE_TYPE (lhs)))
4530 error ("invalid rhs for gimple memory store");
4531 debug_generic_stmt (lhs);
4532 debug_generic_stmt (rhs1);
4533 return true;
4535 return res;
4537 case CONSTRUCTOR:
4538 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4540 unsigned int i;
4541 tree elt_i, elt_v, elt_t = NULL_TREE;
4543 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4544 return res;
4545 /* For vector CONSTRUCTORs we require that either it is an empty
4546 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4547 (then the element count must be correct to cover the whole
4548 outer vector and the index must be NULL on all elements), or it is
4549 a CONSTRUCTOR of scalar elements, where we as an exception allow
4550 a smaller number of elements (assuming zero filling) and
4551 consecutive indexes as compared to NULL indexes (such
4552 CONSTRUCTORs can appear in the IL from FEs). */
4553 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4555 if (elt_t == NULL_TREE)
4557 elt_t = TREE_TYPE (elt_v);
4558 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4560 tree elt_t = TREE_TYPE (elt_v);
4561 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4562 TREE_TYPE (elt_t)))
4564 error ("incorrect type of vector CONSTRUCTOR"
4565 " elements");
4566 debug_generic_stmt (rhs1);
4567 return true;
4569 else if (CONSTRUCTOR_NELTS (rhs1)
4570 * TYPE_VECTOR_SUBPARTS (elt_t)
4571 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4573 error ("incorrect number of vector CONSTRUCTOR"
4574 " elements");
4575 debug_generic_stmt (rhs1);
4576 return true;
4579 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4580 elt_t))
4582 error ("incorrect type of vector CONSTRUCTOR elements");
4583 debug_generic_stmt (rhs1);
4584 return true;
4586 else if (CONSTRUCTOR_NELTS (rhs1)
4587 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4589 error ("incorrect number of vector CONSTRUCTOR elements");
4590 debug_generic_stmt (rhs1);
4591 return true;
4594 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4596 error ("incorrect type of vector CONSTRUCTOR elements");
4597 debug_generic_stmt (rhs1);
4598 return true;
4600 if (elt_i != NULL_TREE
4601 && (TREE_CODE (elt_t) == VECTOR_TYPE
4602 || TREE_CODE (elt_i) != INTEGER_CST
4603 || compare_tree_int (elt_i, i) != 0))
4605 error ("vector CONSTRUCTOR with non-NULL element index");
4606 debug_generic_stmt (rhs1);
4607 return true;
4609 if (!is_gimple_val (elt_v))
4611 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4612 debug_generic_stmt (rhs1);
4613 return true;
4617 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4619 error ("non-vector CONSTRUCTOR with elements");
4620 debug_generic_stmt (rhs1);
4621 return true;
4623 return res;
4624 case OBJ_TYPE_REF:
4625 case ASSERT_EXPR:
4626 case WITH_SIZE_EXPR:
4627 /* FIXME. */
4628 return res;
4630 default:;
4633 return res;
4636 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4637 is a problem, otherwise false. */
4639 static bool
4640 verify_gimple_assign (gassign *stmt)
4642 switch (gimple_assign_rhs_class (stmt))
4644 case GIMPLE_SINGLE_RHS:
4645 return verify_gimple_assign_single (stmt);
4647 case GIMPLE_UNARY_RHS:
4648 return verify_gimple_assign_unary (stmt);
4650 case GIMPLE_BINARY_RHS:
4651 return verify_gimple_assign_binary (stmt);
4653 case GIMPLE_TERNARY_RHS:
4654 return verify_gimple_assign_ternary (stmt);
4656 default:
4657 gcc_unreachable ();
4661 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4662 is a problem, otherwise false. */
4664 static bool
4665 verify_gimple_return (greturn *stmt)
4667 tree op = gimple_return_retval (stmt);
4668 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4670 /* We cannot test for present return values as we do not fix up missing
4671 return values from the original source. */
4672 if (op == NULL)
4673 return false;
4675 if (!is_gimple_val (op)
4676 && TREE_CODE (op) != RESULT_DECL)
4678 error ("invalid operand in return statement");
4679 debug_generic_stmt (op);
4680 return true;
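4681 /* A DECL_BY_REFERENCE result is represented by a pointer, so strip
4682 one level here; the check below then compares the pointed-to type against RESTYPE. */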
4683 if ((TREE_CODE (op) == RESULT_DECL
4684 && DECL_BY_REFERENCE (op))
4685 || (TREE_CODE (op) == SSA_NAME
4686 && SSA_NAME_VAR (op)
4687 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4688 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4689 op = TREE_TYPE (op);
4691 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4693 error ("invalid conversion in return statement");
4694 debug_generic_stmt (restype);
4695 debug_generic_stmt (TREE_TYPE (op));
4696 return true;
4699 return false;
4703 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4704 is a problem, otherwise false. */
4706 static bool
4707 verify_gimple_goto (ggoto *stmt)
4709 tree dest = gimple_goto_dest (stmt);
4711 /* ??? We have two canonical forms of direct goto destinations, a
4712 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4713 if (TREE_CODE (dest) != LABEL_DECL
4714 && (!is_gimple_val (dest)
4715 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4717 error ("goto destination is neither a label nor a pointer");
4718 return true;
4721 return false;
4724 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4725 is a problem, otherwise false. */
4727 static bool
4728 verify_gimple_switch (gswitch *stmt)
4730 unsigned int i, n;
4731 tree elt, prev_upper_bound = NULL_TREE;
4732 tree index_type, elt_type = NULL_TREE;
4734 if (!is_gimple_val (gimple_switch_index (stmt)))
4736 error ("invalid operand to switch statement");
4737 debug_generic_stmt (gimple_switch_index (stmt));
4738 return true;
4741 index_type = TREE_TYPE (gimple_switch_index (stmt));
4742 if (! INTEGRAL_TYPE_P (index_type))
4744 error ("non-integral type switch statement");
4745 debug_generic_expr (index_type);
4746 return true;
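4747 /* Label 0 is by convention the default case; it must carry neither
4748 CASE_LOW nor CASE_HIGH. */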
4749 elt = gimple_switch_label (stmt, 0);
4750 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4752 error ("invalid default case label in switch statement");
4753 debug_generic_expr (elt);
4754 return true;
4757 n = gimple_switch_num_labels (stmt);
4758 for (i = 1; i < n; i++)
4760 elt = gimple_switch_label (stmt, i);
4762 if (! CASE_LOW (elt))
4764 error ("invalid case label in switch statement");
4765 debug_generic_expr (elt);
4766 return true;
4768 if (CASE_HIGH (elt)
4769 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4771 error ("invalid case range in switch statement");
4772 debug_generic_expr (elt);
4773 return true;
4776 if (elt_type)
4778 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4779 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4781 error ("type mismatch for case label in switch statement");
4782 debug_generic_expr (elt);
4783 return true;
4786 else
4788 elt_type = TREE_TYPE (CASE_LOW (elt));
4789 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4791 error ("type precision mismatch in switch statement");
4792 return true;
4796 if (prev_upper_bound)
4798 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4800 error ("case labels not sorted in switch statement");
4801 return true;
4805 prev_upper_bound = CASE_HIGH (elt);
4806 if (! prev_upper_bound)
4807 prev_upper_bound = CASE_LOW (elt);
4810 return false;
4813 /* Verify a gimple debug statement STMT.
4814 Returns true if anything is wrong. */
4816 static bool
4817 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4819 /* There isn't much that could be wrong in a gimple debug stmt. A
4820 gimple debug bind stmt, for example, maps a tree that's usually
4821 a VAR_DECL or a PARM_DECL, but could also be some scalarized
4822 component or member of an aggregate type, to another tree, that
4823 can be an arbitrary expression. These stmts expand into debug
4824 insns, and are converted to debug notes by var-tracking.c. */
4825 return false;
4828 /* Verify a gimple label statement STMT.
4829 Returns true if anything is wrong. */
4831 static bool
4832 verify_gimple_label (glabel *stmt)
4834 tree decl = gimple_label_label (stmt);
4835 int uid;
4836 bool err = false;
4838 if (TREE_CODE (decl) != LABEL_DECL)
4839 return true;
4840 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4841 && DECL_CONTEXT (decl) != current_function_decl)
4843 error ("label's context is not the current function decl");
4844 err |= true;
4847 uid = LABEL_DECL_UID (decl);
4848 if (cfun->cfg
4849 && (uid == -1
4850 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4852 error ("incorrect entry in label_to_block_map");
4853 err |= true;
4856 uid = EH_LANDING_PAD_NR (decl);
4857 if (uid)
4859 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4860 if (decl != lp->post_landing_pad)
4862 error ("incorrect setting of landing pad number");
4863 err |= true;
4867 return err;
4870 /* Verify a gimple cond statement STMT.
4871 Returns true if anything is wrong. */
4873 static bool
4874 verify_gimple_cond (gcond *stmt)
4876 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4878 error ("invalid comparison code in gimple cond");
4879 return true;
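4880 /* A GIMPLE cond may still carry explicit destination labels before the CFG is built; if present they must be LABEL_DECLs. */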
4881 if (!(!gimple_cond_true_label (stmt)
4882 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4883 || !(!gimple_cond_false_label (stmt)
4884 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4886 error ("invalid labels in gimple cond");
4887 return true;
4890 return verify_gimple_comparison (boolean_type_node,
4891 gimple_cond_lhs (stmt),
4892 gimple_cond_rhs (stmt),
4893 gimple_cond_code (stmt));
4896 /* Verify the GIMPLE statement STMT. Returns true if there is an
4897 error, otherwise false. */
4899 static bool
4900 verify_gimple_stmt (gimple *stmt)
4902 switch (gimple_code (stmt))
4904 case GIMPLE_ASSIGN:
4905 return verify_gimple_assign (as_a <gassign *> (stmt));
4907 case GIMPLE_LABEL:
4908 return verify_gimple_label (as_a <glabel *> (stmt));
4910 case GIMPLE_CALL:
4911 return verify_gimple_call (as_a <gcall *> (stmt));
4913 case GIMPLE_COND:
4914 return verify_gimple_cond (as_a <gcond *> (stmt));
4916 case GIMPLE_GOTO:
4917 return verify_gimple_goto (as_a <ggoto *> (stmt));
4919 case GIMPLE_SWITCH:
4920 return verify_gimple_switch (as_a <gswitch *> (stmt));
4922 case GIMPLE_RETURN:
4923 return verify_gimple_return (as_a <greturn *> (stmt));
4925 case GIMPLE_ASM:
4926 return false;
4928 case GIMPLE_TRANSACTION:
4929 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4931 /* Tuples that do not have tree operands. */
4932 case GIMPLE_NOP:
4933 case GIMPLE_PREDICT:
4934 case GIMPLE_RESX:
4935 case GIMPLE_EH_DISPATCH:
4936 case GIMPLE_EH_MUST_NOT_THROW:
4937 return false;
4939 CASE_GIMPLE_OMP:
4940 /* OpenMP directives are validated by the FE and never operated
4941 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4942 non-gimple expressions when the main index variable has had
4943 its address taken. This does not affect the loop itself
4944 because the header of a GIMPLE_OMP_FOR is merely used to determine
4945 how to set up the parallel iteration.
4946 return false;
4948 case GIMPLE_DEBUG:
4949 return verify_gimple_debug (stmt);
4951 default:
4952 gcc_unreachable ();
4956 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4957 and false otherwise. */
4959 static bool
4960 verify_gimple_phi (gimple *phi)
4962 bool err = false;
4963 unsigned i;
4964 tree phi_result = gimple_phi_result (phi);
4965 bool virtual_p;
4967 if (!phi_result)
4969 error ("invalid PHI result");
4970 return true;
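4971 /* A PHI result is either a real operand or the one virtual operand
4972 of the function (gimple_vop); the loop below enforces the same split for every argument. */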
4973 virtual_p = virtual_operand_p (phi_result);
4974 if (TREE_CODE (phi_result) != SSA_NAME
4975 || (virtual_p
4976 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4978 error ("invalid PHI result");
4979 err = true;
4982 for (i = 0; i < gimple_phi_num_args (phi); i++)
4984 tree t = gimple_phi_arg_def (phi, i);
4986 if (!t)
4988 error ("missing PHI def");
4989 err |= true;
4990 continue;
4992 /* Addressable variables do have SSA_NAMEs but they
4993 are not considered gimple values. */
4994 else if ((TREE_CODE (t) == SSA_NAME
4995 && virtual_p != virtual_operand_p (t))
4996 || (virtual_p
4997 && (TREE_CODE (t) != SSA_NAME
4998 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4999 || (!virtual_p
5000 && !is_gimple_val (t)))
5002 error ("invalid PHI argument");
5003 debug_generic_expr (t);
5004 err |= true;
5006 #ifdef ENABLE_TYPES_CHECKING
5007 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5009 error ("incompatible types in PHI argument %u", i);
5010 debug_generic_stmt (TREE_TYPE (phi_result));
5011 debug_generic_stmt (TREE_TYPE (t));
5012 err |= true;
5014 #endif
5017 return err;
5020 /* Verify the GIMPLE statements inside the sequence STMTS. */
5022 static bool
5023 verify_gimple_in_seq_2 (gimple_seq stmts)
5025 gimple_stmt_iterator ittr;
5026 bool err = false;
5028 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5030 gimple *stmt = gsi_stmt (ittr);
5032 switch (gimple_code (stmt))
5034 case GIMPLE_BIND:
5035 err |= verify_gimple_in_seq_2 (
5036 gimple_bind_body (as_a <gbind *> (stmt)));
5037 break;
5039 case GIMPLE_TRY:
5040 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5041 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5042 break;
5044 case GIMPLE_EH_FILTER:
5045 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5046 break;
5048 case GIMPLE_EH_ELSE:
5050 geh_else *eh_else = as_a <geh_else *> (stmt);
5051 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5052 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5054 break;
5056 case GIMPLE_CATCH:
5057 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5058 as_a <gcatch *> (stmt)));
5059 break;
5061 case GIMPLE_TRANSACTION:
5062 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5063 break;
5065 default:
5067 bool err2 = verify_gimple_stmt (stmt);
5068 if (err2)
5069 debug_gimple_stmt (stmt);
5070 err |= err2;
5075 return err;
5078 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5079 is a problem, otherwise false. */
5081 static bool
5082 verify_gimple_transaction (gtransaction *stmt)
5084 tree lab;
5086 lab = gimple_transaction_label_norm (stmt);
5087 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5088 return true;
5089 lab = gimple_transaction_label_uninst (stmt);
5090 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5091 return true;
5092 lab = gimple_transaction_label_over (stmt);
5093 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5094 return true;
5096 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5100 /* Verify the GIMPLE statements inside the statement list STMTS. */
5102 DEBUG_FUNCTION void
5103 verify_gimple_in_seq (gimple_seq stmts)
5105 timevar_push (TV_TREE_STMT_VERIFY);
5106 if (verify_gimple_in_seq_2 (stmts))
5107 internal_error ("verify_gimple failed");
5108 timevar_pop (TV_TREE_STMT_VERIFY);
5111 /* Return true when tree node T can be shared. */
5113 static bool
5114 tree_node_can_be_shared (tree t)
5116 if (IS_TYPE_OR_DECL_P (t)
5117 || is_gimple_min_invariant (t)
5118 || TREE_CODE (t) == SSA_NAME
5119 || t == error_mark_node
5120 || TREE_CODE (t) == IDENTIFIER_NODE)
5121 return true;
5123 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5124 return true;
5126 if (DECL_P (t))
5127 return true;
5129 return false;
5132 /* Called via walk_tree. Verify tree sharing. */
5134 static tree
5135 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5137 hash_set<void *> *visited = (hash_set<void *> *) data;
5139 if (tree_node_can_be_shared (*tp))
5141 *walk_subtrees = false;
5142 return NULL;
5145 if (visited->add (*tp))
5146 return *tp;
5148 return NULL;
5151 /* Called via walk_gimple_stmt. Verify tree sharing. */
5153 static tree
5154 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5156 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5157 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5160 static bool eh_error_found;
5161 bool
5162 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5163 hash_set<gimple *> *visited)
5165 if (!visited->contains (stmt))
5167 error ("dead STMT in EH table");
5168 debug_gimple_stmt (stmt);
5169 eh_error_found = true;
5171 return true;
5174 /* Verify that the block of location LOC is in BLOCKS. */
5176 static bool
5177 verify_location (hash_set<tree> *blocks, location_t loc)
5179 tree block = LOCATION_BLOCK (loc);
5180 if (block != NULL_TREE
5181 && !blocks->contains (block))
5183 error ("location references block not in block tree");
5184 return true;
5186 if (block != NULL_TREE)
5187 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5188 return false;
5191 /* Called via walk_tree. Verify that expressions have no blocks. */
5193 static tree
5194 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5196 if (!EXPR_P (*tp))
5198 *walk_subtrees = false;
5199 return NULL;
5202 location_t loc = EXPR_LOCATION (*tp);
5203 if (LOCATION_BLOCK (loc) != NULL)
5204 return *tp;
5206 return NULL;
5209 /* Called via walk_tree. Verify locations of expressions. */
5211 static tree
5212 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5214 hash_set<tree> *blocks = (hash_set<tree> *) data;
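5215 /* DECL_DEBUG_EXPR and DECL_VALUE_EXPR trees are not part of any statement, so no expression inside them may carry a location that references a BLOCK. */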
5216 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5218 tree t = DECL_DEBUG_EXPR (*tp);
5219 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5220 if (addr)
5221 return addr;
5223 if ((VAR_P (*tp)
5224 || TREE_CODE (*tp) == PARM_DECL
5225 || TREE_CODE (*tp) == RESULT_DECL)
5226 && DECL_HAS_VALUE_EXPR_P (*tp))
5228 tree t = DECL_VALUE_EXPR (*tp);
5229 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5230 if (addr)
5231 return addr;
5234 if (!EXPR_P (*tp))
5236 *walk_subtrees = false;
5237 return NULL;
5240 location_t loc = EXPR_LOCATION (*tp);
5241 if (verify_location (blocks, loc))
5242 return *tp;
5244 return NULL;
5247 /* Called via walk_gimple_op. Verify locations of expressions. */
5249 static tree
5250 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5252 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5253 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5256 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5258 static void
5259 collect_subblocks (hash_set<tree> *blocks, tree block)
5261 tree t;
5262 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5264 blocks->add (t);
5265 collect_subblocks (blocks, t);
5269 /* Verify the GIMPLE statements in the CFG of FN. */
5271 DEBUG_FUNCTION void
5272 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5274 basic_block bb;
5275 bool err = false;
5277 timevar_push (TV_TREE_STMT_VERIFY);
5278 hash_set<void *> visited;
5279 hash_set<gimple *> visited_stmts;
5281 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5282 hash_set<tree> blocks;
5283 if (DECL_INITIAL (fn->decl))
5285 blocks.add (DECL_INITIAL (fn->decl));
5286 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5289 FOR_EACH_BB_FN (bb, fn)
5291 gimple_stmt_iterator gsi;
5293 for (gphi_iterator gpi = gsi_start_phis (bb);
5294 !gsi_end_p (gpi);
5295 gsi_next (&gpi))
5297 gphi *phi = gpi.phi ();
5298 bool err2 = false;
5299 unsigned i;
5301 visited_stmts.add (phi);
5303 if (gimple_bb (phi) != bb)
5305 error ("gimple_bb (phi) is set to a wrong basic block");
5306 err2 = true;
5309 err2 |= verify_gimple_phi (phi);
5311 /* Only PHI arguments have locations. */
5312 if (gimple_location (phi) != UNKNOWN_LOCATION)
5314 error ("PHI node with location");
5315 err2 = true;
5318 for (i = 0; i < gimple_phi_num_args (phi); i++)
5320 tree arg = gimple_phi_arg_def (phi, i);
5321 tree addr = walk_tree (&arg, verify_node_sharing_1,
5322 &visited, NULL);
5323 if (addr)
5325 error ("incorrect sharing of tree nodes");
5326 debug_generic_expr (addr);
5327 err2 |= true;
5329 location_t loc = gimple_phi_arg_location (phi, i);
5330 if (virtual_operand_p (gimple_phi_result (phi))
5331 && loc != UNKNOWN_LOCATION)
5333 error ("virtual PHI with argument locations");
5334 err2 = true;
5336 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5337 if (addr)
5339 debug_generic_expr (addr);
5340 err2 = true;
5342 err2 |= verify_location (&blocks, loc);
5345 if (err2)
5346 debug_gimple_stmt (phi);
5347 err |= err2;
5350 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5352 gimple *stmt = gsi_stmt (gsi);
5353 bool err2 = false;
5354 struct walk_stmt_info wi;
5355 tree addr;
5356 int lp_nr;
5358 visited_stmts.add (stmt);
5360 if (gimple_bb (stmt) != bb)
5362 error ("gimple_bb (stmt) is set to a wrong basic block");
5363 err2 = true;
5366 err2 |= verify_gimple_stmt (stmt);
5367 err2 |= verify_location (&blocks, gimple_location (stmt));
5369 memset (&wi, 0, sizeof (wi));
5370 wi.info = (void *) &visited;
5371 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5372 if (addr)
5374 error ("incorrect sharing of tree nodes");
5375 debug_generic_expr (addr);
5376 err2 |= true;
5379 memset (&wi, 0, sizeof (wi));
5380 wi.info = (void *) &blocks;
5381 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5382 if (addr)
5384 debug_generic_expr (addr);
5385 err2 |= true;
5388 /* ??? Instead of not checking these stmts at all the walker
5389 should know its context via wi. */
5390 if (!is_gimple_debug (stmt)
5391 && !is_gimple_omp (stmt))
5393 memset (&wi, 0, sizeof (wi));
5394 addr = walk_gimple_op (stmt, verify_expr, &wi);
5395 if (addr)
5397 debug_generic_expr (addr);
5398 inform (gimple_location (stmt), "in statement");
5399 err2 |= true;
5403 /* If the statement is marked as part of an EH region, then it is
5404 expected that the statement could throw. Verify that when we
5405 have optimizations that simplify statements such that we prove
5406 that they cannot throw, that we update other data structures
5407 to match. */
5408 lp_nr = lookup_stmt_eh_lp (stmt);
5409 if (lp_nr > 0)
5411 if (!stmt_could_throw_p (stmt))
5413 if (verify_nothrow)
5415 error ("statement marked for throw, but doesn%'t");
5416 err2 |= true;
5419 else if (!gsi_one_before_end_p (gsi))
5421 error ("statement marked for throw in middle of block");
5422 err2 |= true;
5426 if (err2)
5427 debug_gimple_stmt (stmt);
5428 err |= err2;
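5429 /* Finally check the EH throw table: every statement it records must
5430 still appear in the CFG walked above; verify_eh_throw_stmt_node
5431 reports anything left over as a dangling entry. */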
5432 eh_error_found = false;
5433 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5434 if (eh_table)
5435 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5436 (&visited_stmts);
5438 if (err || eh_error_found)
5439 internal_error ("verify_gimple failed");
5441 verify_histograms ();
5442 timevar_pop (TV_TREE_STMT_VERIFY);
5446 /* Verifies that the flow information is OK. */
5448 static int
5449 gimple_verify_flow_info (void)
5451 int err = 0;
5452 basic_block bb;
5453 gimple_stmt_iterator gsi;
5454 gimple *stmt;
5455 edge e;
5456 edge_iterator ei;
5458 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5459 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5461 error ("ENTRY_BLOCK has IL associated with it");
5462 err = 1;
5465 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5466 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5468 error ("EXIT_BLOCK has IL associated with it");
5469 err = 1;
5472 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5473 if (e->flags & EDGE_FALLTHRU)
5475 error ("fallthru to exit from bb %d", e->src->index);
5476 err = 1;
5479 FOR_EACH_BB_FN (bb, cfun)
5481 bool found_ctrl_stmt = false;
5483 stmt = NULL;
5485 /* Skip labels on the start of basic block. */
5486 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5488 tree label;
5489 gimple *prev_stmt = stmt;
5491 stmt = gsi_stmt (gsi);
5493 if (gimple_code (stmt) != GIMPLE_LABEL)
5494 break;
5496 label = gimple_label_label (as_a <glabel *> (stmt));
5497 if (prev_stmt && DECL_NONLOCAL (label))
5499 error ("nonlocal label ");
5500 print_generic_expr (stderr, label);
5501 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5502 bb->index);
5503 err = 1;
5506 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5508 error ("EH landing pad label ");
5509 print_generic_expr (stderr, label);
5510 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5511 bb->index);
5512 err = 1;
5515 if (label_to_block (label) != bb)
5517 error ("label ");
5518 print_generic_expr (stderr, label);
5519 fprintf (stderr, " to block does not match in bb %d",
5520 bb->index);
5521 err = 1;
5524 if (decl_function_context (label) != current_function_decl)
5526 error ("label ");
5527 print_generic_expr (stderr, label);
5528 fprintf (stderr, " has incorrect context in bb %d",
5529 bb->index);
5530 err = 1;
5534 /* Verify that body of basic block BB is free of control flow. */
5535 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5537 gimple *stmt = gsi_stmt (gsi);
5539 if (found_ctrl_stmt)
5541 error ("control flow in the middle of basic block %d",
5542 bb->index);
5543 err = 1;
5546 if (stmt_ends_bb_p (stmt))
5547 found_ctrl_stmt = true;
5549 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5551 error ("label ");
5552 print_generic_expr (stderr, gimple_label_label (label_stmt));
5553 fprintf (stderr, " in the middle of basic block %d", bb->index);
5554 err = 1;
5558 gsi = gsi_last_bb (bb);
5559 if (gsi_end_p (gsi))
5560 continue;
5562 stmt = gsi_stmt (gsi);
5564 if (gimple_code (stmt) == GIMPLE_LABEL)
5565 continue;
5567 err |= verify_eh_edges (stmt);
5569 if (is_ctrl_stmt (stmt))
5571 FOR_EACH_EDGE (e, ei, bb->succs)
5572 if (e->flags & EDGE_FALLTHRU)
5574 error ("fallthru edge after a control statement in bb %d",
5575 bb->index);
5576 err = 1;
5580 if (gimple_code (stmt) != GIMPLE_COND)
5582 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5583 after anything else but if statement. */
5584 FOR_EACH_EDGE (e, ei, bb->succs)
5585 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5587 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5588 bb->index);
5589 err = 1;
5593 switch (gimple_code (stmt))
5595 case GIMPLE_COND:
5597 edge true_edge;
5598 edge false_edge;
5600 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5602 if (!true_edge
5603 || !false_edge
5604 || !(true_edge->flags & EDGE_TRUE_VALUE)
5605 || !(false_edge->flags & EDGE_FALSE_VALUE)
5606 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5607 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5608 || EDGE_COUNT (bb->succs) >= 3)
5610 error ("wrong outgoing edge flags at end of bb %d",
5611 bb->index);
5612 err = 1;
5615 break;
5617 case GIMPLE_GOTO:
5618 if (simple_goto_p (stmt))
5620 error ("explicit goto at end of bb %d", bb->index);
5621 err = 1;
5623 else
5625 /* FIXME. We should double check that the labels in the
5626 destination blocks have their address taken. */
5627 FOR_EACH_EDGE (e, ei, bb->succs)
5628 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5629 | EDGE_FALSE_VALUE))
5630 || !(e->flags & EDGE_ABNORMAL))
5632 error ("wrong outgoing edge flags at end of bb %d",
5633 bb->index);
5634 err = 1;
5637 break;
5639 case GIMPLE_CALL:
5640 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5641 break;
5642 /* fallthru */
5643 case GIMPLE_RETURN:
5644 if (!single_succ_p (bb)
5645 || (single_succ_edge (bb)->flags
5646 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5647 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5649 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5650 err = 1;
5652 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5654 error ("return edge does not point to exit in bb %d",
5655 bb->index);
5656 err = 1;
5658 break;
5660 case GIMPLE_SWITCH:
5662 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5663 tree prev;
5664 edge e;
5665 size_t i, n;
5667 n = gimple_switch_num_labels (switch_stmt);
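5668 /* bb->aux serves as a small state marker below: 1 for an expected switch target, 2 once a matching outgoing edge has been seen. */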
5669 /* Mark all the destination basic blocks. */
5670 for (i = 0; i < n; ++i)
5672 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5673 basic_block label_bb = label_to_block (lab);
5674 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5675 label_bb->aux = (void *)1;
5678 /* Verify that the case labels are sorted. */
5679 prev = gimple_switch_label (switch_stmt, 0);
5680 for (i = 1; i < n; ++i)
5682 tree c = gimple_switch_label (switch_stmt, i);
5683 if (!CASE_LOW (c))
5685 error ("found default case not at the start of "
5686 "case vector");
5687 err = 1;
5688 continue;
5690 if (CASE_LOW (prev)
5691 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5693 error ("case labels not sorted: ");
5694 print_generic_expr (stderr, prev);
5695 fprintf (stderr," is greater than ");
5696 print_generic_expr (stderr, c);
5697 fprintf (stderr," but comes before it.\n");
5698 err = 1;
5700 prev = c;
5702 /* VRP will remove the default case if it can prove it will
5703 never be executed. So do not verify there always exists
5704 a default case here. */
5706 FOR_EACH_EDGE (e, ei, bb->succs)
5708 if (!e->dest->aux)
5710 error ("extra outgoing edge %d->%d",
5711 bb->index, e->dest->index);
5712 err = 1;
5715 e->dest->aux = (void *)2;
5716 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5717 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5719 error ("wrong outgoing edge flags at end of bb %d",
5720 bb->index);
5721 err = 1;
5725 /* Check that we have all of them. */
5726 for (i = 0; i < n; ++i)
5728 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5729 basic_block label_bb = label_to_block (lab);
5731 if (label_bb->aux != (void *)2)
5733 error ("missing edge %i->%i", bb->index, label_bb->index);
5734 err = 1;
5738 FOR_EACH_EDGE (e, ei, bb->succs)
5739 e->dest->aux = (void *)0;
5741 break;
5743 case GIMPLE_EH_DISPATCH:
5744 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5745 break;
5747 default:
5748 break;
5752 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5753 verify_dominators (CDI_DOMINATORS);
5755 return err;
5759 /* Updates phi nodes after creating a forwarder block joined
5760 by edge FALLTHRU. */
5762 static void
5763 gimple_make_forwarder_block (edge fallthru)
5765 edge e;
5766 edge_iterator ei;
5767 basic_block dummy, bb;
5768 tree var;
5769 gphi_iterator gsi;
5771 dummy = fallthru->src;
5772 bb = fallthru->dest;
5774 if (single_pred_p (bb))
5775 return;
5777 /* If we redirected a branch we must create new PHI nodes at the
5778 start of BB. */
5779 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5781 gphi *phi, *new_phi;
5783 phi = gsi.phi ();
5784 var = gimple_phi_result (phi);
5785 new_phi = create_phi_node (var, bb);
5786 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5787 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5788 UNKNOWN_LOCATION);
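5789 /* Each PHI in DUMMY now defines a fresh name, which is fed into the
5790 corresponding new PHI in BB along the FALLTHRU edge. */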
5791 /* Add the arguments we have stored on edges. */
5792 FOR_EACH_EDGE (e, ei, bb->preds)
5794 if (e == fallthru)
5795 continue;
5797 flush_pending_stmts (e);
5802 /* Return a non-special label at the head of basic block BB.
5803 Create one if it doesn't exist. */
5805 tree
5806 gimple_block_label (basic_block bb)
5808 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5809 bool first = true;
5810 tree label;
5811 glabel *stmt;
5813 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5815 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5816 if (!stmt)
5817 break;
5818 label = gimple_label_label (stmt);
5819 if (!DECL_NONLOCAL (label))
5821 if (!first)
5822 gsi_move_before (&i, &s);
5823 return label;
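5824 /* Only nonlocal labels were found, or none at all; create a fresh
5825 artificial label at the start of the block. */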
5827 label = create_artificial_label (UNKNOWN_LOCATION);
5828 stmt = gimple_build_label (label);
5829 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5830 return label;
5834 /* Attempt to perform edge redirection by replacing a possibly complex
5835 jump instruction by a goto or by removing the jump completely.
5836 This can apply only if all edges now point to the same block. The
5837 parameters and return values are equivalent to
5838 redirect_edge_and_branch. */
5840 static edge
5841 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5843 basic_block src = e->src;
5844 gimple_stmt_iterator i;
5845 gimple *stmt;
5847 /* We can replace or remove a complex jump only when we have exactly
5848 two edges. */
5849 if (EDGE_COUNT (src->succs) != 2
5850 /* Verify that all targets will be TARGET. Specifically, the edge
5851 that is not E (selected by the boolean index) must also go to TARGET. */
5852 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5853 return NULL;
5855 i = gsi_last_bb (src);
5856 if (gsi_end_p (i))
5857 return NULL;
5859 stmt = gsi_stmt (i);
5861 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5863 gsi_remove (&i, true);
5864 e = ssa_redirect_edge (e, target);
5865 e->flags = EDGE_FALLTHRU;
5866 return e;
5869 return NULL;
5873 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5874 edge representing the redirected branch. */
5876 static edge
5877 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5879 basic_block bb = e->src;
5880 gimple_stmt_iterator gsi;
5881 edge ret;
5882 gimple *stmt;
5884 if (e->flags & EDGE_ABNORMAL)
5885 return NULL;
5887 if (e->dest == dest)
5888 return NULL;
5890 if (e->flags & EDGE_EH)
5891 return redirect_eh_edge (e, dest);
5893 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5895 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5896 if (ret)
5897 return ret;
5900 gsi = gsi_last_bb (bb);
5901 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5903 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5905 case GIMPLE_COND:
5906 /* For COND_EXPR, we only need to redirect the edge. */
5907 break;
5909 case GIMPLE_GOTO:
5910 /* No non-abnormal edges should lead from a non-simple goto, and
5911 simple ones should be represented implicitly. */
5912 gcc_unreachable ();
5914 case GIMPLE_SWITCH:
5916 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5917 tree label = gimple_block_label (dest);
5918 tree cases = get_cases_for_edge (e, switch_stmt);
5920 /* If we have a list of cases associated with E, then use it
5921 as it's a lot faster than walking the entire case vector. */
5922 if (cases)
5924 edge e2 = find_edge (e->src, dest);
5925 tree last, first;
5927 first = cases;
5928 while (cases)
5930 last = cases;
5931 CASE_LABEL (cases) = label;
5932 cases = CASE_CHAIN (cases);
5935 /* If there was already an edge in the CFG, then we need
5936 to move all the cases associated with E to E2. */
5937 if (e2)
5939 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5941 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5942 CASE_CHAIN (cases2) = first;
5944 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5946 else
5948 size_t i, n = gimple_switch_num_labels (switch_stmt);
5950 for (i = 0; i < n; i++)
5952 tree elt = gimple_switch_label (switch_stmt, i);
5953 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5954 CASE_LABEL (elt) = label;
5958 break;
5960 case GIMPLE_ASM:
5962 gasm *asm_stmt = as_a <gasm *> (stmt);
5963 int i, n = gimple_asm_nlabels (asm_stmt);
5964 tree label = NULL;
5966 for (i = 0; i < n; ++i)
5968 tree cons = gimple_asm_label_op (asm_stmt, i);
5969 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5971 if (!label)
5972 label = gimple_block_label (dest);
5973 TREE_VALUE (cons) = label;
5977 /* If we didn't find any label matching the former edge in the
5978 asm labels, we must be redirecting the fallthrough
5979 edge. */
5980 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5982 break;
5984 case GIMPLE_RETURN:
5985 gsi_remove (&gsi, true);
5986 e->flags |= EDGE_FALLTHRU;
5987 break;
5989 case GIMPLE_OMP_RETURN:
5990 case GIMPLE_OMP_CONTINUE:
5991 case GIMPLE_OMP_SECTIONS_SWITCH:
5992 case GIMPLE_OMP_FOR:
5993 /* The edges from OMP constructs can be simply redirected. */
5994 break;
5996 case GIMPLE_EH_DISPATCH:
5997 if (!(e->flags & EDGE_FALLTHRU))
5998 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5999 break;
6001 case GIMPLE_TRANSACTION:
6002 if (e->flags & EDGE_TM_ABORT)
6003 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6004 gimple_block_label (dest));
6005 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6006 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6007 gimple_block_label (dest));
6008 else
6009 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6010 gimple_block_label (dest));
6011 break;
6013 default:
6014 /* Otherwise it must be a fallthru edge, and we don't need to
6015 do anything besides redirecting it. */
6016 gcc_assert (e->flags & EDGE_FALLTHRU);
6017 break;
6020 /* Update/insert PHI nodes as necessary. */
6022 /* Now update the edges in the CFG. */
6023 e = ssa_redirect_edge (e, dest);
6025 return e;
6028 /* Returns true if it is possible to remove edge E by redirecting
6029 it to the destination of the other edge from E->src. */
6031 static bool
6032 gimple_can_remove_branch_p (const_edge e)
6034 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6035 return false;
6037 return true;
6040 /* Simple wrapper, as we can always redirect fallthru edges. */
6042 static basic_block
6043 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6045 e = gimple_redirect_edge_and_branch (e, dest);
6046 gcc_assert (e);
6048 return NULL;
6052 /* Splits basic block BB after statement STMT (but at least after the
6053 labels). If STMT is NULL, BB is split just after the labels. */
6055 static basic_block
6056 gimple_split_block (basic_block bb, void *stmt)
6058 gimple_stmt_iterator gsi;
6059 gimple_stmt_iterator gsi_tgt;
6060 gimple_seq list;
6061 basic_block new_bb;
6062 edge e;
6063 edge_iterator ei;
6065 new_bb = create_empty_bb (bb);
6067 /* Redirect the outgoing edges. */
6068 new_bb->succs = bb->succs;
6069 bb->succs = NULL;
6070 FOR_EACH_EDGE (e, ei, new_bb->succs)
6071 e->src = new_bb;
6073 /* Get a stmt iterator pointing to the first stmt to move. */
6074 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6075 gsi = gsi_after_labels (bb);
6076 else
6078 gsi = gsi_for_stmt ((gimple *) stmt);
6079 gsi_next (&gsi);
6082 /* Move everything from GSI to the new basic block. */
6083 if (gsi_end_p (gsi))
6084 return new_bb;
6086 /* Split the statement list - avoid re-creating containers as this
6087 brings ugly quadratic memory consumption in the inliner.
6088 (We are still quadratic since we need to update stmt BB pointers,
6089 sadly.) */
6090 gsi_split_seq_before (&gsi, &list);
6091 set_bb_seq (new_bb, list);
6092 for (gsi_tgt = gsi_start (list);
6093 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6094 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6096 return new_bb;
6100 /* Moves basic block BB after block AFTER. */
6102 static bool
6103 gimple_move_block_after (basic_block bb, basic_block after)
6105 if (bb->prev_bb == after)
6106 return true;
6108 unlink_block (bb);
6109 link_block (bb, after);
6111 return true;
6115 /* Return TRUE if block BB has no executable statements, otherwise return
6116 FALSE. */
6118 static bool
6119 gimple_empty_block_p (basic_block bb)
6121 /* BB must have no executable statements. */
6122 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6123 if (phi_nodes (bb))
6124 return false;
6125 if (gsi_end_p (gsi))
6126 return true;
6127 if (is_gimple_debug (gsi_stmt (gsi)))
6128 gsi_next_nondebug (&gsi);
6129 return gsi_end_p (gsi);
6133 /* Split a basic block if it ends with a conditional branch and if the
6134 other part of the block is not empty. */
6136 static basic_block
6137 gimple_split_block_before_cond_jump (basic_block bb)
6139 gimple *last, *split_point;
6140 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6141 if (gsi_end_p (gsi))
6142 return NULL;
6143 last = gsi_stmt (gsi);
6144 if (gimple_code (last) != GIMPLE_COND
6145 && gimple_code (last) != GIMPLE_SWITCH)
6146 return NULL;
6147 gsi_prev (&gsi);
6148 split_point = gsi_stmt (gsi);
6149 return split_block (bb, split_point)->dest;
6153 /* Return true if basic_block can be duplicated. */
6155 static bool
6156 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6158 return true;
6161 /* Create a duplicate of the basic block BB. NOTE: This does not
6162 preserve SSA form. */
6164 static basic_block
6165 gimple_duplicate_bb (basic_block bb)
6167 basic_block new_bb;
6168 gimple_stmt_iterator gsi_tgt;
6170 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6172 /* Copy the PHI nodes. We ignore PHI node arguments here because
6173 the incoming edges have not been set up yet. */
6174 for (gphi_iterator gpi = gsi_start_phis (bb);
6175 !gsi_end_p (gpi);
6176 gsi_next (&gpi))
6178 gphi *phi, *copy;
6179 phi = gpi.phi ();
6180 copy = create_phi_node (NULL_TREE, new_bb);
6181 create_new_def_for (gimple_phi_result (phi), copy,
6182 gimple_phi_result_ptr (copy));
6183 gimple_set_uid (copy, gimple_uid (phi));
6186 gsi_tgt = gsi_start_bb (new_bb);
6187 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6188 !gsi_end_p (gsi);
6189 gsi_next (&gsi))
6191 def_operand_p def_p;
6192 ssa_op_iter op_iter;
6193 tree lhs;
6194 gimple *stmt, *copy;
6196 stmt = gsi_stmt (gsi);
6197 if (gimple_code (stmt) == GIMPLE_LABEL)
6198 continue;
6200 /* Don't duplicate label debug stmts. */
6201 if (gimple_debug_bind_p (stmt)
6202 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6203 == LABEL_DECL)
6204 continue;
6206 /* Create a new copy of STMT and duplicate STMT's virtual
6207 operands. */
6208 copy = gimple_copy (stmt);
6209 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6211 maybe_duplicate_eh_stmt (copy, stmt);
6212 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6214 /* When copying around a stmt writing into a local non-user
6215 aggregate, make sure it won't share stack slot with other
6216 vars. */
6217 lhs = gimple_get_lhs (stmt);
6218 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6220 tree base = get_base_address (lhs);
6221 if (base
6222 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6223 && DECL_IGNORED_P (base)
6224 && !TREE_STATIC (base)
6225 && !DECL_EXTERNAL (base)
6226 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6227 DECL_NONSHAREABLE (base) = 1;
6230 /* Create new names for all the definitions created by COPY and
6231 add replacement mappings for each new name. */
6232 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6233 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6236 return new_bb;
6239 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6241 static void
6242 add_phi_args_after_copy_edge (edge e_copy)
6244 basic_block bb, bb_copy = e_copy->src, dest;
6245 edge e;
6246 edge_iterator ei;
6247 gphi *phi, *phi_copy;
6248 tree def;
6249 gphi_iterator psi, psi_copy;
6251 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6252 return;
6254 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6256 if (e_copy->dest->flags & BB_DUPLICATED)
6257 dest = get_bb_original (e_copy->dest);
6258 else
6259 dest = e_copy->dest;
6261 e = find_edge (bb, dest);
6262 if (!e)
6264 /* During loop unrolling the target of the latch edge is copied.
6265 In this case we are not looking for the edge to DEST, but for
6266 the edge to the duplicated block whose original was DEST. */
6267 FOR_EACH_EDGE (e, ei, bb->succs)
6269 if ((e->dest->flags & BB_DUPLICATED)
6270 && get_bb_original (e->dest) == dest)
6271 break;
6274 gcc_assert (e != NULL);
6277 for (psi = gsi_start_phis (e->dest),
6278 psi_copy = gsi_start_phis (e_copy->dest);
6279 !gsi_end_p (psi);
6280 gsi_next (&psi), gsi_next (&psi_copy))
6282 phi = psi.phi ();
6283 phi_copy = psi_copy.phi ();
6284 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6285 add_phi_arg (phi_copy, def, e_copy,
6286 gimple_phi_arg_location_from_edge (phi, e));
6291 /* Basic block BB_COPY was created by code duplication. Add phi node
6292 arguments for edges going out of BB_COPY. The blocks that were
6293 duplicated have BB_DUPLICATED set. */
6295 void
6296 add_phi_args_after_copy_bb (basic_block bb_copy)
6298 edge e_copy;
6299 edge_iterator ei;
6301 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6303 add_phi_args_after_copy_edge (e_copy);
6307 /* Blocks in REGION_COPY array of length N_REGION were created by
6308 duplication of basic blocks. Add phi node arguments for edges
6309 going from these blocks. If E_COPY is not NULL, also add
6310 phi node arguments for its destination. */
6312 void
6313 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6314 edge e_copy)
6316 unsigned i;
6318 for (i = 0; i < n_region; i++)
6319 region_copy[i]->flags |= BB_DUPLICATED;
6321 for (i = 0; i < n_region; i++)
6322 add_phi_args_after_copy_bb (region_copy[i]);
6323 if (e_copy)
6324 add_phi_args_after_copy_edge (e_copy);
6326 for (i = 0; i < n_region; i++)
6327 region_copy[i]->flags &= ~BB_DUPLICATED;
6330 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6331 important exit edge EXIT. By important we mean that no SSA name defined
6332 inside region is live over the other exit edges of the region. All entry
6333 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6334 to the duplicate of the region. Dominance and loop information is
6335 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6336 UPDATE_DOMINANCE is false then we assume that the caller will update the
6337 dominance information after calling this function. The new basic
6338 blocks are stored to REGION_COPY in the same order as they had in REGION,
6339 provided that REGION_COPY is not NULL.
6340 The function returns false if it is unable to copy the region,
6341 true otherwise. */
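6342 /* The primary client is loop header copying, which passes an edge into the loop header as ENTRY and the edge whose source will become the new latch as EXIT; see the copying_header handling below. */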
6343 bool
6344 gimple_duplicate_sese_region (edge entry, edge exit,
6345 basic_block *region, unsigned n_region,
6346 basic_block *region_copy,
6347 bool update_dominance)
6349 unsigned i;
6350 bool free_region_copy = false, copying_header = false;
6351 struct loop *loop = entry->dest->loop_father;
6352 edge exit_copy;
6353 vec<basic_block> doms = vNULL;
6354 edge redirected;
6355 profile_count total_count = profile_count::uninitialized ();
6356 profile_count entry_count = profile_count::uninitialized ();
6358 if (!can_copy_bbs_p (region, n_region))
6359 return false;
6361 /* Some sanity checking. Note that we do not check for all possible
6362 misuses of the functions. I.e. if you ask to copy something weird,
6363 it will work, but the state of structures probably will not be
6364 correct. */
6365 for (i = 0; i < n_region; i++)
6367 /* We do not handle subloops, i.e. all the blocks must belong to the
6368 same loop. */
6369 if (region[i]->loop_father != loop)
6370 return false;
6372 if (region[i] != entry->dest
6373 && region[i] == loop->header)
6374 return false;
6377 /* In case the function is used for loop header copying (which is the primary
6378 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6379 if (loop->header == entry->dest)
6381 copying_header = true;
6383 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6384 return false;
6386 for (i = 0; i < n_region; i++)
6387 if (region[i] != exit->src
6388 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6389 return false;
6392 initialize_original_copy_tables ();
6394 if (copying_header)
6395 set_loop_copy (loop, loop_outer (loop));
6396 else
6397 set_loop_copy (loop, loop);
6399 if (!region_copy)
6401 region_copy = XNEWVEC (basic_block, n_region);
6402 free_region_copy = true;
6405 /* Record blocks outside the region that are dominated by something
6406 inside. */
6407 if (update_dominance)
6409 doms.create (0);
6410 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6413 if (entry->dest->count.initialized_p ())
6415 total_count = entry->dest->count;
6416 entry_count = entry->count ();
6417 /* Fix up corner cases, to avoid division by zero or creation of negative
6418 frequencies. */
6419 if (entry_count > total_count)
6420 entry_count = total_count;
6423 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6424 split_edge_bb_loc (entry), update_dominance);
6425 if (total_count.initialized_p () && entry_count.initialized_p ())
6427 scale_bbs_frequencies_profile_count (region, n_region,
6428 total_count - entry_count,
6429 total_count);
6430 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6431 total_count);
6434 if (copying_header)
6436 loop->header = exit->dest;
6437 loop->latch = exit->src;
6440 /* Redirect the entry and add the phi node arguments. */
6441 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6442 gcc_assert (redirected != NULL);
6443 flush_pending_stmts (entry);
6445 /* Concerning updating of dominators: We must recount dominators
6446 for entry block and its copy. Anything that is outside of the
6447 region, but was dominated by something inside needs recounting as
6448 well. */
6449 if (update_dominance)
6451 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6452 doms.safe_push (get_bb_original (entry->dest));
6453 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6454 doms.release ();
6457 /* Add the other PHI node arguments. */
6458 add_phi_args_after_copy (region_copy, n_region, NULL);
6460 if (free_region_copy)
6461 free (region_copy);
6463 free_original_copy_tables ();
6464 return true;
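/* Illustrative sketch (not from the GCC sources): loop header copying is
   the primary user of gimple_duplicate_sese_region above.  BBS, N_BBS
   and COPIED_BBS are placeholders for arrays the caller has filled in,
   and using single_exit for EXIT is a simplifying assumption.  */

static bool
example_copy_loop_header (struct loop *loop, basic_block *bbs,
			  unsigned n_bbs, basic_block *copied_bbs)
{
  edge entry = loop_preheader_edge (loop);
  edge exit = single_exit (loop);

  if (!exit)
    return false;
  /* Redirect ENTRY to the copied region and update dominators.  */
  return gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
				       copied_bbs, true);
}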
6467 /* Checks if BB is part of the region defined by N_REGION BBS. */
6468 static bool
6469 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6471 unsigned int n;
6473 for (n = 0; n < n_region; n++)
6475 if (bb == bbs[n])
6476 return true;
6478 return false;
6481 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6482 are stored to REGION_COPY in the same order in which they appear
6483 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6484 the region, EXIT an exit from it. The condition guarding EXIT
6485 is moved to ENTRY. Returns true if duplication succeeds, false
6486 otherwise.
6488 For example,
6489
6490   some_code;
6491   if (cond)
6492     A;
6493   else
6494     B;
6495
6496 is transformed to
6497
6498   if (cond)
6499     {
6500       some_code;
6501       A;
6502     }
6503   else
6504     {
6505       some_code;
6506       B;
6507     }
6508 */
6510 bool
6511 gimple_duplicate_sese_tail (edge entry, edge exit,
6512 basic_block *region, unsigned n_region,
6513 basic_block *region_copy)
6515 unsigned i;
6516 bool free_region_copy = false;
6517 struct loop *loop = exit->dest->loop_father;
6518 struct loop *orig_loop = entry->dest->loop_father;
6519 basic_block switch_bb, entry_bb, nentry_bb;
6520 vec<basic_block> doms;
6521 profile_count total_count = profile_count::uninitialized (),
6522 exit_count = profile_count::uninitialized ();
6523 edge exits[2], nexits[2], e;
6524 gimple_stmt_iterator gsi;
6525 gimple *cond_stmt;
6526 edge sorig, snew;
6527 basic_block exit_bb;
6528 gphi_iterator psi;
6529 gphi *phi;
6530 tree def;
6531 struct loop *target, *aloop, *cloop;
6533 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6534 exits[0] = exit;
6535 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6537 if (!can_copy_bbs_p (region, n_region))
6538 return false;
6540 initialize_original_copy_tables ();
6541 set_loop_copy (orig_loop, loop);
6543 target = loop;
6544 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6546 if (bb_part_of_region_p (aloop->header, region, n_region))
6548 cloop = duplicate_loop (aloop, target);
6549 duplicate_subloops (aloop, cloop);
6553 if (!region_copy)
6555 region_copy = XNEWVEC (basic_block, n_region);
6556 free_region_copy = true;
6559 gcc_assert (!need_ssa_update_p (cfun));
6561 /* Record blocks outside the region that are dominated by something
6562 inside. */
6563 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6565 total_count = exit->src->count;
6566 exit_count = exit->count ();
6567 /* Fix up corner cases, to avoid division by zero or creation of negative
6568 frequencies. */
6569 if (exit_count > total_count)
6570 exit_count = total_count;
6572 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6573 split_edge_bb_loc (exit), true);
6574 if (total_count.initialized_p () && exit_count.initialized_p ())
6576 scale_bbs_frequencies_profile_count (region, n_region,
6577 total_count - exit_count,
6578 total_count);
6579 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6580 total_count);
6583 /* Create the switch block, and put the exit condition to it. */
6584 entry_bb = entry->dest;
6585 nentry_bb = get_bb_copy (entry_bb);
6586 if (!last_stmt (entry->src)
6587 || !stmt_ends_bb_p (last_stmt (entry->src)))
6588 switch_bb = entry->src;
6589 else
6590 switch_bb = split_edge (entry);
6591 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6593 gsi = gsi_last_bb (switch_bb);
6594 cond_stmt = last_stmt (exit->src);
6595 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6596 cond_stmt = gimple_copy (cond_stmt);
6598 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6600 sorig = single_succ_edge (switch_bb);
6601 sorig->flags = exits[1]->flags;
6602 sorig->probability = exits[1]->probability;
6603 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6604 snew->probability = exits[0]->probability;
6607 /* Register the new edge from SWITCH_BB in loop exit lists. */
6608 rescan_loop_exit (snew, true, false);
6610 /* Add the PHI node arguments. */
6611 add_phi_args_after_copy (region_copy, n_region, snew);
6613 /* Get rid of now superfluous conditions and associated edges (and phi node
6614 arguments). */
6615 exit_bb = exit->dest;
6617 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6618 PENDING_STMT (e) = NULL;
6620 /* The latch of ORIG_LOOP was copied, and so was the backedge
6621 to the original header. We redirect this backedge to EXIT_BB. */
6622 for (i = 0; i < n_region; i++)
6623 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6625 gcc_assert (single_succ_edge (region_copy[i]));
6626 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6627 PENDING_STMT (e) = NULL;
6628 for (psi = gsi_start_phis (exit_bb);
6629 !gsi_end_p (psi);
6630 gsi_next (&psi))
6632 phi = psi.phi ();
6633 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6634 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6637 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6638 PENDING_STMT (e) = NULL;
6640 /* Anything that is outside of the region, but was dominated by something
6641 inside needs to update dominance info. */
6642 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6643 doms.release ();
6644 /* Update the SSA web. */
6645 update_ssa (TODO_update_ssa);
6647 if (free_region_copy)
6648 free (region_copy);
6650 free_original_copy_tables ();
6651 return true;
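/* Illustrative sketch (not from the GCC sources): a caller such as the
   loop parallelization code uses the function above to move the exit
   test of a loop before its body.  REGION/N_REGION stand for blocks the
   caller has collected, using single_exit is a simplifying assumption,
   and passing NULL lets the function allocate REGION_COPY itself.  */

static bool
example_move_exit_test_first (struct loop *loop, basic_block *region,
			      unsigned n_region)
{
  edge entry = loop_preheader_edge (loop);
  edge exit = single_exit (loop);

  if (!exit)
    return false;
  return gimple_duplicate_sese_tail (entry, exit, region, n_region, NULL);
}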
6654 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6655 adding blocks when the dominator traversal reaches EXIT. This
6656 function silently assumes that ENTRY strictly dominates EXIT. */
6658 void
6659 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6660 vec<basic_block> *bbs_p)
6662 basic_block son;
6664 for (son = first_dom_son (CDI_DOMINATORS, entry);
6665 son;
6666 son = next_dom_son (CDI_DOMINATORS, son))
6668 bbs_p->safe_push (son);
6669 if (son != exit)
6670 gather_blocks_in_sese_region (son, exit, bbs_p);
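/* Illustrative sketch (not from the GCC sources): this mirrors how
   move_sese_region_to_fn below collects a region.  ENTRY_BB must be
   pushed by hand because the dominator walk above only visits the
   sons of ENTRY.  */

static void
example_collect_region (basic_block entry_bb, basic_block exit_bb,
			vec<basic_block> *bbs)
{
  bbs->create (0);
  bbs->safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, bbs);
}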
6674 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6675 The duplicates are recorded in VARS_MAP. */
6677 static void
6678 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6679 tree to_context)
6681 tree t = *tp, new_t;
6682 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6684 if (DECL_CONTEXT (t) == to_context)
6685 return;
6687 bool existed;
6688 tree &loc = vars_map->get_or_insert (t, &existed);
6690 if (!existed)
6692 if (SSA_VAR_P (t))
6694 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6695 add_local_decl (f, new_t);
6697 else
6699 gcc_assert (TREE_CODE (t) == CONST_DECL);
6700 new_t = copy_node (t);
6702 DECL_CONTEXT (new_t) = to_context;
6704 loc = new_t;
6706 else
6707 new_t = loc;
6709 *tp = new_t;
6713 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6714 VARS_MAP maps old ssa names and var_decls to the new ones. */
6716 static tree
6717 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6718 tree to_context)
6720 tree new_name;
6722 gcc_assert (!virtual_operand_p (name));
6724 tree *loc = vars_map->get (name);
6726 if (!loc)
6728 tree decl = SSA_NAME_VAR (name);
6729 if (decl)
6731 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6732 replace_by_duplicate_decl (&decl, vars_map, to_context);
6733 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6734 decl, SSA_NAME_DEF_STMT (name));
6736 else
6737 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6738 name, SSA_NAME_DEF_STMT (name));
6740 /* Now that we've used the def stmt to define new_name, make sure it
6741 doesn't define name anymore. */
6742 SSA_NAME_DEF_STMT (name) = NULL;
6744 vars_map->put (name, new_name);
6746 else
6747 new_name = *loc;
6749 return new_name;
6752 struct move_stmt_d
6754 tree orig_block;
6755 tree new_block;
6756 tree from_context;
6757 tree to_context;
6758 hash_map<tree, tree> *vars_map;
6759 htab_t new_label_map;
6760 hash_map<void *, void *> *eh_map;
6761 bool remap_decls_p;
6764 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6765 contained in *TP to the new block if it was ORIG_BLOCK previously,
6766 and change the DECL_CONTEXT of every local variable referenced in *TP. */
6768 static tree
6769 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6771 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6772 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6773 tree t = *tp;
6775 if (EXPR_P (t))
6777 tree block = TREE_BLOCK (t);
6778 if (block == NULL_TREE)
6780 else if (block == p->orig_block
6781 || p->orig_block == NULL_TREE)
6782 TREE_SET_BLOCK (t, p->new_block);
6783 else if (flag_checking)
6785 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6786 block = BLOCK_SUPERCONTEXT (block);
6787 gcc_assert (block == p->orig_block);
6790 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6792 if (TREE_CODE (t) == SSA_NAME)
6793 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6794 else if (TREE_CODE (t) == PARM_DECL
6795 && gimple_in_ssa_p (cfun))
6796 *tp = *(p->vars_map->get (t));
6797 else if (TREE_CODE (t) == LABEL_DECL)
6799 if (p->new_label_map)
6801 struct tree_map in, *out;
6802 in.base.from = t;
6803 out = (struct tree_map *)
6804 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6805 if (out)
6806 *tp = t = out->to;
6809 /* For FORCED_LABELs we can end up with references from other
6810 functions if some SESE regions are outlined. It is undefined
6811 behavior to jump in between them, but they could be used just for
6812 printing addresses etc. In that case, DECL_CONTEXT on the label
6813 should be the function containing the glabel stmt with that
6814 LABEL_DECL, rather than whatever function a reference to the label
6815 was last seen in. */
6816 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6817 DECL_CONTEXT (t) = p->to_context;
6819 else if (p->remap_decls_p)
6821 /* Replace T with its duplicate. T should no longer appear in the
6822 parent function, so this looks wasteful; however, it may appear
6823 in referenced_vars, and more importantly, as virtual operands of
6824 statements, and in alias lists of other variables. It would be
6825 quite difficult to expunge it from all those places. ??? It might
6826 suffice to do this for addressable variables. */
6827 if ((VAR_P (t) && !is_global_var (t))
6828 || TREE_CODE (t) == CONST_DECL)
6829 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6831 *walk_subtrees = 0;
6833 else if (TYPE_P (t))
6834 *walk_subtrees = 0;
6836 return NULL_TREE;
6839 /* Helper for move_stmt_r. Given an EH region number for the source
6840 function, map that to the duplicate EH region number in the dest. */
6842 static int
6843 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6845 eh_region old_r, new_r;
6847 old_r = get_eh_region_from_number (old_nr);
6848 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6850 return new_r->index;
6853 /* Similar, but operate on INTEGER_CSTs. */
6855 static tree
6856 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6858 int old_nr, new_nr;
6860 old_nr = tree_to_shwi (old_t_nr);
6861 new_nr = move_stmt_eh_region_nr (old_nr, p);
6863 return build_int_cst (integer_type_node, new_nr);
6866 /* Like move_stmt_op, but for gimple statements.
6868 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6869 contained in the current statement in *GSI_P and change the
6870 DECL_CONTEXT of every local variable referenced in the current
6871 statement. */
6873 static tree
6874 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6875 struct walk_stmt_info *wi)
6877 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6878 gimple *stmt = gsi_stmt (*gsi_p);
6879 tree block = gimple_block (stmt);
6881 if (block == p->orig_block
6882 || (p->orig_block == NULL_TREE
6883 && block != NULL_TREE))
6884 gimple_set_block (stmt, p->new_block);
6886 switch (gimple_code (stmt))
6888 case GIMPLE_CALL:
6889 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6891 tree r, fndecl = gimple_call_fndecl (stmt);
6892 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6893 switch (DECL_FUNCTION_CODE (fndecl))
6895 case BUILT_IN_EH_COPY_VALUES:
6896 r = gimple_call_arg (stmt, 1);
6897 r = move_stmt_eh_region_tree_nr (r, p);
6898 gimple_call_set_arg (stmt, 1, r);
6899 /* FALLTHRU */
6901 case BUILT_IN_EH_POINTER:
6902 case BUILT_IN_EH_FILTER:
6903 r = gimple_call_arg (stmt, 0);
6904 r = move_stmt_eh_region_tree_nr (r, p);
6905 gimple_call_set_arg (stmt, 0, r);
6906 break;
6908 default:
6909 break;
6912 break;
6914 case GIMPLE_RESX:
6916 gresx *resx_stmt = as_a <gresx *> (stmt);
6917 int r = gimple_resx_region (resx_stmt);
6918 r = move_stmt_eh_region_nr (r, p);
6919 gimple_resx_set_region (resx_stmt, r);
6921 break;
6923 case GIMPLE_EH_DISPATCH:
6925 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6926 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6927 r = move_stmt_eh_region_nr (r, p);
6928 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6930 break;
6932 case GIMPLE_OMP_RETURN:
6933 case GIMPLE_OMP_CONTINUE:
6934 break;
6936 case GIMPLE_LABEL:
6938 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6939 so that such labels can be referenced from other regions.
6940 Make sure to update it when seeing a GIMPLE_LABEL though,
6941 that is the owner of the label. */
6942 walk_gimple_op (stmt, move_stmt_op, wi);
6943 *handled_ops_p = true;
6944 tree label = gimple_label_label (as_a <glabel *> (stmt));
6945 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6946 DECL_CONTEXT (label) = p->to_context;
6948 break;
6950 default:
6951 if (is_gimple_omp (stmt))
6953 /* Do not remap variables inside OMP directives. Variables
6954 referenced in clauses and directive header belong to the
6955 parent function and should not be moved into the child
6956 function. */
6957 bool save_remap_decls_p = p->remap_decls_p;
6958 p->remap_decls_p = false;
6959 *handled_ops_p = true;
6961 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6962 move_stmt_op, wi);
6964 p->remap_decls_p = save_remap_decls_p;
6966 break;
6969 return NULL_TREE;
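/* Illustrative sketch (not from the GCC sources): the callback pair
   above is driven through walk_gimple_stmt, exactly as move_block_to_fn
   does below.  D is a move_stmt_d that the caller has filled in.  */

static void
example_move_one_stmt (gimple_stmt_iterator *gsi_p, struct move_stmt_d *d)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = d;
  walk_gimple_stmt (gsi_p, move_stmt_r, move_stmt_op, &wi);
}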
6972 /* Move basic block BB from function CFUN to function DEST_FN. The
6973 block is moved out of the original linked list and placed after
6974 block AFTER in the new list. Also, the block is removed from the
6975 original array of blocks and placed in DEST_FN's array of blocks.
6976 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6977 updated to reflect the moved edges.
6979 The local variables are remapped to new instances, VARS_MAP is used
6980 to record the mapping. */
6982 static void
6983 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6984 basic_block after, bool update_edge_count_p,
6985 struct move_stmt_d *d)
6987 struct control_flow_graph *cfg;
6988 edge_iterator ei;
6989 edge e;
6990 gimple_stmt_iterator si;
6991 unsigned old_len, new_len;
6993 /* Remove BB from dominance structures. */
6994 delete_from_dominance_info (CDI_DOMINATORS, bb);
6996 /* Move BB from its current loop to the copy in the new function. */
6997 if (current_loops)
6999 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7000 if (new_loop)
7001 bb->loop_father = new_loop;
7004 /* Link BB to the new linked list. */
7005 move_block_after (bb, after);
7007 /* Update the edge count in the corresponding flowgraphs. */
7008 if (update_edge_count_p)
7009 FOR_EACH_EDGE (e, ei, bb->succs)
7011 cfun->cfg->x_n_edges--;
7012 dest_cfun->cfg->x_n_edges++;
7015 /* Remove BB from the original basic block array. */
7016 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7017 cfun->cfg->x_n_basic_blocks--;
7019 /* Grow DEST_CFUN's basic block array if needed. */
7020 cfg = dest_cfun->cfg;
7021 cfg->x_n_basic_blocks++;
7022 if (bb->index >= cfg->x_last_basic_block)
7023 cfg->x_last_basic_block = bb->index + 1;
7025 old_len = vec_safe_length (cfg->x_basic_block_info);
7026 if ((unsigned) cfg->x_last_basic_block >= old_len)
7028 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7029 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7032 (*cfg->x_basic_block_info)[bb->index] = bb;
7034 /* Remap the variables in phi nodes. */
7035 for (gphi_iterator psi = gsi_start_phis (bb);
7036 !gsi_end_p (psi); )
7038 gphi *phi = psi.phi ();
7039 use_operand_p use;
7040 tree op = PHI_RESULT (phi);
7041 ssa_op_iter oi;
7042 unsigned i;
7044 if (virtual_operand_p (op))
7046 /* Remove the phi nodes for virtual operands (alias analysis will be
7047 run for the new function, anyway). */
7048 remove_phi_node (&psi, true);
7049 continue;
7052 SET_PHI_RESULT (phi,
7053 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7054 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7056 op = USE_FROM_PTR (use);
7057 if (TREE_CODE (op) == SSA_NAME)
7058 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7061 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7063 location_t locus = gimple_phi_arg_location (phi, i);
7064 tree block = LOCATION_BLOCK (locus);
7066 if (locus == UNKNOWN_LOCATION)
7067 continue;
7068 if (d->orig_block == NULL_TREE || block == d->orig_block)
7070 locus = set_block (locus, d->new_block);
7071 gimple_phi_arg_set_location (phi, i, locus);
7075 gsi_next (&psi);
7078 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7080 gimple *stmt = gsi_stmt (si);
7081 struct walk_stmt_info wi;
7083 memset (&wi, 0, sizeof (wi));
7084 wi.info = d;
7085 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7087 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7089 tree label = gimple_label_label (label_stmt);
7090 int uid = LABEL_DECL_UID (label);
7092 gcc_assert (uid > -1);
7094 old_len = vec_safe_length (cfg->x_label_to_block_map);
7095 if (old_len <= (unsigned) uid)
7097 new_len = 3 * uid / 2 + 1;
7098 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7101 (*cfg->x_label_to_block_map)[uid] = bb;
7102 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7104 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7106 if (uid >= dest_cfun->cfg->last_label_uid)
7107 dest_cfun->cfg->last_label_uid = uid + 1;
7110 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7111 remove_stmt_from_eh_lp_fn (cfun, stmt);
7113 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7114 gimple_remove_stmt_histograms (cfun, stmt);
7116 /* We cannot leave any operands allocated from the operand caches of
7117 the current function. */
7118 free_stmt_operands (cfun, stmt);
7119 push_cfun (dest_cfun);
7120 update_stmt (stmt);
7121 pop_cfun ();
7124 FOR_EACH_EDGE (e, ei, bb->succs)
7125 if (e->goto_locus != UNKNOWN_LOCATION)
7127 tree block = LOCATION_BLOCK (e->goto_locus);
7128 if (d->orig_block == NULL_TREE
7129 || block == d->orig_block)
7130 e->goto_locus = set_block (e->goto_locus, d->new_block);
7134 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7135 the outermost EH region. Use REGION as the incoming base EH region. */
7137 static eh_region
7138 find_outermost_region_in_block (struct function *src_cfun,
7139 basic_block bb, eh_region region)
7141 gimple_stmt_iterator si;
7143 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7145 gimple *stmt = gsi_stmt (si);
7146 eh_region stmt_region;
7147 int lp_nr;
7149 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7150 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7151 if (stmt_region)
7153 if (region == NULL)
7154 region = stmt_region;
7155 else if (stmt_region != region)
7157 region = eh_region_outermost (src_cfun, stmt_region, region);
7158 gcc_assert (region != NULL);
7163 return region;
7166 static tree
7167 new_label_mapper (tree decl, void *data)
7169 htab_t hash = (htab_t) data;
7170 struct tree_map *m;
7171 void **slot;
7173 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7175 m = XNEW (struct tree_map);
7176 m->hash = DECL_UID (decl);
7177 m->base.from = decl;
7178 m->to = create_artificial_label (UNKNOWN_LOCATION);
7179 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7180 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7181 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7183 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7184 gcc_assert (*slot == NULL);
7186 *slot = m;
7188 return m->to;
7191 /* Tree walker to replace the decls used inside value expressions by
7192 duplicates. */
7194 static tree
7195 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7197 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7199 switch (TREE_CODE (*tp))
7201 case VAR_DECL:
7202 case PARM_DECL:
7203 case RESULT_DECL:
7204 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7205 break;
7206 default:
7207 break;
7210 if (IS_TYPE_OR_DECL_P (*tp))
7211 *walk_subtrees = false;
7213 return NULL;
7216 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7217 subblocks. */
7219 static void
7220 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7221 tree to_context)
7223 tree *tp, t;
7225 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7227 t = *tp;
7228 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7229 continue;
7230 replace_by_duplicate_decl (&t, vars_map, to_context);
7231 if (t != *tp)
7233 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7235 tree x = DECL_VALUE_EXPR (*tp);
7236 struct replace_decls_d rd = { vars_map, to_context };
7237 x = unshare_expr (x);
7238 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7239 SET_DECL_VALUE_EXPR (t, x);
7240 DECL_HAS_VALUE_EXPR_P (t) = 1;
7242 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7243 *tp = t;
7247 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7248 replace_block_vars_by_duplicates (block, vars_map, to_context);
7251 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7252 from FN1 to FN2. */
7254 static void
7255 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7256 struct loop *loop)
7258 /* Discard it from the old loop array. */
7259 (*get_loops (fn1))[loop->num] = NULL;
7261 /* Place it in the new loop array, assigning it a new number. */
7262 loop->num = number_of_loops (fn2);
7263 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7265 /* Recurse to children. */
7266 for (loop = loop->inner; loop; loop = loop->next)
7267 fixup_loop_arrays_after_move (fn1, fn2, loop);
7270 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7271 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7273 DEBUG_FUNCTION void
7274 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7276 basic_block bb;
7277 edge_iterator ei;
7278 edge e;
7279 bitmap bbs = BITMAP_ALLOC (NULL);
7280 int i;
7282 gcc_assert (entry != NULL);
7283 gcc_assert (entry != exit);
7284 gcc_assert (bbs_p != NULL);
7286 gcc_assert (bbs_p->length () > 0);
7288 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7289 bitmap_set_bit (bbs, bb->index);
7291 gcc_assert (bitmap_bit_p (bbs, entry->index));
7292 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7294 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7296 if (bb == entry)
7298 gcc_assert (single_pred_p (entry));
7299 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7301 else
7302 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7304 e = ei_edge (ei);
7305 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7308 if (bb == exit)
7310 gcc_assert (single_succ_p (exit));
7311 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7313 else
7314 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7316 e = ei_edge (ei);
7317 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7321 BITMAP_FREE (bbs);
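/* Illustrative sketch (not from the GCC sources): verify_sese is a
   debug-time consistency check, so callers guard it with flag_checking,
   as move_sese_region_to_fn does below after collecting the region.  */

static void
example_check_region (basic_block entry_bb, basic_block exit_bb,
		      vec<basic_block> *bbs)
{
  if (flag_checking)
    verify_sese (entry_bb, exit_bb, bbs);
}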
7324 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7326 bool
7327 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7329 bitmap release_names = (bitmap)data;
7331 if (TREE_CODE (from) != SSA_NAME)
7332 return true;
7334 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7335 return true;
7338 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7339 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7340 single basic block in the original CFG and the new basic block is
7341 returned. DEST_CFUN must not have a CFG yet.
7343 Note that the region need not be a pure SESE region. Blocks inside
7344 the region may contain calls to abort/exit. The only restriction
7345 is that ENTRY_BB should be the only entry point and it must
7346 dominate EXIT_BB.
7348 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7349 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7350 to the new function.
7352 All local variables referenced in the region are assumed to be in
7353 the corresponding BLOCK_VARS and unexpanded variable lists
7354 associated with DEST_CFUN.
7356 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7357 reimplement move_sese_region_to_fn by duplicating the region rather than
7358 moving it. */
7360 basic_block
7361 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7362 basic_block exit_bb, tree orig_block)
7364 vec<basic_block> bbs, dom_bbs;
7365 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7366 basic_block after, bb, *entry_pred, *exit_succ, abb;
7367 struct function *saved_cfun = cfun;
7368 int *entry_flag, *exit_flag;
7369 profile_probability *entry_prob, *exit_prob;
7370 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7371 edge e;
7372 edge_iterator ei;
7373 htab_t new_label_map;
7374 hash_map<void *, void *> *eh_map;
7375 struct loop *loop = entry_bb->loop_father;
7376 struct loop *loop0 = get_loop (saved_cfun, 0);
7377 struct move_stmt_d d;
7379 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7380 region. */
7381 gcc_assert (entry_bb != exit_bb
7382 && (!exit_bb
7383 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7385 /* Collect all the blocks in the region. Manually add ENTRY_BB
7386 because it won't be added by dfs_enumerate_from. */
7387 bbs.create (0);
7388 bbs.safe_push (entry_bb);
7389 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7391 if (flag_checking)
7392 verify_sese (entry_bb, exit_bb, &bbs);
7394 /* The blocks that used to be dominated by something in BBS will now be
7395 dominated by the new block. */
7396 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7397 bbs.address (),
7398 bbs.length ());
7400 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7401 the predecessor edges to ENTRY_BB and the successor edges to
7402 EXIT_BB so that we can re-attach them to the new basic block that
7403 will replace the region. */
7404 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7405 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7406 entry_flag = XNEWVEC (int, num_entry_edges);
7407 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7408 i = 0;
7409 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7411 entry_prob[i] = e->probability;
7412 entry_flag[i] = e->flags;
7413 entry_pred[i++] = e->src;
7414 remove_edge (e);
7417 if (exit_bb)
7419 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7420 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7421 exit_flag = XNEWVEC (int, num_exit_edges);
7422 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7423 i = 0;
7424 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7426 exit_prob[i] = e->probability;
7427 exit_flag[i] = e->flags;
7428 exit_succ[i++] = e->dest;
7429 remove_edge (e);
7432 else
7434 num_exit_edges = 0;
7435 exit_succ = NULL;
7436 exit_flag = NULL;
7437 exit_prob = NULL;
7440 /* Switch context to the child function to initialize DEST_FN's CFG. */
7441 gcc_assert (dest_cfun->cfg == NULL);
7442 push_cfun (dest_cfun);
7444 init_empty_tree_cfg ();
7446 /* Initialize EH information for the new function. */
7447 eh_map = NULL;
7448 new_label_map = NULL;
7449 if (saved_cfun->eh)
7451 eh_region region = NULL;
7453 FOR_EACH_VEC_ELT (bbs, i, bb)
7454 region = find_outermost_region_in_block (saved_cfun, bb, region);
7456 init_eh_for_function ();
7457 if (region != NULL)
7459 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7460 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7461 new_label_mapper, new_label_map);
7465 /* Initialize an empty loop tree. */
7466 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7467 init_loops_structure (dest_cfun, loops, 1);
7468 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7469 set_loops_for_fn (dest_cfun, loops);
7471 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7473 /* Move the outlined loop tree part. */
7474 num_nodes = bbs.length ();
7475 FOR_EACH_VEC_ELT (bbs, i, bb)
7477 if (bb->loop_father->header == bb)
7479 struct loop *this_loop = bb->loop_father;
7480 struct loop *outer = loop_outer (this_loop);
7481 if (outer == loop
7482 /* If the SESE region contains some bbs ending with
7483 a noreturn call, those are considered to belong
7484 to the outermost loop in saved_cfun, rather than
7485 the entry_bb's loop_father. */
7486 || outer == loop0)
7488 if (outer != loop)
7489 num_nodes -= this_loop->num_nodes;
7490 flow_loop_tree_node_remove (bb->loop_father);
7491 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7492 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7495 else if (bb->loop_father == loop0 && loop0 != loop)
7496 num_nodes--;
7498 /* Remove loop exits from the outlined region. */
7499 if (loops_for_fn (saved_cfun)->exits)
7500 FOR_EACH_EDGE (e, ei, bb->succs)
7502 struct loops *l = loops_for_fn (saved_cfun);
7503 loop_exit **slot
7504 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7505 NO_INSERT);
7506 if (slot)
7507 l->exits->clear_slot (slot);
7512 /* Adjust the number of blocks in the tree root of the outlined part. */
7513 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7515 /* Setup a mapping to be used by move_block_to_fn. */
7516 loop->aux = current_loops->tree_root;
7517 loop0->aux = current_loops->tree_root;
7519 /* Fix up orig_loop_num. If the loop referenced in it has been moved
7520 to dest_cfun, update the orig_loop_num field, otherwise clear it. */
7521 struct loop *dloop;
7522 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7523 if (dloop->orig_loop_num)
7525 if ((*larray)[dloop->orig_loop_num] != NULL
7526 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7527 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7528 else
7529 dloop->orig_loop_num = 0;
7531 ggc_free (larray);
7533 pop_cfun ();
7535 /* Move blocks from BBS into DEST_CFUN. */
7536 gcc_assert (bbs.length () >= 2);
7537 after = dest_cfun->cfg->x_entry_block_ptr;
7538 hash_map<tree, tree> vars_map;
7540 memset (&d, 0, sizeof (d));
7541 d.orig_block = orig_block;
7542 d.new_block = DECL_INITIAL (dest_cfun->decl);
7543 d.from_context = cfun->decl;
7544 d.to_context = dest_cfun->decl;
7545 d.vars_map = &vars_map;
7546 d.new_label_map = new_label_map;
7547 d.eh_map = eh_map;
7548 d.remap_decls_p = true;
7550 if (gimple_in_ssa_p (cfun))
7551 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7553 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7554 set_ssa_default_def (dest_cfun, arg, narg);
7555 vars_map.put (arg, narg);
7558 FOR_EACH_VEC_ELT (bbs, i, bb)
7560 /* No need to update edge counts on the last block. They have
7561 already been updated earlier when we detached the region from
7562 the original CFG. */
7563 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7564 after = bb;
7567 loop->aux = NULL;
7568 loop0->aux = NULL;
7569 /* Loop sizes are no longer correct, fix them up. */
7570 loop->num_nodes -= num_nodes;
7571 for (struct loop *outer = loop_outer (loop);
7572 outer; outer = loop_outer (outer))
7573 outer->num_nodes -= num_nodes;
7574 loop0->num_nodes -= bbs.length () - num_nodes;
7576 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7578 struct loop *aloop;
7579 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7580 if (aloop != NULL)
7582 if (aloop->simduid)
7584 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7585 d.to_context);
7586 dest_cfun->has_simduid_loops = true;
7588 if (aloop->force_vectorize)
7589 dest_cfun->has_force_vectorize_loops = true;
7593 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7594 if (orig_block)
7596 tree block;
7597 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7598 == NULL_TREE);
7599 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7600 = BLOCK_SUBBLOCKS (orig_block);
7601 for (block = BLOCK_SUBBLOCKS (orig_block);
7602 block; block = BLOCK_CHAIN (block))
7603 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7604 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7607 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7608 &vars_map, dest_cfun->decl);
7610 if (new_label_map)
7611 htab_delete (new_label_map);
7612 if (eh_map)
7613 delete eh_map;
7615 if (gimple_in_ssa_p (cfun))
7617 /* We need to release ssa-names in a defined order, so first find them,
7618 and then iterate in ascending version order. */
7619 bitmap release_names = BITMAP_ALLOC (NULL);
7620 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7621 bitmap_iterator bi;
7622 unsigned i;
7623 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7624 release_ssa_name (ssa_name (i));
7625 BITMAP_FREE (release_names);
7628 /* Rewire the entry and exit blocks. The successor to the entry
7629 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7630 the child function. Similarly, the predecessor of DEST_FN's
7631 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7632 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7633 various CFG manipulation functions get to the right CFG.
7635 FIXME, this is silly. The CFG ought to become a parameter to
7636 these helpers. */
7637 push_cfun (dest_cfun);
7638 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7639 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7640 if (exit_bb)
7642 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7643 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7645 else
7646 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7647 pop_cfun ();
7649 /* Back in the original function, the SESE region has disappeared,
7650 create a new basic block in its place. */
7651 bb = create_empty_bb (entry_pred[0]);
7652 if (current_loops)
7653 add_bb_to_loop (bb, loop);
7654 for (i = 0; i < num_entry_edges; i++)
7656 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7657 e->probability = entry_prob[i];
7660 for (i = 0; i < num_exit_edges; i++)
7662 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7663 e->probability = exit_prob[i];
7666 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7667 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7668 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7669 dom_bbs.release ();
7671 if (exit_bb)
7673 free (exit_prob);
7674 free (exit_flag);
7675 free (exit_succ);
7677 free (entry_prob);
7678 free (entry_flag);
7679 free (entry_pred);
7680 bbs.release ();
7682 return bb;
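/* Illustrative sketch (not from the GCC sources): OMP expansion outlines
   a parallel region roughly like this.  CHILD_FN is assumed to be a
   FUNCTION_DECL whose struct function has no CFG yet, and
   ENTRY_BB/EXIT_BB delimit the statements being outlined.  */

static basic_block
example_outline_region (tree child_fn, basic_block entry_bb,
			basic_block exit_bb, tree block)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* The region is replaced by a single block in the current CFG.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
}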
7685 /* Dump default def DEF to file FILE using FLAGS and indentation
7686 SPC. */
7688 static void
7689 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7691 for (int i = 0; i < spc; ++i)
7692 fprintf (file, " ");
7693 dump_ssaname_info_to_file (file, def, spc);
7695 print_generic_expr (file, TREE_TYPE (def), flags);
7696 fprintf (file, " ");
7697 print_generic_expr (file, def, flags);
7698 fprintf (file, " = ");
7699 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7700 fprintf (file, ";\n");
7703 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7705 static void
7706 print_no_sanitize_attr_value (FILE *file, tree value)
7708 unsigned int flags = tree_to_uhwi (value);
7709 bool first = true;
7710 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7712 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7714 if (!first)
7715 fprintf (file, " | ");
7716 fprintf (file, "%s", sanitizer_opts[i].name);
7717 first = false;
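/* For example (illustrative): for a VALUE in which the flags of the
   "address" and "undefined" sanitizers are both set, the loop above
   emits

     address | undefined

   which dump_function_to_file below wraps as
   __attribute__((no_sanitize (...))).  */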
7722 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7725 void
7726 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7728 tree arg, var, old_current_fndecl = current_function_decl;
7729 struct function *dsf;
7730 bool ignore_topmost_bind = false, any_var = false;
7731 basic_block bb;
7732 tree chain;
7733 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7734 && decl_is_tm_clone (fndecl));
7735 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7737 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7739 fprintf (file, "__attribute__((");
7741 bool first = true;
7742 tree chain;
7743 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7744 first = false, chain = TREE_CHAIN (chain))
7746 if (!first)
7747 fprintf (file, ", ");
7749 tree name = get_attribute_name (chain);
7750 print_generic_expr (file, name, dump_flags);
7751 if (TREE_VALUE (chain) != NULL_TREE)
7753 fprintf (file, " (");
7755 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7756 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7757 else
7758 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7759 fprintf (file, ")");
7763 fprintf (file, "))\n");
7766 current_function_decl = fndecl;
7767 if (flags & TDF_GIMPLE)
7769 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7770 dump_flags | TDF_SLIM);
7771 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7773 else
7774 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7776 arg = DECL_ARGUMENTS (fndecl);
7777 while (arg)
7779 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7780 fprintf (file, " ");
7781 print_generic_expr (file, arg, dump_flags);
7782 if (DECL_CHAIN (arg))
7783 fprintf (file, ", ");
7784 arg = DECL_CHAIN (arg);
7786 fprintf (file, ")\n");
7788 dsf = DECL_STRUCT_FUNCTION (fndecl);
7789 if (dsf && (flags & TDF_EH))
7790 dump_eh_tree (file, dsf);
7792 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7794 dump_node (fndecl, TDF_SLIM | flags, file);
7795 current_function_decl = old_current_fndecl;
7796 return;
7799 /* When GIMPLE is lowered, the variables are no longer available in
7800 BIND_EXPRs, so display them separately. */
7801 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7803 unsigned ix;
7804 ignore_topmost_bind = true;
7806 fprintf (file, "{\n");
7807 if (gimple_in_ssa_p (fun)
7808 && (flags & TDF_ALIAS))
7810 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7811 arg = DECL_CHAIN (arg))
7813 tree def = ssa_default_def (fun, arg);
7814 if (def)
7815 dump_default_def (file, def, 2, flags);
7818 tree res = DECL_RESULT (fun->decl);
7819 if (res != NULL_TREE
7820 && DECL_BY_REFERENCE (res))
7822 tree def = ssa_default_def (fun, res);
7823 if (def)
7824 dump_default_def (file, def, 2, flags);
7827 tree static_chain = fun->static_chain_decl;
7828 if (static_chain != NULL_TREE)
7830 tree def = ssa_default_def (fun, static_chain);
7831 if (def)
7832 dump_default_def (file, def, 2, flags);
7836 if (!vec_safe_is_empty (fun->local_decls))
7837 FOR_EACH_LOCAL_DECL (fun, ix, var)
7839 print_generic_decl (file, var, flags);
7840 fprintf (file, "\n");
7842 any_var = true;
7845 tree name;
7847 if (gimple_in_ssa_p (cfun))
7848 FOR_EACH_SSA_NAME (ix, name, cfun)
7850 if (!SSA_NAME_VAR (name))
7852 fprintf (file, " ");
7853 print_generic_expr (file, TREE_TYPE (name), flags);
7854 fprintf (file, " ");
7855 print_generic_expr (file, name, flags);
7856 fprintf (file, ";\n");
7858 any_var = true;
7863 if (fun && fun->decl == fndecl
7864 && fun->cfg
7865 && basic_block_info_for_fn (fun))
7867 /* If the CFG has been built, emit a CFG-based dump. */
7868 if (!ignore_topmost_bind)
7869 fprintf (file, "{\n");
7871 if (any_var && n_basic_blocks_for_fn (fun))
7872 fprintf (file, "\n");
7874 FOR_EACH_BB_FN (bb, fun)
7875 dump_bb (file, bb, 2, flags);
7877 fprintf (file, "}\n");
7879 else if (fun->curr_properties & PROP_gimple_any)
7881 /* The function is now in GIMPLE form but the CFG has not been
7882 built yet. Emit the single sequence of GIMPLE statements
7883 that make up its body. */
7884 gimple_seq body = gimple_body (fndecl);
7886 if (gimple_seq_first_stmt (body)
7887 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7888 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7889 print_gimple_seq (file, body, 0, flags);
7890 else
7892 if (!ignore_topmost_bind)
7893 fprintf (file, "{\n");
7895 if (any_var)
7896 fprintf (file, "\n");
7898 print_gimple_seq (file, body, 2, flags);
7899 fprintf (file, "}\n");
7902 else
7904 int indent;
7906 /* Make a tree based dump. */
7907 chain = DECL_SAVED_TREE (fndecl);
7908 if (chain && TREE_CODE (chain) == BIND_EXPR)
7910 if (ignore_topmost_bind)
7912 chain = BIND_EXPR_BODY (chain);
7913 indent = 2;
7915 else
7916 indent = 0;
7918 else
7920 if (!ignore_topmost_bind)
7922 fprintf (file, "{\n");
7923 /* No topmost bind, pretend it's ignored for later. */
7924 ignore_topmost_bind = true;
7926 indent = 2;
7929 if (any_var)
7930 fprintf (file, "\n");
7932 print_generic_stmt_indented (file, chain, flags, indent);
7933 if (ignore_topmost_bind)
7934 fprintf (file, "}\n");
7937 if (flags & TDF_ENUMERATE_LOCALS)
7938 dump_enumerated_decls (file, flags);
7939 fprintf (file, "\n\n");
7941 current_function_decl = old_current_fndecl;
7944 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7946 DEBUG_FUNCTION void
7947 debug_function (tree fn, dump_flags_t flags)
7949 dump_function_to_file (fn, stderr, flags);
7953 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7955 static void
7956 print_pred_bbs (FILE *file, basic_block bb)
7958 edge e;
7959 edge_iterator ei;
7961 FOR_EACH_EDGE (e, ei, bb->preds)
7962 fprintf (file, "bb_%d ", e->src->index);
7966 /* Print on FILE the indexes for the successors of basic_block BB. */
7968 static void
7969 print_succ_bbs (FILE *file, basic_block bb)
7971 edge e;
7972 edge_iterator ei;
7974 FOR_EACH_EDGE (e, ei, bb->succs)
7975 fprintf (file, "bb_%d ", e->dest->index);
7978 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7980 void
7981 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7983 char *s_indent = (char *) alloca ((size_t) indent + 1);
7984 memset ((void *) s_indent, ' ', (size_t) indent);
7985 s_indent[indent] = '\0';
7987 /* Print basic_block's header. */
7988 if (verbosity >= 2)
7990 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7991 print_pred_bbs (file, bb);
7992 fprintf (file, "}, succs = {");
7993 print_succ_bbs (file, bb);
7994 fprintf (file, "})\n");
7997 /* Print basic_block's body. */
7998 if (verbosity >= 3)
8000 fprintf (file, "%s {\n", s_indent);
8001 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8002 fprintf (file, "%s }\n", s_indent);
8006 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8008 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8009 VERBOSITY level, this outputs the contents of the loop, or just its
8010 structure. */
8012 static void
8013 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8015 char *s_indent;
8016 basic_block bb;
8018 if (loop == NULL)
8019 return;
8021 s_indent = (char *) alloca ((size_t) indent + 1);
8022 memset ((void *) s_indent, ' ', (size_t) indent);
8023 s_indent[indent] = '\0';
8025 /* Print loop's header. */
8026 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8027 if (loop->header)
8028 fprintf (file, "header = %d", loop->header->index);
8029 else
8031 fprintf (file, "deleted)\n");
8032 return;
8034 if (loop->latch)
8035 fprintf (file, ", latch = %d", loop->latch->index);
8036 else
8037 fprintf (file, ", multiple latches");
8038 fprintf (file, ", niter = ");
8039 print_generic_expr (file, loop->nb_iterations);
8041 if (loop->any_upper_bound)
8043 fprintf (file, ", upper_bound = ");
8044 print_decu (loop->nb_iterations_upper_bound, file);
8046 if (loop->any_likely_upper_bound)
8048 fprintf (file, ", likely_upper_bound = ");
8049 print_decu (loop->nb_iterations_likely_upper_bound, file);
8052 if (loop->any_estimate)
8054 fprintf (file, ", estimate = ");
8055 print_decu (loop->nb_iterations_estimate, file);
8057 if (loop->unroll)
8058 fprintf (file, ", unroll = %d", loop->unroll);
8059 fprintf (file, ")\n");
8061 /* Print loop's body. */
8062 if (verbosity >= 1)
8064 fprintf (file, "%s{\n", s_indent);
8065 FOR_EACH_BB_FN (bb, cfun)
8066 if (bb->loop_father == loop)
8067 print_loops_bb (file, bb, indent, verbosity);
8069 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8070 fprintf (file, "%s}\n", s_indent);
8074 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8075 spaces. Depending on the VERBOSITY level, this outputs the contents
8076 of the loop, or just its structure. */
8078 static void
8079 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8080 int verbosity)
8082 if (loop == NULL)
8083 return;
8085 print_loop (file, loop, indent, verbosity);
8086 print_loop_and_siblings (file, loop->next, indent, verbosity);
8089 /* Follow a CFG edge from the entry point of the program, and on entry
8090 of a loop, pretty print the loop structure on FILE. */
8092 void
8093 print_loops (FILE *file, int verbosity)
8095 basic_block bb;
8097 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8098 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8099 if (bb && bb->loop_father)
8100 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8103 /* Dump a loop. */
8105 DEBUG_FUNCTION void
8106 debug (struct loop &ref)
8108 print_loop (stderr, &ref, 0, /*verbosity*/0);
8111 DEBUG_FUNCTION void
8112 debug (struct loop *ptr)
8114 if (ptr)
8115 debug (*ptr);
8116 else
8117 fprintf (stderr, "<nil>\n");
8120 /* Dump a loop verbosely. */
8122 DEBUG_FUNCTION void
8123 debug_verbose (struct loop &ref)
8125 print_loop (stderr, &ref, 0, /*verbosity*/3);
8128 DEBUG_FUNCTION void
8129 debug_verbose (struct loop *ptr)
8131 if (ptr)
8132 debug_verbose (*ptr);
8133 else
8134 fprintf (stderr, "<nil>\n");
8138 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8140 DEBUG_FUNCTION void
8141 debug_loops (int verbosity)
8143 print_loops (stderr, verbosity);
8146 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8148 DEBUG_FUNCTION void
8149 debug_loop (struct loop *loop, int verbosity)
8151 print_loop (stderr, loop, 0, verbosity);
8154 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8155 level. */
8157 DEBUG_FUNCTION void
8158 debug_loop_num (unsigned num, int verbosity)
8160 debug_loop (get_loop (cfun, num), verbosity);
8163 /* Return true if BB ends with a call, possibly followed by some
8164 instructions that must stay with the call. Return false
8165 otherwise. */
8167 static bool
8168 gimple_block_ends_with_call_p (basic_block bb)
8170 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8171 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8175 /* Return true if BB ends with a conditional branch. Return false
8176 otherwise. */
8178 static bool
8179 gimple_block_ends_with_condjump_p (const_basic_block bb)
8181 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8182 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8186 /* Return true if statement T may terminate execution of BB in ways not
8187 explicitly represented in the CFG. */
8189 bool
8190 stmt_can_terminate_bb_p (gimple *t)
8192 tree fndecl = NULL_TREE;
8193 int call_flags = 0;
8195 /* An EH exception that is not handled internally terminates execution
8196 of the whole function. */
8197 if (stmt_can_throw_external (t))
8198 return true;
8200 /* NORETURN and LONGJMP calls already have an edge to exit.
8201 CONST and PURE calls do not need one.
8202 We don't currently check for CONST and PURE here, although
8203 it would be a good idea, because those attributes are
8204 figured out from the RTL in mark_constant_function, and
8205 the counter incrementation code from -fprofile-arcs
8206 leads to different results from -fbranch-probabilities. */
8207 if (is_gimple_call (t))
8209 fndecl = gimple_call_fndecl (t);
8210 call_flags = gimple_call_flags (t);
8213 if (is_gimple_call (t)
8214 && fndecl
8215 && DECL_BUILT_IN (fndecl)
8216 && (call_flags & ECF_NOTHROW)
8217 && !(call_flags & ECF_RETURNS_TWICE)
8218 /* fork() doesn't really return twice, but wrapping it in
8219 __gcov_fork(), which calls __gcov_flush() and clears the
8220 counters before forking, has the same effect as returning
8221 twice. Force a fake edge. */
8222 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8223 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8224 return false;
8226 if (is_gimple_call (t))
8228 edge_iterator ei;
8229 edge e;
8230 basic_block bb;
8232 if (call_flags & (ECF_PURE | ECF_CONST)
8233 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8234 return false;
8236 /* A function call may do a longjmp, terminate the program or do other things.
8237 Special-case noreturn calls that have non-abnormal edges out, as in that
8238 case the fact is sufficiently represented by the lack of edges out of T. */
8239 if (!(call_flags & ECF_NORETURN))
8240 return true;
8242 bb = gimple_bb (t);
8243 FOR_EACH_EDGE (e, ei, bb->succs)
8244 if ((e->flags & EDGE_FAKE) == 0)
8245 return true;
8248 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8249 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8250 return true;
8252 return false;
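/* For instance (illustrative): a block ending in a plain call

     foo ();

   makes the function above return true when foo is an arbitrary extern
   function, since foo may longjmp or call exit; for a non-looping const
   or pure call it returns false, because such a call cannot terminate
   the block in a way the CFG does not show.  */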
8256 /* Add fake edges to the function exit for any non-constant and
8257 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8258 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8259 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8260 that were split.
8262 The goal is to expose cases in which entering a basic block does
8263 not imply that all subsequent instructions must be executed. */
8265 static int
8266 gimple_flow_call_edges_add (sbitmap blocks)
8268 int i;
8269 int blocks_split = 0;
8270 int last_bb = last_basic_block_for_fn (cfun);
8271 bool check_last_block = false;
8273 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8274 return 0;
8276 if (! blocks)
8277 check_last_block = true;
8278 else
8279 check_last_block = bitmap_bit_p (blocks,
8280 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8282 /* In the last basic block, before epilogue generation, there will be
8283 a fallthru edge to EXIT. Special care is required if the last insn
8284 of the last basic block is a call because make_edge folds duplicate
8285 edges, which would result in the fallthru edge also being marked
8286 fake, which would result in the fallthru edge being removed by
8287 remove_fake_edges, which would result in an invalid CFG.
8289 Moreover, we can't elide the outgoing fake edge, since the block
8290 profiler needs to take this into account in order to solve the minimal
8291 spanning tree in the case that the call doesn't return.
8293 Handle this by adding a dummy instruction in a new last basic block. */
8294 if (check_last_block)
8296 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8297 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8298 gimple *t = NULL;
8300 if (!gsi_end_p (gsi))
8301 t = gsi_stmt (gsi);
8303 if (t && stmt_can_terminate_bb_p (t))
8305 edge e;
8307 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8308 if (e)
8310 gsi_insert_on_edge (e, gimple_build_nop ());
8311 gsi_commit_edge_inserts ();
8316 /* Now add fake edges to the function exit for any non-constant
8317 calls since there is no way that we can determine if they will
8318 return or not... */
8319 for (i = 0; i < last_bb; i++)
8321 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8322 gimple_stmt_iterator gsi;
8323 gimple *stmt, *last_stmt;
8325 if (!bb)
8326 continue;
8328 if (blocks && !bitmap_bit_p (blocks, i))
8329 continue;
8331 gsi = gsi_last_nondebug_bb (bb);
8332 if (!gsi_end_p (gsi))
8334 last_stmt = gsi_stmt (gsi);
8337 stmt = gsi_stmt (gsi);
8338 if (stmt_can_terminate_bb_p (stmt))
8340 edge e;
8342 /* The handling above of the final block before the
8343 epilogue should be enough to verify that there is
8344 no edge to the exit block in CFG already.
8345 Calling make_edge in such case would cause us to
8346 mark that edge as fake and remove it later. */
8347 if (flag_checking && stmt == last_stmt)
8349 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8350 gcc_assert (e == NULL);
8353 /* Note that the following may create a new basic block
8354 and renumber the existing basic blocks. */
8355 if (stmt != last_stmt)
8357 e = split_block (bb, stmt);
8358 if (e)
8359 blocks_split++;
8361 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8362 e->probability = profile_probability::guessed_never ();
8364 gsi_prev (&gsi);
8366 while (!gsi_end_p (gsi));
8370 if (blocks_split)
8371 checking_verify_flow_info ();
8373 return blocks_split;
8376 /* Removes edge E and all the blocks dominated by it, and updates dominance
8377 information. The IL in E->src needs to be updated separately.
8378 If dominance info is not available, only the edge E is removed. */
8380 void
8381 remove_edge_and_dominated_blocks (edge e)
8383 vec<basic_block> bbs_to_remove = vNULL;
8384 vec<basic_block> bbs_to_fix_dom = vNULL;
8385 edge f;
8386 edge_iterator ei;
8387 bool none_removed = false;
8388 unsigned i;
8389 basic_block bb, dbb;
8390 bitmap_iterator bi;
8392 /* Removing a path inside a non-root loop may change the loop
8393 ownership of blocks or remove loops entirely. Mark loops for fixup. */
8394 if (current_loops
8395 && loop_outer (e->src->loop_father) != NULL
8396 && e->src->loop_father == e->dest->loop_father)
8397 loops_state_set (LOOPS_NEED_FIXUP);
8399 if (!dom_info_available_p (CDI_DOMINATORS))
8401 remove_edge (e);
8402 return;
8405 /* No updating is needed for edges to exit. */
8406 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8408 if (cfgcleanup_altered_bbs)
8409 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8410 remove_edge (e);
8411 return;
8414 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8415 that is not dominated by E->dest, then this set is empty. Otherwise,
8416 all the basic blocks dominated by E->dest are removed.
8418 Also, to DF_IDOM we store the immediate dominators of the blocks in
8419 the dominance frontier of E (i.e., of the successors of the
8420 removed blocks, if there are any, and of E->dest otherwise). */
8421 FOR_EACH_EDGE (f, ei, e->dest->preds)
8423 if (f == e)
8424 continue;
8426 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8428 none_removed = true;
8429 break;
8433 auto_bitmap df, df_idom;
8434 if (none_removed)
8435 bitmap_set_bit (df_idom,
8436 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8437 else
8439 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8440 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8442 FOR_EACH_EDGE (f, ei, bb->succs)
8444 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8445 bitmap_set_bit (df, f->dest->index);
8448 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8449 bitmap_clear_bit (df, bb->index);
8451 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8453 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8454 bitmap_set_bit (df_idom,
8455 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8459 if (cfgcleanup_altered_bbs)
8461 /* Record the set of the altered basic blocks. */
8462 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8463 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8466 /* Remove E and the cancelled blocks. */
8467 if (none_removed)
8468 remove_edge (e);
8469 else
8471 /* Walk backwards so as to get a chance to substitute all
8472 released DEFs into debug stmts. See
8473 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8474 details. */
8475 for (i = bbs_to_remove.length (); i-- > 0; )
8476 delete_basic_block (bbs_to_remove[i]);
8479 /* Update the dominance information. The immediate dominator may change only
8480 for blocks whose immediate dominator belongs to DF_IDOM:
8482 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8483 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8484 Z dominates X after the removal. Before removal, there exists a path P
8485 from Y to X that avoids Z. Let F be the last edge on P that is
8486 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8487 dominates W, and because of P, Z does not dominate W), and W belongs to
8488 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8489 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8491 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8492 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8493 dbb;
8494 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8495 bbs_to_fix_dom.safe_push (dbb);
8498 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8500 bbs_to_remove.release ();
8501 bbs_to_fix_dom.release ();
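/* Caller sketch (illustrative): when the conditional ending BB is found
   to always take one arm, the dead arm can be removed together with
   everything it exclusively dominated, keeping dominators valid:

     edge true_e, false_e;
     extract_true_false_edges_from_block (bb, &true_e, &false_e);
     remove_edge_and_dominated_blocks (false_e);

   As noted above, the GIMPLE_COND in BB itself must still be rewritten
   by the caller; only the CFG and dominance info are updated here.  */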
8504 /* Purge dead EH edges from basic block BB. */
8506 bool
8507 gimple_purge_dead_eh_edges (basic_block bb)
8509 bool changed = false;
8510 edge e;
8511 edge_iterator ei;
8512 gimple *stmt = last_stmt (bb);
8514 if (stmt && stmt_can_throw_internal (stmt))
8515 return false;
8517 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8519 if (e->flags & EDGE_EH)
8521 remove_edge_and_dominated_blocks (e);
8522 changed = true;
8524 else
8525 ei_next (&ei);
8528 return changed;
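/* Caller sketch (illustrative): this is typically invoked after a stmt
   lost its ability to throw internally, mirroring the pattern used by
   execute_fixup_cfg further below:

     if (maybe_clean_eh_stmt (stmt)
	 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       todo |= TODO_cleanup_cfg;
*/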
8531 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8533 bool
8534 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8536 bool changed = false;
8537 unsigned i;
8538 bitmap_iterator bi;
8540 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8542 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8544 /* Earlier gimple_purge_dead_eh_edges could have removed
8545 this basic block already. */
8546 gcc_assert (bb || changed);
8547 if (bb != NULL)
8548 changed |= gimple_purge_dead_eh_edges (bb);
8551 return changed;
8554 /* Purge dead abnormal call edges from basic block BB. */
8556 bool
8557 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8559 bool changed = false;
8560 edge e;
8561 edge_iterator ei;
8562 gimple *stmt = last_stmt (bb);
8564 if (!cfun->has_nonlocal_label
8565 && !cfun->calls_setjmp)
8566 return false;
8568 if (stmt && stmt_can_make_abnormal_goto (stmt))
8569 return false;
8571 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8573 if (e->flags & EDGE_ABNORMAL)
8575 if (e->flags & EDGE_FALLTHRU)
8576 e->flags &= ~EDGE_ABNORMAL;
8577 else
8578 remove_edge_and_dominated_blocks (e);
8579 changed = true;
8581 else
8582 ei_next (&ei);
8585 return changed;
8588 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8590 bool
8591 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8593 bool changed = false;
8594 unsigned i;
8595 bitmap_iterator bi;
8597 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8599 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8601 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8602 this basic block already. */
8603 gcc_assert (bb || changed);
8604 if (bb != NULL)
8605 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8608 return changed;
8611 /* This function is called whenever a new edge is created or
8612 redirected. */
8614 static void
8615 gimple_execute_on_growing_pred (edge e)
8617 basic_block bb = e->dest;
8619 if (!gimple_seq_empty_p (phi_nodes (bb)))
8620 reserve_phi_args_for_new_edge (bb);
8623 /* This function is called immediately before edge E is removed from
8624 the edge vector E->dest->preds. */
8626 static void
8627 gimple_execute_on_shrinking_pred (edge e)
8629 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8630 remove_phi_args (e);
8633 /*---------------------------------------------------------------------------
8634 Helper functions for Loop versioning
8635 ---------------------------------------------------------------------------*/
8637 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block
8638 is a copy of 'first'; both are dominated by the 'new_head' basic block.
8639 When 'new_head' was created by splitting 'second's incoming edge, the
8640 new_head->second edge received phi arguments from split_edge(). Later,
8641 an additional edge 'e' was created to connect 'new_head' and 'first'.
8642 This routine now adds, on that edge 'e', the phi args that the
8643 new_head->second edge received as part of the edge splitting. */
8645 static void
8646 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8647 basic_block new_head, edge e)
8649 gphi *phi1, *phi2;
8650 gphi_iterator psi1, psi2;
8651 tree def;
8652 edge e2 = find_edge (new_head, second);
8654 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8655 edge, we should always have an edge from NEW_HEAD to SECOND. */
8656 gcc_assert (e2 != NULL);
8658 /* Browse all 'second' basic block phi nodes and add phi args to
8659 edge 'e' for 'first' head. PHI args are always in correct order. */
8661 for (psi2 = gsi_start_phis (second),
8662 psi1 = gsi_start_phis (first);
8663 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8664 gsi_next (&psi2), gsi_next (&psi1))
8666 phi1 = psi1.phi ();
8667 phi2 = psi2.phi ();
8668 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8669 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8674 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8675 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8676 the destination of the ELSE part. */
8678 static void
8679 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8680 basic_block second_head ATTRIBUTE_UNUSED,
8681 basic_block cond_bb, void *cond_e)
8683 gimple_stmt_iterator gsi;
8684 gimple *new_cond_expr;
8685 tree cond_expr = (tree) cond_e;
8686 edge e0;
8688 /* Build the new conditional expr.  */
8689 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8690 NULL_TREE, NULL_TREE);
8692 /* Add new cond in cond_bb. */
8693 gsi = gsi_last_bb (cond_bb);
8694 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8696 /* Adjust edges appropriately to connect new head with first head
8697 as well as second head. */
8698 e0 = single_succ_edge (cond_bb);
8699 e0->flags &= ~EDGE_FALLTHRU;
8700 e0->flags |= EDGE_FALSE_VALUE;
8704 /* Do book-keeping of basic block BB for the profile consistency checker.
8705 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8706 do post-pass accounting. Store the counts in RECORD. */
8707 static void
8708 gimple_account_profile_record (basic_block bb, int after_pass,
8709 struct profile_record *record)
8711 gimple_stmt_iterator i;
8712 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8714 record->size[after_pass]
8715 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8716 if (bb->count.initialized_p ())
8717 record->time[after_pass]
8718 += estimate_num_insns (gsi_stmt (i),
8719 &eni_time_weights) * bb->count.to_gcov_type ();
8720 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8721 record->time[after_pass]
8722 += estimate_num_insns (gsi_stmt (i),
8723 &eni_time_weights) * bb->count.to_frequency (cfun);
8727 struct cfg_hooks gimple_cfg_hooks = {
8728 "gimple",
8729 gimple_verify_flow_info,
8730 gimple_dump_bb, /* dump_bb */
8731 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8732 create_bb, /* create_basic_block */
8733 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8734 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8735 gimple_can_remove_branch_p, /* can_remove_branch_p */
8736 remove_bb, /* delete_basic_block */
8737 gimple_split_block, /* split_block */
8738 gimple_move_block_after, /* move_block_after */
8739 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8740 gimple_merge_blocks, /* merge_blocks */
8741 gimple_predict_edge, /* predict_edge */
8742 gimple_predicted_by_p, /* predicted_by_p */
8743 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8744 gimple_duplicate_bb, /* duplicate_block */
8745 gimple_split_edge, /* split_edge */
8746 gimple_make_forwarder_block, /* make_forwarder_block */
8747 NULL, /* tidy_fallthru_edge */
8748 NULL, /* force_nonfallthru */
8749 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8750 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8751 gimple_flow_call_edges_add, /* flow_call_edges_add */
8752 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8753 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8754 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8755 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8756 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8757 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8758 flush_pending_stmts, /* flush_pending_stmts */
8759 gimple_empty_block_p, /* block_empty_p */
8760 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8761 gimple_account_profile_record,
8765 /* Split all critical edges. */
8767 unsigned int
8768 split_critical_edges (void)
8770 basic_block bb;
8771 edge e;
8772 edge_iterator ei;
8774 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8775 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8776 mappings around the calls to split_edge. */
8777 start_recording_case_labels ();
8778 FOR_ALL_BB_FN (bb, cfun)
8780 FOR_EACH_EDGE (e, ei, bb->succs)
8782 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8783 split_edge (e);
8784 /* PRE inserts statements on edges and expects that, since
8785 split_critical_edges was done beforehand, committing edge
8786 insertions will not split more edges. In addition to critical
8787 edges we must also split edges whose destination has multiple
8788 predecessors or PHI nodes (or is the exit block) while the
8789 source ends in a control flow statement, such as RESX. Go ahead
8790 and split them too. This matches the logic in gimple_find_edge_insert_loc. */
8791 else if ((!single_pred_p (e->dest)
8792 || !gimple_seq_empty_p (phi_nodes (e->dest))
8793 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8794 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8795 && !(e->flags & EDGE_ABNORMAL))
8797 gimple_stmt_iterator gsi;
8799 gsi = gsi_last_bb (e->src);
8800 if (!gsi_end_p (gsi)
8801 && stmt_ends_bb_p (gsi_stmt (gsi))
8802 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8803 && !gimple_call_builtin_p (gsi_stmt (gsi),
8804 BUILT_IN_RETURN)))
8805 split_edge (e);
8809 end_recording_case_labels ();
8810 return 0;
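/* For reference, an edge is critical when its source has multiple
   successors and its destination has multiple predecessors, e.g. the
   B->D edge in:

	A   B
	 \ / \
	  D   C

   A statement inserted "on" B->D could be placed neither at the end of
   B (it would also execute on the B->C path) nor at the start of D (it
   would also execute on the A->D path), so the edge must first be
   split into B -> new_bb -> D.  */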
8813 namespace {
8815 const pass_data pass_data_split_crit_edges =
8817 GIMPLE_PASS, /* type */
8818 "crited", /* name */
8819 OPTGROUP_NONE, /* optinfo_flags */
8820 TV_TREE_SPLIT_EDGES, /* tv_id */
8821 PROP_cfg, /* properties_required */
8822 PROP_no_crit_edges, /* properties_provided */
8823 0, /* properties_destroyed */
8824 0, /* todo_flags_start */
8825 0, /* todo_flags_finish */
8828 class pass_split_crit_edges : public gimple_opt_pass
8830 public:
8831 pass_split_crit_edges (gcc::context *ctxt)
8832 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8835 /* opt_pass methods: */
8836 virtual unsigned int execute (function *) { return split_critical_edges (); }
8838 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8839 }; // class pass_split_crit_edges
8841 } // anon namespace
8843 gimple_opt_pass *
8844 make_pass_split_crit_edges (gcc::context *ctxt)
8846 return new pass_split_crit_edges (ctxt);
8850 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8851 in basic block BB, splitting the block as needed and creating
8852 a new conditionally executed basic block.
8853 Update the profile so the new bb is visited with probability PROB.
8854 Return the created basic block. */
8855 basic_block
8856 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8857 profile_probability prob)
8859 edge fall = split_block (bb, stmt);
8860 gimple_stmt_iterator iter = gsi_last_bb (bb);
8861 basic_block new_bb;
8863 /* Insert cond statement. */
8864 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8865 if (gsi_end_p (iter))
8866 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8867 else
8868 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8870 /* Create conditionally executed block. */
8871 new_bb = create_empty_bb (bb);
8872 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8873 e->probability = prob;
8874 new_bb->count = e->count ();
8875 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8877 /* Fix edge for split bb. */
8878 fall->flags = EDGE_FALSE_VALUE;
8879 fall->probability -= e->probability;
8881 /* Update dominance info. */
8882 if (dom_info_available_p (CDI_DOMINATORS))
8884 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8885 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8888 /* Update loop info. */
8889 if (current_loops)
8890 add_bb_to_loop (new_bb, bb->loop_father);
8892 return new_bb;
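/* Usage sketch (illustrative; the names flag_val and stmt are made up
   for the example): guarding instrumentation with a test that is
   expected to be cold:

     gcond *cond = gimple_build_cond (NE_EXPR, flag_val, integer_zero_node,
				      NULL_TREE, NULL_TREE);
     basic_block check_bb
       = insert_cond_bb (bb, stmt, cond,
			 profile_probability::very_unlikely ());

   Statements placed into CHECK_BB then execute only when the condition
   is true; the false edge of BB skips straight to the join block.  */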
8895 /* Build a ternary operation and gimplify it. Emit code before GSI.
8896 Return the gimple_val holding the result. */
8898 tree
8899 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8900 tree type, tree a, tree b, tree c)
8902 tree ret;
8903 location_t loc = gimple_location (gsi_stmt (*gsi));
8905 ret = fold_build3_loc (loc, code, type, a, b, c);
8906 STRIP_NOPS (ret);
8908 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8909 GSI_SAME_STMT);
8912 /* Build a binary operation and gimplify it. Emit code before GSI.
8913 Return the gimple_val holding the result. */
8915 tree
8916 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8917 tree type, tree a, tree b)
8919 tree ret;
8921 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8922 STRIP_NOPS (ret);
8924 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8925 GSI_SAME_STMT);
8928 /* Build a unary operation and gimplify it. Emit code before GSI.
8929 Return the gimple_val holding the result. */
8931 tree
8932 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8933 tree a)
8935 tree ret;
8937 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8938 STRIP_NOPS (ret);
8940 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8941 GSI_SAME_STMT);
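/* Usage sketch (illustrative): emitting t = (a + b) * c before GSI,
   with every intermediate result forced into a GIMPLE value:

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression first, then gimplifies whatever is
   left into statements inserted before the iterator.  */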
8946 /* Given a basic block B which ends with a conditional and has
8947 precisely two successors, determine which of the edges is taken if
8948 the conditional is true and which is taken if the conditional is
8949 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8951 void
8952 extract_true_false_edges_from_block (basic_block b,
8953 edge *true_edge,
8954 edge *false_edge)
8956 edge e = EDGE_SUCC (b, 0);
8958 if (e->flags & EDGE_TRUE_VALUE)
8960 *true_edge = e;
8961 *false_edge = EDGE_SUCC (b, 1);
8963 else
8965 *false_edge = e;
8966 *true_edge = EDGE_SUCC (b, 1);
8971 /* From a controlling predicate in the immediate dominator DOM of
8972 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8973 predicate evaluates to true and false and store them to
8974 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8975 they are non-NULL. Returns true if the edges can be determined,
8976 else return false. */
8978 bool
8979 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8980 edge *true_controlled_edge,
8981 edge *false_controlled_edge)
8983 basic_block bb = phiblock;
8984 edge true_edge, false_edge, tem;
8985 edge e0 = NULL, e1 = NULL;
8987 /* We have to verify that one edge into the PHI node is dominated
8988 by the true edge of the predicate block and the other edge
8989 dominated by the false edge. This ensures that the PHI argument
8990 we are going to take is completely determined by the path we
8991 take from the predicate block.
8992 We can only use BB dominance checks below if the destination of
8993 the true/false edges are dominated by their edge, thus only
8994 have a single predecessor. */
8995 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8996 tem = EDGE_PRED (bb, 0);
8997 if (tem == true_edge
8998 || (single_pred_p (true_edge->dest)
8999 && (tem->src == true_edge->dest
9000 || dominated_by_p (CDI_DOMINATORS,
9001 tem->src, true_edge->dest))))
9002 e0 = tem;
9003 else if (tem == false_edge
9004 || (single_pred_p (false_edge->dest)
9005 && (tem->src == false_edge->dest
9006 || dominated_by_p (CDI_DOMINATORS,
9007 tem->src, false_edge->dest))))
9008 e1 = tem;
9009 else
9010 return false;
9011 tem = EDGE_PRED (bb, 1);
9012 if (tem == true_edge
9013 || (single_pred_p (true_edge->dest)
9014 && (tem->src == true_edge->dest
9015 || dominated_by_p (CDI_DOMINATORS,
9016 tem->src, true_edge->dest))))
9017 e0 = tem;
9018 else if (tem == false_edge
9019 || (single_pred_p (false_edge->dest)
9020 && (tem->src == false_edge->dest
9021 || dominated_by_p (CDI_DOMINATORS,
9022 tem->src, false_edge->dest))))
9023 e1 = tem;
9024 else
9025 return false;
9026 if (!e0 || !e1)
9027 return false;
9029 if (true_controlled_edge)
9030 *true_controlled_edge = e0;
9031 if (false_controlled_edge)
9032 *false_controlled_edge = e1;
9034 return true;
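/* Illustrative example of the shape this handles:

	  DOM
	 /t  \f
	BB1   BB2
	  \   /
	PHIBLOCK

   Here the BB1->PHIBLOCK edge is returned as the true-controlled edge
   and BB2->PHIBLOCK as the false-controlled one.  If, say, BB2 had a
   second predecessor, the path taken from DOM would no longer determine
   the PHI argument on that edge, and the function returns false.  */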
9037 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9038 range [LOW, HIGH]. Place the associated stmts before the last stmt of BB. */
9040 void
9041 generate_range_test (basic_block bb, tree index, tree low, tree high,
9042 tree *lhs, tree *rhs)
9044 tree type = TREE_TYPE (index);
9045 tree utype = unsigned_type_for (type);
9047 low = fold_convert (type, low);
9048 high = fold_convert (type, high);
9050 tree tmp = make_ssa_name (type);
9051 gassign *sub1
9052 = gimple_build_assign (tmp, MINUS_EXPR, index, low);
9054 *lhs = make_ssa_name (utype);
9055 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
9057 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
9058 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9059 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT);
9060 gsi_insert_before (&gsi, a, GSI_SAME_STMT);
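/* The emitted test is the standard unsigned-wraparound range check:
   INDEX is in [LOW, HIGH] iff (utype) (INDEX - LOW) <= (utype) (HIGH - LOW),
   since any INDEX below LOW wraps around to a huge unsigned value.
   E.g. with low = 3, high = 7: index = 5 gives 2 <= 4 (in range), while
   index = 2 gives 0xff...ff <= 4, which fails (out of range).  A caller
   (sketch, assuming a comparison code of LE_EXPR) would finish with:

     tree lhs, rhs;
     generate_range_test (bb, index, low, high, &lhs, &rhs);
     gcond *cmp = gimple_build_cond (LE_EXPR, lhs, rhs,
				     NULL_TREE, NULL_TREE);
*/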
9063 /* Emit return warnings. */
9065 namespace {
9067 const pass_data pass_data_warn_function_return =
9069 GIMPLE_PASS, /* type */
9070 "*warn_function_return", /* name */
9071 OPTGROUP_NONE, /* optinfo_flags */
9072 TV_NONE, /* tv_id */
9073 PROP_cfg, /* properties_required */
9074 0, /* properties_provided */
9075 0, /* properties_destroyed */
9076 0, /* todo_flags_start */
9077 0, /* todo_flags_finish */
9080 class pass_warn_function_return : public gimple_opt_pass
9082 public:
9083 pass_warn_function_return (gcc::context *ctxt)
9084 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9087 /* opt_pass methods: */
9088 virtual unsigned int execute (function *);
9090 }; // class pass_warn_function_return
9092 unsigned int
9093 pass_warn_function_return::execute (function *fun)
9095 source_location location;
9096 gimple *last;
9097 edge e;
9098 edge_iterator ei;
9100 if (!targetm.warn_func_return (fun->decl))
9101 return 0;
9103 /* If we have a path to EXIT, then we do return. */
9104 if (TREE_THIS_VOLATILE (fun->decl)
9105 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9107 location = UNKNOWN_LOCATION;
9108 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9109 (e = ei_safe_edge (ei)); )
9111 last = last_stmt (e->src);
9112 if ((gimple_code (last) == GIMPLE_RETURN
9113 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9114 && location == UNKNOWN_LOCATION
9115 && ((location = LOCATION_LOCUS (gimple_location (last)))
9116 != UNKNOWN_LOCATION)
9117 && !optimize)
9118 break;
9119 /* When optimizing, replace return stmts in noreturn functions
9120 with a __builtin_unreachable () call. */
9121 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9123 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9124 gimple *new_stmt = gimple_build_call (fndecl, 0);
9125 gimple_set_location (new_stmt, gimple_location (last));
9126 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9127 gsi_replace (&gsi, new_stmt, true);
9128 remove_edge (e);
9130 else
9131 ei_next (&ei);
9133 if (location == UNKNOWN_LOCATION)
9134 location = cfun->function_end_locus;
9135 warning_at (location, 0, "%<noreturn%> function does return");
9138 /* If we see "return;" in some basic block, then we do reach the end
9139 without returning a value. */
9140 else if (warn_return_type > 0
9141 && !TREE_NO_WARNING (fun->decl)
9142 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9144 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9146 gimple *last = last_stmt (e->src);
9147 greturn *return_stmt = dyn_cast <greturn *> (last);
9148 if (return_stmt
9149 && gimple_return_retval (return_stmt) == NULL
9150 && !gimple_no_warning_p (last))
9152 location = gimple_location (last);
9153 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9154 location = fun->function_end_locus;
9155 warning_at (location, OPT_Wreturn_type,
9156 "control reaches end of non-void function");
9157 TREE_NO_WARNING (fun->decl) = 1;
9158 break;
9161 /* The C++ FE turns fallthrough from the end of a non-void function
9162 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9163 Recognize those too. */
9164 basic_block bb;
9165 if (!TREE_NO_WARNING (fun->decl))
9166 FOR_EACH_BB_FN (bb, fun)
9167 if (EDGE_COUNT (bb->succs) == 0)
9169 gimple *last = last_stmt (bb);
9170 const enum built_in_function ubsan_missing_ret
9171 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9172 if (last
9173 && ((LOCATION_LOCUS (gimple_location (last))
9174 == BUILTINS_LOCATION
9175 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9176 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9178 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9179 gsi_prev_nondebug (&gsi);
9180 gimple *prev = gsi_stmt (gsi);
9181 if (prev == NULL)
9182 location = UNKNOWN_LOCATION;
9183 else
9184 location = gimple_location (prev);
9185 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9186 location = fun->function_end_locus;
9187 warning_at (location, OPT_Wreturn_type,
9188 "control reaches end of non-void function");
9189 TREE_NO_WARNING (fun->decl) = 1;
9190 break;
9194 return 0;
9197 } // anon namespace
9199 gimple_opt_pass *
9200 make_pass_warn_function_return (gcc::context *ctxt)
9202 return new pass_warn_function_return (ctxt);
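/* Examples (illustrative) of user code this pass diagnoses:

     __attribute__ ((noreturn)) void die (int c)
     {
       if (c)
	 __builtin_abort ();
     }			     warns: 'noreturn' function does return

     int g (int c)
     {
       if (c)
	 return 1;
     }			     warns: control reaches end of non-void
			     function (-Wreturn-type)
*/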
9205 /* Walk a gimplified function and warn about calls whose return value is
9206 ignored although attribute((warn_unused_result)) is set on the callee's
9207 type. This is done before inlining, so we don't have to worry about that. */
9209 static void
9210 do_warn_unused_result (gimple_seq seq)
9212 tree fdecl, ftype;
9213 gimple_stmt_iterator i;
9215 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9217 gimple *g = gsi_stmt (i);
9219 switch (gimple_code (g))
9221 case GIMPLE_BIND:
9222 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9223 break;
9224 case GIMPLE_TRY:
9225 do_warn_unused_result (gimple_try_eval (g));
9226 do_warn_unused_result (gimple_try_cleanup (g));
9227 break;
9228 case GIMPLE_CATCH:
9229 do_warn_unused_result (gimple_catch_handler (
9230 as_a <gcatch *> (g)));
9231 break;
9232 case GIMPLE_EH_FILTER:
9233 do_warn_unused_result (gimple_eh_filter_failure (g));
9234 break;
9236 case GIMPLE_CALL:
9237 if (gimple_call_lhs (g))
9238 break;
9239 if (gimple_call_internal_p (g))
9240 break;
9242 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9243 LHS. All calls whose value is ignored should be
9244 represented like this. Look for the attribute. */
9245 fdecl = gimple_call_fndecl (g);
9246 ftype = gimple_call_fntype (g);
9248 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9250 location_t loc = gimple_location (g);
9252 if (fdecl)
9253 warning_at (loc, OPT_Wunused_result,
9254 "ignoring return value of %qD, "
9255 "declared with attribute warn_unused_result",
9256 fdecl);
9257 else
9258 warning_at (loc, OPT_Wunused_result,
9259 "ignoring return value of function "
9260 "declared with attribute warn_unused_result");
9262 break;
9264 default:
9265 /* Not a container, not a call, or a call whose value is used. */
9266 break;
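/* Example (illustrative) of a call this warns about.  The attribute
   lives on the function type, which is why TYPE_ATTRIBUTES of FTYPE
   is consulted above:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void)
     {
       must_check ();	     warns with -Wunused-result
       (void) must_check (); still warned: the call has no LHS in
			     GIMPLE, so a cast to void does not help
     }
*/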
9271 namespace {
9273 const pass_data pass_data_warn_unused_result =
9275 GIMPLE_PASS, /* type */
9276 "*warn_unused_result", /* name */
9277 OPTGROUP_NONE, /* optinfo_flags */
9278 TV_NONE, /* tv_id */
9279 PROP_gimple_any, /* properties_required */
9280 0, /* properties_provided */
9281 0, /* properties_destroyed */
9282 0, /* todo_flags_start */
9283 0, /* todo_flags_finish */
9286 class pass_warn_unused_result : public gimple_opt_pass
9288 public:
9289 pass_warn_unused_result (gcc::context *ctxt)
9290 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9293 /* opt_pass methods: */
9294 virtual bool gate (function *) { return flag_warn_unused_result; }
9295 virtual unsigned int execute (function *)
9297 do_warn_unused_result (gimple_body (current_function_decl));
9298 return 0;
9301 }; // class pass_warn_unused_result
9303 } // anon namespace
9305 gimple_opt_pass *
9306 make_pass_warn_unused_result (gcc::context *ctxt)
9308 return new pass_warn_unused_result (ctxt);
9311 /* IPA passes, compilation of earlier functions or inlining
9312 might have changed some properties, such as marking functions nothrow,
9313 pure, const or noreturn.
9314 Remove redundant edges and basic blocks, and create new ones if necessary.
9316 This pass can't be executed as a standalone pass from the pass manager,
9317 because in between inlining and this fixup verify_flow_info would fail. */
9319 unsigned int
9320 execute_fixup_cfg (void)
9322 basic_block bb;
9323 gimple_stmt_iterator gsi;
9324 int todo = 0;
9325 cgraph_node *node = cgraph_node::get (current_function_decl);
9326 profile_count num = node->count;
9327 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9328 bool scale = num.initialized_p () && !(num == den);
9330 if (scale)
9332 profile_count::adjust_for_ipa_scaling (&num, &den);
9333 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9334 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9335 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9338 FOR_EACH_BB_FN (bb, cfun)
9340 if (scale)
9341 bb->count = bb->count.apply_scale (num, den);
9342 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9344 gimple *stmt = gsi_stmt (gsi);
9345 tree decl = is_gimple_call (stmt)
9346 ? gimple_call_fndecl (stmt)
9347 : NULL;
9348 if (decl)
9350 int flags = gimple_call_flags (stmt);
9351 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9353 if (gimple_purge_dead_abnormal_call_edges (bb))
9354 todo |= TODO_cleanup_cfg;
9356 if (gimple_in_ssa_p (cfun))
9358 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9359 update_stmt (stmt);
9363 if (flags & ECF_NORETURN
9364 && fixup_noreturn_call (stmt))
9365 todo |= TODO_cleanup_cfg;
9368 /* Remove stores to variables we marked write-only.
9369 Keep the access when the store has side effects, i.e. when the
9370 source is volatile. */
9371 if (gimple_store_p (stmt)
9372 && !gimple_has_side_effects (stmt))
9374 tree lhs = get_base_address (gimple_get_lhs (stmt));
9376 if (VAR_P (lhs)
9377 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9378 && varpool_node::get (lhs)->writeonly)
9380 unlink_stmt_vdef (stmt);
9381 gsi_remove (&gsi, true);
9382 release_defs (stmt);
9383 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9384 continue;
9387 /* For calls we can simply remove LHS when it is known
9388 to be write-only. */
9389 if (is_gimple_call (stmt)
9390 && gimple_get_lhs (stmt))
9392 tree lhs = get_base_address (gimple_get_lhs (stmt));
9394 if (VAR_P (lhs)
9395 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9396 && varpool_node::get (lhs)->writeonly)
9398 gimple_call_set_lhs (stmt, NULL);
9399 update_stmt (stmt);
9400 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9404 if (maybe_clean_eh_stmt (stmt)
9405 && gimple_purge_dead_eh_edges (bb))
9406 todo |= TODO_cleanup_cfg;
9407 gsi_next (&gsi);
9410 /* If we have a basic block with no successors that does not
9411 end with a control statement or a noreturn call, end it with
9412 a call to __builtin_unreachable. This situation can occur
9413 when inlining a noreturn call that does in fact return. */
9414 if (EDGE_COUNT (bb->succs) == 0)
9416 gimple *stmt = last_stmt (bb);
9417 if (!stmt
9418 || (!is_ctrl_stmt (stmt)
9419 && (!is_gimple_call (stmt)
9420 || !gimple_call_noreturn_p (stmt))))
9422 if (stmt && is_gimple_call (stmt))
9423 gimple_call_set_ctrl_altering (stmt, false);
9424 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9425 stmt = gimple_build_call (fndecl, 0);
9426 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9427 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9428 if (!cfun->after_inlining)
9430 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9431 node->create_edge (cgraph_node::get_create (fndecl),
9432 call_stmt, bb->count);
9437 if (scale)
9438 compute_function_frequency ();
9440 if (current_loops
9441 && (todo & TODO_cleanup_cfg))
9442 loops_state_set (LOOPS_NEED_FIXUP);
9444 return todo;
9447 namespace {
9449 const pass_data pass_data_fixup_cfg =
9451 GIMPLE_PASS, /* type */
9452 "fixup_cfg", /* name */
9453 OPTGROUP_NONE, /* optinfo_flags */
9454 TV_NONE, /* tv_id */
9455 PROP_cfg, /* properties_required */
9456 0, /* properties_provided */
9457 0, /* properties_destroyed */
9458 0, /* todo_flags_start */
9459 0, /* todo_flags_finish */
9462 class pass_fixup_cfg : public gimple_opt_pass
9464 public:
9465 pass_fixup_cfg (gcc::context *ctxt)
9466 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9469 /* opt_pass methods: */
9470 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9471 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9473 }; // class pass_fixup_cfg
9475 } // anon namespace
9477 gimple_opt_pass *
9478 make_pass_fixup_cfg (gcc::context *ctxt)
9480 return new pass_fixup_cfg (ctxt);
9483 /* Garbage collection support for edge_def. */
9485 extern void gt_ggc_mx (tree&);
9486 extern void gt_ggc_mx (gimple *&);
9487 extern void gt_ggc_mx (rtx&);
9488 extern void gt_ggc_mx (basic_block&);
9490 static void
9491 gt_ggc_mx (rtx_insn *& x)
9493 if (x)
9494 gt_ggc_mx_rtx_def ((void *) x);
9497 void
9498 gt_ggc_mx (edge_def *e)
9500 tree block = LOCATION_BLOCK (e->goto_locus);
9501 gt_ggc_mx (e->src);
9502 gt_ggc_mx (e->dest);
9503 if (current_ir_type () == IR_GIMPLE)
9504 gt_ggc_mx (e->insns.g);
9505 else
9506 gt_ggc_mx (e->insns.r);
9507 gt_ggc_mx (block);
9510 /* PCH support for edge_def. */
9512 extern void gt_pch_nx (tree&);
9513 extern void gt_pch_nx (gimple *&);
9514 extern void gt_pch_nx (rtx&);
9515 extern void gt_pch_nx (basic_block&);
9517 static void
9518 gt_pch_nx (rtx_insn *& x)
9520 if (x)
9521 gt_pch_nx_rtx_def ((void *) x);
9524 void
9525 gt_pch_nx (edge_def *e)
9527 tree block = LOCATION_BLOCK (e->goto_locus);
9528 gt_pch_nx (e->src);
9529 gt_pch_nx (e->dest);
9530 if (current_ir_type () == IR_GIMPLE)
9531 gt_pch_nx (e->insns.g);
9532 else
9533 gt_pch_nx (e->insns.r);
9534 gt_pch_nx (block);
9537 void
9538 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9540 tree block = LOCATION_BLOCK (e->goto_locus);
9541 op (&(e->src), cookie);
9542 op (&(e->dest), cookie);
9543 if (current_ir_type () == IR_GIMPLE)
9544 op (&(e->insns.g), cookie);
9545 else
9546 op (&(e->insns.r), cookie);
9547 op (&(block), cookie);
9550 #if CHECKING_P
9552 namespace selftest {
9554 /* Helper function for CFG selftests: create a dummy function decl
9555 and push it as cfun. */
9557 static tree
9558 push_fndecl (const char *name)
9560 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9561 /* FIXME: this uses input_location: */
9562 tree fndecl = build_fn_decl (name, fn_type);
9563 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9564 NULL_TREE, integer_type_node);
9565 DECL_RESULT (fndecl) = retval;
9566 push_struct_function (fndecl);
9567 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9568 ASSERT_TRUE (fun != NULL);
9569 init_empty_tree_cfg_for_function (fun);
9570 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9571 ASSERT_EQ (0, n_edges_for_fn (fun));
9572 return fndecl;
9575 /* These tests directly create CFGs.
9576 Compare with the static fns within tree-cfg.c:
9577 - build_gimple_cfg
9578 - make_blocks: calls create_basic_block (seq, bb);
9579 - make_edges. */
9581 /* Verify a simple cfg of the form:
9582 ENTRY -> A -> B -> C -> EXIT. */
9584 static void
9585 test_linear_chain ()
9587 gimple_register_cfg_hooks ();
9589 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9590 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9592 /* Create some empty blocks. */
9593 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9594 basic_block bb_b = create_empty_bb (bb_a);
9595 basic_block bb_c = create_empty_bb (bb_b);
9597 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9598 ASSERT_EQ (0, n_edges_for_fn (fun));
9600 /* Create some edges: a simple linear chain of BBs. */
9601 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9602 make_edge (bb_a, bb_b, 0);
9603 make_edge (bb_b, bb_c, 0);
9604 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9606 /* Verify the edges. */
9607 ASSERT_EQ (4, n_edges_for_fn (fun));
9608 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9609 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9610 ASSERT_EQ (1, bb_a->preds->length ());
9611 ASSERT_EQ (1, bb_a->succs->length ());
9612 ASSERT_EQ (1, bb_b->preds->length ());
9613 ASSERT_EQ (1, bb_b->succs->length ());
9614 ASSERT_EQ (1, bb_c->preds->length ());
9615 ASSERT_EQ (1, bb_c->succs->length ());
9616 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9617 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9619 /* Verify the dominance information.
9620 Each BB in our simple chain should be dominated by the one before
9621 it. */
9622 calculate_dominance_info (CDI_DOMINATORS);
9623 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9624 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9625 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9626 ASSERT_EQ (1, dom_by_b.length ());
9627 ASSERT_EQ (bb_c, dom_by_b[0]);
9628 free_dominance_info (CDI_DOMINATORS);
9629 dom_by_b.release ();
9631 /* Similarly for post-dominance: each BB in our chain is post-dominated
9632 by the one after it. */
9633 calculate_dominance_info (CDI_POST_DOMINATORS);
9634 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9635 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9636 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9637 ASSERT_EQ (1, postdom_by_b.length ());
9638 ASSERT_EQ (bb_a, postdom_by_b[0]);
9639 free_dominance_info (CDI_POST_DOMINATORS);
9640 postdom_by_b.release ();
9642 pop_cfun ();
9645 /* Verify a simple CFG of the form:
9646          ENTRY
9647            |
9648            A
9649           / \
9650          /t  \f
9651         B     C
9652          \   /
9653           \ /
9654            D
9655            |
9656          EXIT.  */
9658 static void
9659 test_diamond ()
9661 gimple_register_cfg_hooks ();
9663 tree fndecl = push_fndecl ("cfg_test_diamond");
9664 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9666 /* Create some empty blocks. */
9667 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9668 basic_block bb_b = create_empty_bb (bb_a);
9669 basic_block bb_c = create_empty_bb (bb_a);
9670 basic_block bb_d = create_empty_bb (bb_b);
9672 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9673 ASSERT_EQ (0, n_edges_for_fn (fun));
9675 /* Create the edges. */
9676 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9677 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9678 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9679 make_edge (bb_b, bb_d, 0);
9680 make_edge (bb_c, bb_d, 0);
9681 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9683 /* Verify the edges. */
9684 ASSERT_EQ (6, n_edges_for_fn (fun));
9685 ASSERT_EQ (1, bb_a->preds->length ());
9686 ASSERT_EQ (2, bb_a->succs->length ());
9687 ASSERT_EQ (1, bb_b->preds->length ());
9688 ASSERT_EQ (1, bb_b->succs->length ());
9689 ASSERT_EQ (1, bb_c->preds->length ());
9690 ASSERT_EQ (1, bb_c->succs->length ());
9691 ASSERT_EQ (2, bb_d->preds->length ());
9692 ASSERT_EQ (1, bb_d->succs->length ());
9694 /* Verify the dominance information. */
9695 calculate_dominance_info (CDI_DOMINATORS);
9696 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9697 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9698 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9699 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9700 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9701 dom_by_a.release ();
9702 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9703 ASSERT_EQ (0, dom_by_b.length ());
9704 dom_by_b.release ();
9705 free_dominance_info (CDI_DOMINATORS);
9707 /* Similarly for post-dominance. */
9708 calculate_dominance_info (CDI_POST_DOMINATORS);
9709 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9710 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9711 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9712 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9713 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9714 postdom_by_d.release ();
9715 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9716 ASSERT_EQ (0, postdom_by_b.length ());
9717 postdom_by_b.release ();
9718 free_dominance_info (CDI_POST_DOMINATORS);
9720 pop_cfun ();
9723 /* Verify that we can handle a CFG containing a "complete" aka
9724 fully-connected subgraph (where A, B, C and D below all have edges
9725 pointing to every other node, and also to themselves).
9726 e.g.:
9727      ENTRY  EXIT
9728        |     ^
9729        v     |
9730        A-----+
9731
9732      where A is part of the fully-connected subgraph:
9733      A<--->B
9734      ^^   ^^
9735      | \ / |
9736      |  X  |
9737      | / \ |
9738      VV   VV
9739      C<--->D
9742 static void
9743 test_fully_connected ()
9745 gimple_register_cfg_hooks ();
9747 tree fndecl = push_fndecl ("cfg_fully_connected");
9748 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9750 const int n = 4;
9752 /* Create some empty blocks. */
9753 auto_vec <basic_block> subgraph_nodes;
9754 for (int i = 0; i < n; i++)
9755 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9757 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9758 ASSERT_EQ (0, n_edges_for_fn (fun));
9760 /* Create the edges. */
9761 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9762 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9763 for (int i = 0; i < n; i++)
9764 for (int j = 0; j < n; j++)
9765 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9767 /* Verify the edges. */
9768 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9769 /* The first one is linked to ENTRY/EXIT as well as itself and
9770 everything else. */
9771 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9772 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9773 /* The other ones in the subgraph are linked to everything in
9774 the subgraph (including themselves). */
9775 for (int i = 1; i < n; i++)
9777 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9778 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9781 /* Verify the dominance information. */
9782 calculate_dominance_info (CDI_DOMINATORS);
9783 /* The initial block in the subgraph should be dominated by ENTRY. */
9784 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9785 get_immediate_dominator (CDI_DOMINATORS,
9786 subgraph_nodes[0]));
9787 /* Every other block in the subgraph should be dominated by the
9788 initial block. */
9789 for (int i = 1; i < n; i++)
9790 ASSERT_EQ (subgraph_nodes[0],
9791 get_immediate_dominator (CDI_DOMINATORS,
9792 subgraph_nodes[i]));
9793 free_dominance_info (CDI_DOMINATORS);
9795 /* Similarly for post-dominance. */
9796 calculate_dominance_info (CDI_POST_DOMINATORS);
9797 /* The initial block in the subgraph should be postdominated by EXIT. */
9798 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9799 get_immediate_dominator (CDI_POST_DOMINATORS,
9800 subgraph_nodes[0]));
9801 /* Every other block in the subgraph should be postdominated by the
9802 initial block, since that leads to EXIT. */
9803 for (int i = 1; i < n; i++)
9804 ASSERT_EQ (subgraph_nodes[0],
9805 get_immediate_dominator (CDI_POST_DOMINATORS,
9806 subgraph_nodes[i]));
9807 free_dominance_info (CDI_POST_DOMINATORS);
9809 pop_cfun ();
9812 /* Run all of the selftests within this file. */
9814 void
9815 tree_cfg_c_tests ()
9817 test_linear_chain ();
9818 test_diamond ();
9819 test_fully_connected ();
9822 } // namespace selftest
9824 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9825 - loop
9826 - nested loops
9827 - switch statement (a block with many out-edges)
9828 - something that jumps to itself
9829 - etc */
9831 #endif /* CHECKING_P */